Elasticsearch: Fix legend generated on backend (#66039)

* Elasticsearch: Fix legend

* Remove comments

* Fix adding name to config

* Fix lint
Ivana Huckova 2023-04-11 13:29:38 +02:00 committed by GitHub
parent 05f7ae6c69
commit 3e4eb9c93e
12 changed files with 131 additions and 81 deletions
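
The change moves series naming from the frame to the value field's config: instead of setting frame.Name, the backend now writes the computed legend text into the value field's DisplayNameFromDS, which Grafana's field display logic uses when building the legend. Below is a minimal sketch of the new convention using the plugin SDK's data package; the frame contents are illustrative, not taken from a real Elasticsearch response.

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	// Build a two-field time series frame the way the parser does:
	// Fields[0] is the time field, Fields[1] is the numeric value field.
	value := 5.0
	timeField := data.NewField("Time", nil, []time.Time{time.Now()})
	valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []*float64{&value})

	// Before this commit the legend text lived on frame.Name.
	// Now it travels on the value field's config instead.
	if valueField.Config == nil {
		valueField.Config = &data.FieldConfig{}
	}
	valueField.Config.DisplayNameFromDS = "Count"

	frame := data.NewFrame("", timeField, valueField)
	fmt.Println(frame.Fields[1].Config.DisplayNameFromDS) // prints "Count"
}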

View File

@@ -85,7 +85,7 @@ func parseResponse(responses []*es.SearchResponse, targets []*Query, configuredF
 		if err != nil {
 			return &backend.QueryDataResponse{}, err
 		}
-		nameFrames(queryRes, target)
+		nameFields(queryRes, target)
 		trimDatapoints(queryRes, target)
 		result.Responses[target.RefID] = queryRes
@@ -806,7 +806,7 @@ func getSortedLabelValues(labels data.Labels) []string {
 	return values
 }
 
-func nameFrames(queryResult backend.DataResponse, target *Query) {
+func nameFields(queryResult backend.DataResponse, target *Query) {
 	set := make(map[string]struct{})
 	frames := queryResult.Frames
 	for _, v := range frames {
@@ -825,7 +825,10 @@ func nameFrames(queryResult backend.DataResponse, target *Query) {
 			// another is "number"
 			valueField := frame.Fields[1]
 			fieldName := getFieldName(*valueField, target, metricTypeCount)
-			frame.Name = fieldName
+			if valueField.Config == nil {
+				valueField.Config = &data.FieldConfig{}
+			}
+			valueField.Config.DisplayNameFromDS = fieldName
 		}
 	}
 }
@@ -895,7 +898,7 @@ func getFieldName(dataField data.Field, target *Query, metricTypeCount int) stri
 		found := false
 		for _, metric := range target.Metrics {
 			if metric.ID == field {
-				metricName += " " + describeMetric(metric.Type, field)
+				metricName += " " + describeMetric(metric.Type, metric.Field)
 				found = true
 			}
 		}

View File

@@ -61,7 +61,18 @@ func requireFloatAt(t *testing.T, expected float64, field *data.Field, index int
 }
 
 func requireTimeSeriesName(t *testing.T, expected string, frame *data.Frame) {
-	require.Equal(t, expected, frame.Name)
+	getField := func() *data.Field {
+		for _, field := range frame.Fields {
+			if field.Type() != data.FieldTypeTime {
+				return field
+			}
+		}
+		return nil
+	}
+
+	field := getField()
+	require.NotNil(t, field.Config)
+	require.Equal(t, expected, field.Config.DisplayNameFromDS)
 }
 
 func TestRefIdMatching(t *testing.T) {
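
With this helper, the tests keep a single entry point: they assert the legend text against the first non-time field's config rather than the frame name. A short usage sketch follows; it assumes it sits in the same test package (so the helper and imports such as data and time are already in scope), and the frame is hand-built for illustration rather than parsed from an Elasticsearch response.

func TestRequireTimeSeriesName_sketch(t *testing.T) {
	value := 5.0
	valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []*float64{&value})
	valueField.Config = &data.FieldConfig{DisplayNameFromDS: "Count"}

	frame := data.NewFrame("",
		data.NewField("Time", nil, []time.Time{time.Now()}),
		valueField,
	)

	// Passes because the value field's DisplayNameFromDS is "Count".
	requireTimeSeriesName(t, "Count", frame)
}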

View File

@@ -63,7 +63,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Count")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Count")
 	})
 
 	t.Run("Simple query count & avg aggregation", func(t *testing.T) {
@@ -112,7 +112,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Count")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Count")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -121,7 +121,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Average value")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Average value")
 	})
 
 	t.Run("Single group by query one metric", func(t *testing.T) {
@@ -175,7 +175,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server1")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server1")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -183,7 +183,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server2")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server2")
 	})
 
 	t.Run("Single group by query two metrics", func(t *testing.T) {
@@ -244,7 +244,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server1 Count")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server1 Count")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -252,7 +252,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server1 Average @value")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server1 Average @value")
 
 		frame = dataframes[2]
 		require.Len(t, frame.Fields, 2)
@@ -260,7 +260,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server2 Count")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server2 Count")
 
 		frame = dataframes[3]
 		require.Len(t, frame.Fields, 2)
@@ -268,7 +268,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server2 Average @value")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server2 Average @value")
 	})
 
 	t.Run("With percentiles", func(t *testing.T) {
@@ -316,7 +316,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "p75")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "p75")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -324,7 +324,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "p90")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "p90")
 	})
 
 	t.Run("With extended stats", func(t *testing.T) {
@@ -397,7 +397,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "server1 Max")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server1 Max")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -405,7 +405,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "server1 Std Dev Lower")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server1 Std Dev Lower")
 
 		frame = dataframes[2]
 		require.Len(t, frame.Fields, 2)
@@ -413,7 +413,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "server1 Std Dev Upper")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server1 Std Dev Upper")
 
 		frame = dataframes[3]
 		require.Len(t, frame.Fields, 2)
@@ -421,7 +421,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "server2 Max")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server2 Max")
 
 		frame = dataframes[4]
 		require.Len(t, frame.Fields, 2)
@@ -429,7 +429,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "server2 Std Dev Lower")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server2 Std Dev Lower")
 
 		frame = dataframes[5]
 		require.Len(t, frame.Fields, 2)
@@ -437,7 +437,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "server2 Std Dev Upper")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server2 Std Dev Upper")
 	})
 
 	t.Run("Single group by with alias pattern", func(t *testing.T) {
@@ -500,7 +500,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server1 Count and {{not_exist}} server1")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server1 Count and {{not_exist}} server1")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -508,7 +508,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "server2 Count and {{not_exist}} server2")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "server2 Count and {{not_exist}} server2")
 
 		frame = dataframes[2]
 		require.Len(t, frame.Fields, 2)
@@ -516,7 +516,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "0 Count and {{not_exist}} 0")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "0 Count and {{not_exist}} 0")
 	})
 
 	t.Run("Histogram response", func(t *testing.T) {
@@ -602,7 +602,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "@metric:cpu")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "@metric:cpu")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -610,7 +610,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "@metric:logins.count")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "@metric:logins.count")
 	})
 
 	t.Run("With drop first and last aggregation (numeric)", func(t *testing.T) {
@@ -670,7 +670,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "Average")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Average")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -678,7 +678,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "Count")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Count")
 	})
 
 	t.Run("With drop first and last aggregation (string)", func(t *testing.T) {
@@ -738,7 +738,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "Average")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Average")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -746,7 +746,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 1)
-		assert.Equal(t, frame.Name, "Count")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Count")
 	})
 
 	t.Run("Larger trimEdges value", func(t *testing.T) {
@@ -949,7 +949,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Sum @value")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Sum @value")
 
 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
@@ -957,7 +957,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Max @value")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Max @value")
 
 		frame = dataframes[2]
 		require.Len(t, frame.Fields, 2)
@@ -965,7 +965,7 @@ func TestResponseParser(t *testing.T) {
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Name, data.TimeSeriesValueFieldName)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Sum @value * Max @value")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Sum @value * Max @value")
 	})
 
 	t.Run("Terms with two bucket_script", func(t *testing.T) {
@@ -1543,7 +1543,7 @@ func TestResponseParser(t *testing.T) {
 		assert.Len(t, frame.Fields, 2)
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Top Metrics @value")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Top Metrics @value")
 		v, _ := frame.FloatAt(0, 0)
 		assert.Equal(t, 1609459200000., v)
 		v, _ = frame.FloatAt(1, 0)
@@ -1560,7 +1560,7 @@ func TestResponseParser(t *testing.T) {
 		assert.Len(t, frame.Fields, 2)
 		require.Equal(t, frame.Fields[0].Len(), 2)
 		require.Equal(t, frame.Fields[1].Len(), 2)
-		assert.Equal(t, frame.Name, "Top Metrics @anotherValue")
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Top Metrics @anotherValue")
 		v, _ = frame.FloatAt(0, 0)
 		assert.Equal(t, 1609459200000., v)
 		v, _ = frame.FloatAt(1, 0)

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: Count
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+------------------+
 // | Name: Time | Name: Value |
@@ -26,7 +26,6 @@
   "frames": [
     {
       "schema": {
-        "name": "Count",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -49,7 +48,10 @@
             "frame": "float64",
             "nullable": true
           },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "Count"
+          }
         }
       ]
     },

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: Average counter
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-----------------------------------+------------------+
 // | Name: Time | Name: Value |
@@ -26,7 +26,6 @@
   "frames": [
     {
      "schema": {
-        "name": "Average counter",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -49,7 +48,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "Average counter"
+          }
        }
      ]
    },

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: val3 Max float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -28,7 +28,7 @@
 // 0
 // ]
 // }
-// Name: val3 Min float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -49,7 +49,7 @@
 // 0
 // ]
 // }
-// Name: val2 Max float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -70,7 +70,7 @@
 // 0
 // ]
 // }
-// Name: val2 Min float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -91,7 +91,7 @@
 // 0
 // ]
 // }
-// Name: val1 Max float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -112,7 +112,7 @@
 // 0
 // ]
 // }
-// Name: val1 Min float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -131,7 +131,6 @@
   "frames": [
     {
       "schema": {
-        "name": "val3 Max float",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -156,6 +155,9 @@
           },
           "labels": {
             "label": "val3"
+          },
+          "config": {
+            "displayNameFromDS": "val3 Max float"
           }
         }
       ]
@@ -177,7 +179,6 @@
     },
     {
       "schema": {
-        "name": "val3 Min float",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -202,6 +203,9 @@
           },
           "labels": {
             "label": "val3"
+          },
+          "config": {
+            "displayNameFromDS": "val3 Min float"
           }
         }
       ]
@@ -223,7 +227,6 @@
     },
     {
       "schema": {
-        "name": "val2 Max float",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -248,6 +251,9 @@
           },
           "labels": {
             "label": "val2"
+          },
+          "config": {
+            "displayNameFromDS": "val2 Max float"
           }
         }
       ]
@@ -269,7 +275,6 @@
     },
     {
       "schema": {
-        "name": "val2 Min float",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -294,6 +299,9 @@
           },
           "labels": {
             "label": "val2"
+          },
+          "config": {
+            "displayNameFromDS": "val2 Min float"
           }
         }
       ]
@@ -315,7 +323,6 @@
     },
     {
       "schema": {
-        "name": "val1 Max float",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -340,6 +347,9 @@
           },
           "labels": {
             "label": "val1"
+          },
+          "config": {
+            "displayNameFromDS": "val1 Max float"
           }
         }
       ]
@@ -361,7 +371,6 @@
     },
     {
       "schema": {
-        "name": "val1 Min float",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -386,6 +395,9 @@
           },
           "labels": {
             "label": "val1"
+          },
+          "config": {
+            "displayNameFromDS": "val1 Min float"
           }
         }
       ]

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: Std Dev Lower counter
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-----------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -28,7 +28,7 @@
 // 0
 // ]
 // }
-// Name: Std Dev Upper counter
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-----------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -47,7 +47,6 @@
   "frames": [
     {
      "schema": {
-        "name": "Std Dev Lower counter",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -70,7 +69,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "Std Dev Lower counter"
+          }
        }
      ]
    },
@@ -91,7 +93,6 @@
    },
    {
      "schema": {
-        "name": "Std Dev Upper counter",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -114,7 +115,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "Std Dev Upper counter"
+          }
        }
      ]
    },

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: Max float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+-------------------+
 // | Name: Time | Name: Value |
@@ -26,7 +26,6 @@
   "frames": [
     {
      "schema": {
-        "name": "Max float",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -49,7 +48,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "Max float"
+          }
        }
      ]
    },

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: Min float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-------------------------------+---------------------+
 // | Name: Time | Name: Value |
@@ -26,7 +26,6 @@
   "frames": [
     {
      "schema": {
-        "name": "Min float",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -49,7 +48,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "Min float"
+          }
        }
      ]
    },

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: p25.0 counter
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-----------------------------------+------------------+
 // | Name: Time | Name: Value |
@@ -28,7 +28,7 @@
 // 0
 // ]
 // }
-// Name: p75.0 counter
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-----------------------------------+------------------+
 // | Name: Time | Name: Value |
@@ -47,7 +47,6 @@
   "frames": [
     {
      "schema": {
-        "name": "p25.0 counter",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -70,7 +69,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "p25.0 counter"
+          }
        }
      ]
    },
@@ -91,7 +93,6 @@
    },
    {
      "schema": {
-        "name": "p75.0 counter",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -114,7 +115,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "p75.0 counter"
+          }
        }
      ]
    },

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: val3
+// Name:
 // Dimensions: 2 Fields by 4 Rows
 // +-----------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -29,7 +29,7 @@
 // 0
 // ]
 // }
-// Name: val2
+// Name:
 // Dimensions: 2 Fields by 4 Rows
 // +-----------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -51,7 +51,7 @@
 // 0
 // ]
 // }
-// Name: val1
+// Name:
 // Dimensions: 2 Fields by 4 Rows
 // +-----------------------------------+--------------------+
 // | Name: Time | Name: Value |
@@ -71,7 +71,6 @@
   "frames": [
     {
       "schema": {
-        "name": "val3",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -96,6 +95,9 @@
           },
           "labels": {
             "label": "val3"
+          },
+          "config": {
+            "displayNameFromDS": "val3"
           }
         }
       ]
@@ -119,7 +121,6 @@
     },
     {
       "schema": {
-        "name": "val2",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -144,6 +145,9 @@
           },
           "labels": {
             "label": "val2"
+          },
+          "config": {
+            "displayNameFromDS": "val2"
           }
         }
       ]
@@ -167,7 +171,6 @@
     },
     {
       "schema": {
-        "name": "val1",
         "meta": {
           "type": "timeseries-multi",
           "typeVersion": [
@@ -192,6 +195,9 @@
           },
           "labels": {
             "label": "val1"
+          },
+          "config": {
+            "displayNameFromDS": "val1"
           }
         }
       ]

View File

@@ -7,7 +7,7 @@
 // 0
 // ]
 // }
-// Name: Top Metrics float
+// Name:
 // Dimensions: 2 Fields by 3 Rows
 // +-----------------------------------+-------------------+
 // | Name: Time | Name: Value |
@@ -26,7 +26,6 @@
   "frames": [
     {
      "schema": {
-        "name": "Top Metrics float",
        "meta": {
          "type": "timeseries-multi",
          "typeVersion": [
@@ -49,7 +48,10 @@
            "frame": "float64",
            "nullable": true
          },
-          "labels": {}
+          "labels": {},
+          "config": {
+            "displayNameFromDS": "Top Metrics float"
+          }
        }
      ]
    },