Elasticsearch: Fix using multiple bucket script aggregations when only grouping by terms (#24064)

* In the response parser, when more than one bucket_script metric is configured, use the script formula as the column name
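
As a hedged illustration of that rule (a minimal Go sketch with stand-in types, not Grafana's actual parser code): when several metrics of the same type land in one bucket, the field name is appended to the column name, and for bucket_script metrics, whose field is just the placeholder "select field", the script formula is used instead.

package main

import "fmt"

// metric is a simplified stand-in for the parser's metric model.
type metric struct {
	Type   string // e.g. "sum", "max", "bucket_script"
	Field  string // e.g. "@value", or "select field" for bucket_script
	Script string // the bucket_script formula
}

// columnName mirrors the fixed naming logic: append the field when
// several metrics of one type exist, but prefer the script formula
// for bucket_script metrics, which carry no real field.
func columnName(m metric, sameTypeCount int, base string) string {
	name := base
	if sameTypeCount > 1 {
		name += " " + m.Field
		if m.Type == "bucket_script" {
			// Use the formula as the column name.
			name = m.Script
		}
	}
	return name
}

func main() {
	scripts := []metric{
		{Type: "bucket_script", Field: "select field", Script: "params.var1 * params.var2"},
		{Type: "bucket_script", Field: "select field", Script: "params.var1 * params.var2 * 2"},
	}
	// Before the fix both columns got the same generic name; now each
	// prints its own formula.
	for _, m := range scripts {
		fmt.Println(columnName(m, len(scripts), "Bucket Script"))
	}
}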

* Tests

* Process/Tests - backend (Go)

* Update pkg/tsdb/elasticsearch/response_parser.go

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>

* reverse

* Update pkg/tsdb/elasticsearch/response_parser.go

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
Marcelo Nunes Alves 2020-07-09 10:21:19 -03:00 committed by GitHub
parent 49b5fc4b9a
commit be961c5466
4 changed files with 162 additions and 1 deletions

pkg/tsdb/elasticsearch/response_parser.go

@@ -372,6 +372,10 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
if len(otherMetrics) > 1 {
metricName += " " + metric.Field
if metric.Type == "bucket_script" {
// Use the formula as the column name
metricName = metric.Settings.Get("script").MustString("")
}
}
addMetricValue(&values, metricName, castToNullFloat(bucket.GetPath(metric.ID, "value")))
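
Note on the design choice: only bucket_script gets this treatment because its metric.Field is the placeholder "select field" rather than a real document field, so appending the field would leave every bucket_script column with an identical name; the script formula is the one string that distinguishes them.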

pkg/tsdb/elasticsearch/response_parser_test.go

@@ -865,6 +865,80 @@ func TestResponseParser(t *testing.T) {
So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000)
})
Convey("Terms with two bucket_script", func() {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [
{ "id": "1", "type": "sum", "field": "@value" },
{ "id": "3", "type": "max", "field": "@value" },
{
"id": "4",
"field": "select field",
"pipelineVariables": [{ "name": "var1", "pipelineAgg": "1" }, { "name": "var2", "pipelineAgg": "3" }],
"settings": { "script": "params.var1 * params.var2" },
"type": "bucket_script"
},
{
"id": "5",
"field": "select field",
"pipelineVariables": [{ "name": "var1", "pipelineAgg": "1" }, { "name": "var2", "pipelineAgg": "3" }],
"settings": { "script": "params.var1 * params.var2 * 2" },
"type": "bucket_script"
}
],
"bucketAggs": [{ "type": "terms", "field": "@timestamp", "id": "2" }]
}`,
}
response := `{
"responses": [
{
"aggregations": {
"2": {
"buckets": [
{
"1": { "value": 2 },
"3": { "value": 3 },
"4": { "value": 6 },
"5": { "value": 24 },
"doc_count": 60,
"key": 1000
},
{
"1": { "value": 3 },
"3": { "value": 4 },
"4": { "value": 12 },
"5": { "value": 48 },
"doc_count": 60,
"key": 2000
}
]
}
}
}
]
}`
rp, err := newResponseParserForTest(targets, response)
So(err, ShouldBeNil)
result, err := rp.getTimeSeries()
So(err, ShouldBeNil)
So(result.Results, ShouldHaveLength, 1)
queryRes := result.Results["A"]
So(queryRes, ShouldNotBeNil)
So(queryRes.Tables[0].Rows, ShouldHaveLength, 2)
So(queryRes.Tables[0].Columns[1].Text, ShouldEqual, "Sum")
So(queryRes.Tables[0].Columns[2].Text, ShouldEqual, "Max")
So(queryRes.Tables[0].Columns[3].Text, ShouldEqual, "params.var1 * params.var2")
So(queryRes.Tables[0].Columns[4].Text, ShouldEqual, "params.var1 * params.var2 * 2")
So(queryRes.Tables[0].Rows[0][1].(null.Float).Float64, ShouldEqual, 2)
So(queryRes.Tables[0].Rows[0][2].(null.Float).Float64, ShouldEqual, 3)
So(queryRes.Tables[0].Rows[0][3].(null.Float).Float64, ShouldEqual, 6)
So(queryRes.Tables[0].Rows[0][4].(null.Float).Float64, ShouldEqual, 24)
So(queryRes.Tables[0].Rows[1][1].(null.Float).Float64, ShouldEqual, 3)
So(queryRes.Tables[0].Rows[1][2].(null.Float).Float64, ShouldEqual, 4)
So(queryRes.Tables[0].Rows[1][3].(null.Float).Float64, ShouldEqual, 12)
So(queryRes.Tables[0].Rows[1][4].(null.Float).Float64, ShouldEqual, 48)
})
// Convey("Raw documents query", func() {
// targets := map[string]string{
// "A": `{

public/app/plugins/datasource/elasticsearch/elastic_response.ts

@@ -175,6 +175,10 @@ export class ElasticResponse {
// if there is more than one metric of the same type, include the field name in the property
if (otherMetrics.length > 1) {
metricName += ' ' + metric.field;
if (metric.type === 'bucket_script') {
//Use the formula in the column name
metricName = metric.settings.script;
}
}
addMetricValue(values, metricName, bucket[metric.id].value);
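
The frontend parser applies the same naming rule as the Go backend above, so table headers stay consistent regardless of which code path renders the response.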

public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts

@@ -811,7 +811,6 @@ describe('ElasticResponse', () => {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 3 series', () => {
expect(result.data.length).toBe(3);
expect(result.data[0].datapoints.length).toBe(2);
@@ -827,6 +826,86 @@
});
});
describe('terms with bucket_script and two scripts', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [
{ id: '1', type: 'sum', field: '@value' },
{ id: '3', type: 'max', field: '@value' },
{
id: '4',
field: 'select field',
pipelineVariables: [
{ name: 'var1', pipelineAgg: '1' },
{ name: 'var2', pipelineAgg: '3' },
],
settings: { script: 'params.var1 * params.var2' },
type: 'bucket_script',
},
{
id: '5',
field: 'select field',
pipelineVariables: [
{ name: 'var1', pipelineAgg: '1' },
{ name: 'var2', pipelineAgg: '3' },
],
settings: { script: 'params.var1 * params.var2 * 4' },
type: 'bucket_script',
},
],
bucketAggs: [{ type: 'terms', field: '@timestamp', id: '2' }],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
1: { value: 2 },
3: { value: 3 },
4: { value: 6 },
5: { value: 24 },
doc_count: 60,
key: 1000,
},
{
1: { value: 3 },
3: { value: 4 },
4: { value: 12 },
5: { value: 48 },
doc_count: 60,
key: 2000,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 rows with 5 columns', () => {
expect(result.data[0].columns.length).toBe(5);
expect(result.data[0].rows.length).toBe(2);
expect(result.data[0].rows[0][1]).toBe(2);
expect(result.data[0].rows[0][2]).toBe(3);
expect(result.data[0].rows[0][3]).toBe(6);
expect(result.data[0].rows[0][4]).toBe(24);
expect(result.data[0].rows[1][1]).toBe(3);
expect(result.data[0].rows[1][2]).toBe(4);
expect(result.data[0].rows[1][3]).toBe(12);
expect(result.data[0].rows[1][4]).toBe(48);
});
});
describe('simple logs query and count', () => {
const targets: any = [
{