Mirror of https://github.com/grafana/grafana.git
Elasticsearch: Add query's refId to each series returned by a query (#27614)
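The new test suite below asserts that every series returned by ElasticResponse.getTimeSeries() carries the refId of the query (target) that produced it, on both the legacy time-series path and the DataFrame path. As a rough illustration of the behaviour under test, here is a minimal sketch; the names (TargetLike, SeriesLike, tagSeriesWithRefId) are assumptions for this example, not Grafana APIs, and the production change itself is not part of this hunk.

// Illustrative sketch only: `TargetLike`, `SeriesLike`, and `tagSeriesWithRefId`
// are invented names for this example, not Grafana APIs.
interface TargetLike {
  refId: string;
}

interface SeriesLike {
  target: string; // series name shown in the legend
  datapoints: Array<[number, number]>;
  refId?: string; // now populated from the originating query
}

// Attach the originating query's refId to every series produced for that query,
// so consumers (panels, transformations, the inspector) can map data back to
// queries A, B, C, ...
function tagSeriesWithRefId(target: TargetLike, series: SeriesLike[]): SeriesLike[] {
  return series.map(s => ({ ...s, refId: target.refId }));
}

The fixtures in the diff exercise this for count, histogram, raw_document, percentiles, extended_stats, and raw_data queries.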
@@ -3,10 +3,256 @@ import { ElasticResponse } from '../elastic_response';
 import flatten from 'app/core/utils/flatten';
 
 describe('ElasticResponse', () => {
-  let targets;
+  let targets: any;
   let response: any;
   let result: any;
 
+  describe('refId matching', () => {
+    // We default to the old table structure to ensure backward compatibility,
+    // therefore we only process responses as DataFrames when there's at least one
+    // raw_data (new) query type.
+    // We should test that refId gets populated whether or not such a query type is present.
+    const countQuery = {
+      target: {
+        refId: 'COUNT_GROUPBY_DATE_HISTOGRAM',
+        metrics: [{ type: 'count', id: 'c_1' }],
+        bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: 'c_2' }],
+      },
+      response: {
+        aggregations: {
+          c_2: {
+            buckets: [
+              {
+                doc_count: 10,
+                key: 1000,
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    const countGroupByHistogramQuery = {
+      target: {
+        refId: 'COUNT_GROUPBY_HISTOGRAM',
+        metrics: [{ type: 'count', id: 'h_3' }],
+        bucketAggs: [{ type: 'histogram', field: 'bytes', id: 'h_4' }],
+      },
+      response: {
+        aggregations: {
+          h_4: {
+            buckets: [{ doc_count: 1, key: 1000 }],
+          },
+        },
+      },
+    };
+
+    const rawDocumentQuery = {
+      target: {
+        refId: 'RAW_DOC',
+        metrics: [{ type: 'raw_document', id: 'r_5' }],
+        bucketAggs: [],
+      },
+      response: {
+        hits: {
+          total: 2,
+          hits: [
+            {
+              _id: '5',
+              _type: 'type',
+              _index: 'index',
+              _source: { sourceProp: 'asd' },
+              fields: { fieldProp: 'field' },
+            },
+            {
+              _source: { sourceProp: 'asd2' },
+              fields: { fieldProp: 'field2' },
+            },
+          ],
+        },
+      },
+    };
+
+    const percentilesQuery = {
+      target: {
+        refId: 'PERCENTILE',
+        metrics: [{ type: 'percentiles', settings: { percents: [75, 90] }, id: 'p_1' }],
+        bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: 'p_3' }],
+      },
+      response: {
+        aggregations: {
+          p_3: {
+            buckets: [
+              {
+                p_1: { values: { '75': 3.3, '90': 5.5 } },
+                doc_count: 10,
+                key: 1000,
+              },
+              {
+                p_1: { values: { '75': 2.3, '90': 4.5 } },
+                doc_count: 15,
+                key: 2000,
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    const extendedStatsQuery = {
+      target: {
+        refId: 'EXTENDEDSTATS',
+        metrics: [
+          {
+            type: 'extended_stats',
+            meta: { max: true, std_deviation_bounds_upper: true },
+            id: 'e_1',
+          },
+        ],
+        bucketAggs: [
+          { type: 'terms', field: 'host', id: 'e_3' },
+          { type: 'date_histogram', id: 'e_4' },
+        ],
+      },
+      response: {
+        aggregations: {
+          e_3: {
+            buckets: [
+              {
+                key: 'server1',
+                e_4: {
+                  buckets: [
+                    {
+                      e_1: {
+                        max: 10.2,
+                        min: 5.5,
+                        std_deviation_bounds: { upper: 3, lower: -2 },
+                      },
+                      doc_count: 10,
+                      key: 1000,
+                    },
+                  ],
+                },
+              },
+              {
+                key: 'server2',
+                e_4: {
+                  buckets: [
+                    {
+                      e_1: {
+                        max: 10.2,
+                        min: 5.5,
+                        std_deviation_bounds: { upper: 3, lower: -2 },
+                      },
+                      doc_count: 10,
+                      key: 1000,
+                    },
+                  ],
+                },
+              },
+            ],
+          },
+        },
+      },
+    };
+
+    const commonTargets = [
+      { ...countQuery.target },
+      { ...countGroupByHistogramQuery.target },
+      { ...rawDocumentQuery.target },
+      { ...percentilesQuery.target },
+      { ...extendedStatsQuery.target },
+    ];
+
+    const commonResponses = [
+      { ...countQuery.response },
+      { ...countGroupByHistogramQuery.response },
+      { ...rawDocumentQuery.response },
+      { ...percentilesQuery.response },
+      { ...extendedStatsQuery.response },
+    ];
+
+    describe('When processing responses as DataFrames (raw_data query present)', () => {
+      beforeEach(() => {
+        targets = [
+          ...commonTargets,
+          // Raw Data Query
+          {
+            refId: 'D',
+            metrics: [{ type: 'raw_data', id: '6' }],
+            bucketAggs: [],
+          },
+        ];
+
+        response = {
+          responses: [
+            ...commonResponses,
+            // Raw Data Query
+            {
+              hits: {
+                total: {
+                  relation: 'eq',
+                  value: 1,
+                },
+                hits: [
+                  {
+                    _id: '6',
+                    _type: '_doc',
+                    _index: 'index',
+                    _source: { sourceProp: 'asd' },
+                  },
+                ],
+              },
+            },
+          ],
+        };
+
+        result = new ElasticResponse(targets, response).getTimeSeries();
+      });
+
+      it('should add the correct refId to each returned series', () => {
+        expect(result.data[0].refId).toBe(countQuery.target.refId);
+
+        expect(result.data[1].refId).toBe(countGroupByHistogramQuery.target.refId);
+
+        expect(result.data[2].refId).toBe(rawDocumentQuery.target.refId);
+
+        expect(result.data[3].refId).toBe(percentilesQuery.target.refId);
+        expect(result.data[4].refId).toBe(percentilesQuery.target.refId);
+
+        expect(result.data[5].refId).toBe(extendedStatsQuery.target.refId);
+
+        // Raw Data query
+        expect(result.data[result.data.length - 1].refId).toBe('D');
+      });
+    });
+
+    describe('When NOT processing responses as DataFrames (raw_data query NOT present)', () => {
+      beforeEach(() => {
+        targets = [...commonTargets];
+
+        response = {
+          responses: [...commonResponses],
+        };
+
+        result = new ElasticResponse(targets, response).getTimeSeries();
+      });
+
+      it('should add the correct refId to each returned series', () => {
+        expect(result.data[0].refId).toBe(countQuery.target.refId);
+
+        expect(result.data[1].refId).toBe(countGroupByHistogramQuery.target.refId);
+
+        expect(result.data[2].refId).toBe(rawDocumentQuery.target.refId);
+
+        expect(result.data[3].refId).toBe(percentilesQuery.target.refId);
+        expect(result.data[4].refId).toBe(percentilesQuery.target.refId);
+
+        expect(result.data[5].refId).toBe(extendedStatsQuery.target.refId);
+      });
+    });
+  });
+
   describe('simple query and count', () => {
     beforeEach(() => {
       targets = [
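The comment at the top of the new suite explains why there are two sub-suites: responses are only processed as DataFrames when at least one raw_data query is present; otherwise the legacy table/time-series structure is kept for backward compatibility. A minimal sketch of that kind of check follows, using assumed names rather than the actual data-source code.

// Sketch under assumptions: `MetricLike`, `QueryTargetLike`, and
// `shouldProcessAsDataFrames` are illustrative names, not the real implementation.
type MetricLike = { type: string; id: string };
type QueryTargetLike = { refId: string; metrics: MetricLike[]; bucketAggs: unknown[] };

// The DataFrame path is taken only if at least one target contains a raw_data
// metric; otherwise the old structure is returned, which is why the tests above
// assert refId propagation on both paths.
function shouldProcessAsDataFrames(targets: QueryTargetLike[]): boolean {
  return targets.some(t => t.metrics.some(m => m.type === 'raw_data'));
}

In both sub-suites the assertions only compare result.data[n].refId against the originating target's refId, so they hold regardless of how the series themselves are shaped.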