Explore: Prevent empty Elasticsearch logs query responses from hiding the logs panel (#40217)

* remove return value from addPreferredVisualisationType

* Elasticsearch: send empty series instead when no data is received for a query
Giordano Ricci 2021-10-12 13:59:28 +01:00 committed by GitHub
parent ea0c1006f5
commit b0391d4933
2 changed files with 105 additions and 67 deletions
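
To make the second bullet concrete, here is a minimal sketch built only from the @grafana/data APIs already used in the diff below (buildEmptyLogsFrame is a hypothetical name, not part of the datasource): when a logs query returns no documents, an empty, logs-tagged DataFrame carrying the query's refId is still emitted, so Explore keeps the logs panel on screen instead of hiding it.

import { MutableDataFrame } from '@grafana/data';

// Hypothetical helper illustrating the fix: even with zero documents, return a
// frame rather than nothing.
const buildEmptyLogsFrame = (refId: string): MutableDataFrame => {
  // No documents means there are no fields to derive, so the frame stays empty...
  const series = new MutableDataFrame({ fields: [] });
  // ...but it is still tagged for the logs visualisation and tied to its query,
  // which is what keeps the logs panel visible in Explore.
  series.meta = { preferredVisualisationType: 'logs' };
  series.refId = refId;
  return series;
};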

View File

@@ -489,7 +489,7 @@ export class ElasticResponse {
return this.processResponseToDataFrames(true, logMessageField, logLevelField);
}
processResponseToDataFrames(
private processResponseToDataFrames(
isLogsRequest: boolean,
logMessageField?: string,
logLevelField?: string
@@ -501,64 +501,67 @@ export class ElasticResponse {
throw this.getErrorFromElasticResponse(this.response, response.error);
}
if (response.hits && response.hits.hits.length > 0) {
if (response.hits) {
const { propNames, docs } = flattenHits(response.hits.hits);
if (docs.length > 0) {
let series = createEmptyDataFrame(
propNames.map(toNameTypePair(docs)),
this.targets[0].timeField!,
isLogsRequest,
logMessageField,
logLevelField
);
// Add a row for each document
for (const doc of docs) {
if (logLevelField) {
// Remap level field based on the datasource config. This field is
// then used in explore to figure out the log level. We may rewrite
// some actual data in the level field if they are different.
doc['level'] = doc[logLevelField];
}
// When highlighting exists, we need to collect all the highlighted
// phrases and add them to the DataFrame's meta.searchWords array.
if (doc.highlight) {
// There might be multiple words, so we need two versions of the
// regular expression. One to match globally; when used with part.match,
// it returns an array of matches. The second one is used to capture the
// values between the tags.
const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g');
const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP);
const newSearchWords = Object.keys(doc.highlight)
.flatMap((key) => {
return doc.highlight[key].flatMap((line: string) => {
const matchedPhrases = line.match(globalHighlightWordRegex);
if (!matchedPhrases) {
return [];
}
return matchedPhrases.map((part) => {
const matches = part.match(highlightWordRegex);
return (matches && matches[1]) || null;
});
});
})
.filter(identity);
// If meta and searchWords already exist, add the words and
// deduplicate; otherwise create a new set of search words.
const searchWords = series.meta?.searchWords
? uniq([...series.meta.searchWords, ...newSearchWords])
: [...newSearchWords];
series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords };
}
series.add(doc);
}
if (isLogsRequest) {
series = addPreferredVisualisationType(series, 'logs');
}
const target = this.targets[n];
series.refId = target.refId;
dataFrame.push(series);
const series = docs.length
? createEmptyDataFrame(
propNames.map(toNameTypePair(docs)),
isLogsRequest,
this.targets[0].timeField,
logMessageField,
logLevelField
)
: createEmptyDataFrame([], isLogsRequest);
if (isLogsRequest) {
addPreferredVisualisationType(series, 'logs');
}
// Add a row for each document
for (const doc of docs) {
if (logLevelField) {
// Remap level field based on the datasource config. This field is
// then used in explore to figure out the log level. We may rewrite
// some actual data in the level field if they are different.
doc['level'] = doc[logLevelField];
}
// When highlighting exists, we need to collect all the highlighted
// phrases and add them to the DataFrame's meta.searchWords array.
if (doc.highlight) {
// There might be multiple words, so we need two versions of the
// regular expression. One to match globally; when used with part.match,
// it returns an array of matches. The second one is used to capture the
// values between the tags.
const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g');
const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP);
const newSearchWords = Object.keys(doc.highlight)
.flatMap((key) => {
return doc.highlight[key].flatMap((line: string) => {
const matchedPhrases = line.match(globalHighlightWordRegex);
if (!matchedPhrases) {
return [];
}
return matchedPhrases.map((part) => {
const matches = part.match(highlightWordRegex);
return (matches && matches[1]) || null;
});
});
})
.filter(identity);
// If meta and searchWords already exist, add the words and
// deduplicate; otherwise create a new set of search words.
const searchWords = series.meta?.searchWords
? uniq([...series.meta.searchWords, ...newSearchWords])
: [...newSearchWords];
series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords };
}
series.add(doc);
}
const target = this.targets[n];
series.refId = target.refId;
dataFrame.push(series);
}
if (response.aggregations) {
@@ -582,7 +585,7 @@ export class ElasticResponse {
// When log results, show aggregations only in graph. Log fields are then going to be shown in table.
if (isLogsRequest) {
series = addPreferredVisualisationType(series, 'graph');
addPreferredVisualisationType(series, 'graph');
}
series.refId = target.refId;
@@ -690,20 +693,22 @@ const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames
*/
const createEmptyDataFrame = (
props: Array<[string, FieldType]>,
timeField: string,
isLogsRequest: boolean,
timeField?: string,
logMessageField?: string,
logLevelField?: string
): MutableDataFrame => {
const series = new MutableDataFrame({ fields: [] });
series.addField({
config: {
filterable: true,
},
name: timeField,
type: FieldType.time,
});
if (timeField) {
series.addField({
config: {
filterable: true,
},
name: timeField,
type: FieldType.time,
});
}
if (logMessageField) {
series.addField({
@@ -756,8 +761,6 @@ const addPreferredVisualisationType = (series: any, type: PreferredVisualisation
: (s.meta = {
preferredVisualisationType: type,
});
return s;
};
const toNameTypePair = (docs: Array<Record<string, any>>) => (propName: string): [string, FieldType] => [
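
Regarding the first bullet, a sketch of the helper after the change (only the lines in the hunk above are verbatim; the rest, including the import, is assumed): addPreferredVisualisationType now mutates series.meta in place and returns nothing, so call sites switch from series = addPreferredVisualisationType(series, 'logs') to a plain call, as seen in the hunks above.

import { PreferredVisualisationType } from '@grafana/data';

// Sketch only: the helper sets meta.preferredVisualisationType in place,
// preserving any existing meta, and no longer returns the series.
const addPreferredVisualisationType = (series: any, type: PreferredVisualisationType) => {
  series.meta = series.meta
    ? { ...series.meta, preferredVisualisationType: type }
    : { preferredVisualisationType: type };
};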

View File

@@ -1432,4 +1432,39 @@ describe('ElasticResponse', () => {
expect(fields).toContainEqual({ name: 'message', type: 'string' });
});
});
describe('logs query with empty response', () => {
const targets: ElasticsearchQuery[] = [
{
refId: 'A',
metrics: [{ type: 'logs', id: '2' }],
bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '1' }],
key: 'Q-1561369883389-0.7611823271062786-0',
query: 'hello AND message',
timeField: '@timestamp',
},
];
const response = {
responses: [
{
hits: { hits: [] },
aggregations: {
'1': {
buckets: [
{ key_as_string: '1633676760000', key: 1633676760000, doc_count: 0 },
{ key_as_string: '1633676770000', key: 1633676770000, doc_count: 0 },
{ key_as_string: '1633676780000', key: 1633676780000, doc_count: 0 },
],
},
},
status: 200,
},
],
};
it('should return histogram aggregation and documents', () => {
const result = new ElasticResponse(targets, response).getLogs('message', 'level');
expect(result.data.length).toBe(2);
});
});
});