Explore: Prevent empty Elasticsearch logs query responses from hiding the logs panel (#40217)

* remove return value from addPreferredVisualisationType
* Elasticsearch: send empty series instead when no data is received for a query

parent ea0c1006f5
commit b0391d4933

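Taken together, the two changes make the hits-processing path always emit a data frame, even when Elasticsearch returns zero hits, so Explore keeps rendering the logs panel instead of dropping it. A minimal standalone sketch of the behavioural difference (illustrative names only, not code from this commit):

type Doc = Record<string, unknown>;

interface SketchFrame {
  fields: string[];
  meta?: { preferredVisualisationType?: 'logs' | 'graph' };
  rows: Doc[];
}

// Old behaviour: no docs, no frame - the logs panel disappears.
function processHitsOld(docs: Doc[]): SketchFrame[] {
  if (docs.length === 0) {
    return [];
  }
  return [{ fields: Object.keys(docs[0]), meta: { preferredVisualisationType: 'logs' }, rows: docs }];
}

// New behaviour: always emit a frame; zero docs just means zero fields and rows,
// but the 'logs' tag still tells Explore to keep the panel visible.
function processHitsNew(docs: Doc[]): SketchFrame[] {
  return [
    {
      fields: docs.length ? Object.keys(docs[0]) : [],
      meta: { preferredVisualisationType: 'logs' },
      rows: [...docs],
    },
  ];
}

console.log(processHitsOld([]).length); // 0 - panel hidden
console.log(processHitsNew([]).length); // 1 - empty logs frame, panel stays
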
@@ -489,7 +489,7 @@ export class ElasticResponse {
     return this.processResponseToDataFrames(true, logMessageField, logLevelField);
   }
 
-  processResponseToDataFrames(
+  private processResponseToDataFrames(
    isLogsRequest: boolean,
    logMessageField?: string,
    logLevelField?: string

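The visibility change is safe because the method is only called from inside the class; the context line above is the logs entry point (the getLogs path, judging by the (true, logMessageField, logLevelField) arguments), and nothing outside the datasource uses the method directly.
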
@@ -501,64 +501,67 @@ export class ElasticResponse {
         throw this.getErrorFromElasticResponse(this.response, response.error);
       }
 
-      if (response.hits && response.hits.hits.length > 0) {
+      if (response.hits) {
         const { propNames, docs } = flattenHits(response.hits.hits);
-        if (docs.length > 0) {
-          let series = createEmptyDataFrame(
-            propNames.map(toNameTypePair(docs)),
-            this.targets[0].timeField!,
-            isLogsRequest,
-            logMessageField,
-            logLevelField
-          );
+
+        const series = docs.length
+          ? createEmptyDataFrame(
+              propNames.map(toNameTypePair(docs)),
+              isLogsRequest,
+              this.targets[0].timeField,
+              logMessageField,
+              logLevelField
+            )
+          : createEmptyDataFrame([], isLogsRequest);
 
-          // Add a row for each document
-          for (const doc of docs) {
-            if (logLevelField) {
-              // Remap level field based on the datasource config. This field is
-              // then used in explore to figure out the log level. We may rewrite
-              // some actual data in the level field if they are different.
-              doc['level'] = doc[logLevelField];
-            }
-            // When highlighting exists, we need to collect all the highlighted
-            // phrases and add them to the DataFrame's meta.searchWords array.
-            if (doc.highlight) {
-              // There might be multiple words so we need two versions of the
-              // regular expression. One to match gobally, when used with part.match,
-              // it returns and array of matches. The second one is used to capture the
-              // values between the tags.
-              const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g');
-              const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP);
-              const newSearchWords = Object.keys(doc.highlight)
-                .flatMap((key) => {
-                  return doc.highlight[key].flatMap((line: string) => {
-                    const matchedPhrases = line.match(globalHighlightWordRegex);
-                    if (!matchedPhrases) {
-                      return [];
-                    }
-                    return matchedPhrases.map((part) => {
-                      const matches = part.match(highlightWordRegex);
-                      return (matches && matches[1]) || null;
-                    });
-                  });
-                })
-                .filter(identity);
-              // If meta and searchWords already exists, add the words and
-              // deduplicate otherwise create a new set of search words.
-              const searchWords = series.meta?.searchWords
-                ? uniq([...series.meta.searchWords, ...newSearchWords])
-                : [...newSearchWords];
-              series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords };
-            }
-            series.add(doc);
-          }
-          if (isLogsRequest) {
-            series = addPreferredVisualisationType(series, 'logs');
-          }
-          const target = this.targets[n];
-          series.refId = target.refId;
-          dataFrame.push(series);
-        }
+        if (isLogsRequest) {
+          addPreferredVisualisationType(series, 'logs');
+        }
+
+        // Add a row for each document
+        for (const doc of docs) {
+          if (logLevelField) {
+            // Remap level field based on the datasource config. This field is
+            // then used in explore to figure out the log level. We may rewrite
+            // some actual data in the level field if they are different.
+            doc['level'] = doc[logLevelField];
+          }
+          // When highlighting exists, we need to collect all the highlighted
+          // phrases and add them to the DataFrame's meta.searchWords array.
+          if (doc.highlight) {
+            // There might be multiple words so we need two versions of the
+            // regular expression. One to match gobally, when used with part.match,
+            // it returns and array of matches. The second one is used to capture the
+            // values between the tags.
+            const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g');
+            const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP);
+            const newSearchWords = Object.keys(doc.highlight)
+              .flatMap((key) => {
+                return doc.highlight[key].flatMap((line: string) => {
+                  const matchedPhrases = line.match(globalHighlightWordRegex);
+                  if (!matchedPhrases) {
+                    return [];
+                  }
+                  return matchedPhrases.map((part) => {
+                    const matches = part.match(highlightWordRegex);
+                    return (matches && matches[1]) || null;
+                  });
+                });
+              })
+              .filter(identity);
+            // If meta and searchWords already exists, add the words and
+            // deduplicate otherwise create a new set of search words.
+            const searchWords = series.meta?.searchWords
+              ? uniq([...series.meta.searchWords, ...newSearchWords])
+              : [...newSearchWords];
+            series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords };
+          }
+          series.add(doc);
+        }
+
+        const target = this.targets[n];
+        series.refId = target.refId;
+        dataFrame.push(series);
       }
 
       if (response.aggregations) {

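This hunk is the heart of the fix. Previously a series was only created and pushed when docs.length > 0, so an empty hits array produced no frame at all and the logs panel vanished. Now every response that carries a hits object yields a frame: a populated one when documents exist, otherwise the bare createEmptyDataFrame([], isLogsRequest). Roughly what that empty branch hands to Explore, sketched with just the @grafana/data pieces the diff itself uses:

import { MutableDataFrame } from '@grafana/data';

// A frame with no fields and no rows...
const series = new MutableDataFrame({ fields: [] });
// ...that addPreferredVisualisationType then tags for the logs panel:
series.meta = { preferredVisualisationType: 'logs' };
console.log(series.length); // 0 - empty, yet still a frame Explore can render
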
@@ -582,7 +585,7 @@ export class ElasticResponse {
 
           // When log results, show aggregations only in graph. Log fields are then going to be shown in table.
           if (isLogsRequest) {
-            series = addPreferredVisualisationType(series, 'graph');
+            addPreferredVisualisationType(series, 'graph');
           }
 
           series.refId = target.refId;

@@ -690,20 +693,22 @@ const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames
  */
 const createEmptyDataFrame = (
   props: Array<[string, FieldType]>,
-  timeField: string,
   isLogsRequest: boolean,
+  timeField?: string,
   logMessageField?: string,
   logLevelField?: string
 ): MutableDataFrame => {
   const series = new MutableDataFrame({ fields: [] });
 
-  series.addField({
-    config: {
-      filterable: true,
-    },
-    name: timeField,
-    type: FieldType.time,
-  });
+  if (timeField) {
+    series.addField({
+      config: {
+        filterable: true,
+      },
+      name: timeField,
+      type: FieldType.time,
+    });
+  }
 
   if (logMessageField) {
     series.addField({

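Two things happen to createEmptyDataFrame here: timeField becomes optional (the empty branch calls createEmptyDataFrame([], isLogsRequest) with no documents, hence no timestamps to build a time field from), and it moves behind isLogsRequest because TypeScript does not allow a required parameter to follow an optional one. The rule in isolation:

// Fine: required parameters first, optional ones after.
declare function ok(isLogsRequest: boolean, timeField?: string): void;

// Compile error: a required parameter cannot follow an optional parameter.
// declare function bad(timeField?: string, isLogsRequest: boolean): void;
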
@@ -756,8 +761,6 @@ const addPreferredVisualisationType = (series: any, type: PreferredVisualisation
     : (s.meta = {
         preferredVisualisationType: type,
       });
-
-  return s;
 };
 
 const toNameTypePair = (docs: Array<Record<string, any>>) => (propName: string): [string, FieldType] => [

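With both call sites above switched to plain statements, the trailing return s; is dead code: addPreferredVisualisationType only mutates s.meta in place, so returning the series merely suggested a copy that never existed.
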
@@ -1432,4 +1432,39 @@ describe('ElasticResponse', () => {
       expect(fields).toContainEqual({ name: 'message', type: 'string' });
     });
   });
+
+  describe('logs query with empty response', () => {
+    const targets: ElasticsearchQuery[] = [
+      {
+        refId: 'A',
+        metrics: [{ type: 'logs', id: '2' }],
+        bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '1' }],
+        key: 'Q-1561369883389-0.7611823271062786-0',
+        query: 'hello AND message',
+        timeField: '@timestamp',
+      },
+    ];
+    const response = {
+      responses: [
+        {
+          hits: { hits: [] },
+          aggregations: {
+            '1': {
+              buckets: [
+                { key_as_string: '1633676760000', key: 1633676760000, doc_count: 0 },
+                { key_as_string: '1633676770000', key: 1633676770000, doc_count: 0 },
+                { key_as_string: '1633676780000', key: 1633676780000, doc_count: 0 },
+              ],
+            },
+          },
+          status: 200,
+        },
+      ],
+    };
+
+    it('should return histogram aggregation and documents', () => {
+      const result = new ElasticResponse(targets, response).getLogs('message', 'level');
+      expect(result.data.length).toBe(2);
+    });
+  });
 });

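The expected length of 2 ties the test back to the fix: even though hits.hits is empty, the hits path now contributes one empty frame tagged for the logs panel, and the date_histogram aggregation contributes a second frame tagged for the graph, so an empty logs query still renders both the panel and its histogram.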