Mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00

Elastic: Map level field based on config. (#22182)

* Map level field based on config.
* Fix type

parent 10fbabfb2e
commit 934d93ad94
@@ -312,7 +312,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefined {
     const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];

       let logLevel = LogLevel.unknown;
-      if (logLevelField) {
+      if (logLevelField && logLevelField.values.get(j)) {
         logLevel = getLogLevelFromKey(logLevelField.values.get(j));
       } else if (seriesLogLevel) {
         logLevel = seriesLogLevel;
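The effect of the added `values.get(j)` check is easiest to see in isolation. Below is a minimal sketch of the per-row decision, with stand-in types; `levelForRow` is a hypothetical name, not a function in the source, and `String(...)` stands in for the real `getLogLevelFromKey`:

    type FieldLike = { values: { get(i: number): any } };

    // Stand-in for the per-row branch in logSeriesToLogsModel.
    function levelForRow(
      logLevelField: FieldLike | undefined,
      seriesLogLevel: string | undefined,
      j: number
    ): string {
      if (logLevelField && logLevelField.values.get(j)) {
        // Only rows that actually carry a value are mapped through the
        // key-to-level lookup (getLogLevelFromKey in the real code).
        return String(logLevelField.values.get(j));
      } else if (seriesLogLevel) {
        // Rows with an empty or undefined value now fall back to the level
        // detected for the whole series instead of mapping `undefined`.
        return seriesLogLevel;
      }
      return 'unknown'; // LogLevel.unknown in the real code
    }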
@@ -416,7 +416,6 @@ export class ElasticResponse {
   getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
     const dataFrame: DataFrame[] = [];
-    const docs: any[] = [];

     for (let n = 0; n < this.response.responses.length; n++) {
       const response = this.response.responses[n];
@@ -424,78 +423,18 @@ export class ElasticResponse {
         throw this.getErrorFromElasticResponse(this.response, response.error);
       }

-      // We keep a list of all props so that we can create all the fields in the dataFrame, this can lead
-      // to wide sparse dataframes in case the scheme is different per document.
-      let propNames: string[] = [];
-
-      for (const hit of response.hits.hits) {
-        const flattened = hit._source ? flatten(hit._source, null) : {};
-        const doc = {
-          _id: hit._id,
-          _type: hit._type,
-          _index: hit._index,
-          _source: { ...flattened },
-          ...flattened,
-        };
-
-        for (const propName of Object.keys(doc)) {
-          if (propNames.indexOf(propName) === -1) {
-            propNames.push(propName);
-          }
-        }
-
-        docs.push(doc);
-      }
-
+      const { propNames, docs } = flattenHits(response.hits.hits);
       if (docs.length > 0) {
-        propNames = propNames.sort();
-        const series = new MutableDataFrame({ fields: [] });
-
-        series.addField({
-          name: this.targets[0].timeField,
-          type: FieldType.time,
-        });
-
-        if (logMessageField) {
-          series.addField({
-            name: logMessageField,
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return v || '';
-          };
-        } else {
-          series.addField({
-            name: '_source',
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return JSON.stringify(v, null, 2);
-          };
-        }
-
-        if (logLevelField) {
-          series.addField({
-            name: 'level',
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return v || '';
-          };
-        }
-
-        for (const propName of propNames) {
-          if (propName === this.targets[0].timeField || propName === '_source') {
-            continue;
-          }
-
-          series.addField({
-            name: propName,
-            type: FieldType.string,
-          }).parse = (v: any) => {
-            return v || '';
-          };
-        }
+        const series = createEmptyDataFrame(propNames, this.targets[0].timeField, logMessageField, logLevelField);

         // Add a row for each document
         for (const doc of docs) {
+          if (logLevelField) {
+            // Remap level field based on the datasource config. This field is then used in explore to figure out the
+            // log level. We may rewrite some actual data in the level field if they are different.
+            doc['level'] = doc[logLevelField];
+          }
+
           series.add(doc);
         }
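The remap is the core of the change: the configured level field is copied into a `level` property on each flattened doc before the row is added, so downstream consumers only ever look at `level`. A small illustration with a hypothetical doc (the field names mirror the test fixtures later in this diff):

    const logLevelField = 'fields.lvl'; // as configured on the datasource
    const doc: Record<string, any> = {
      '@timestamp': '2019-06-24T09:51:19.765Z',
      message: 'hello, i am a message',
      level: 'error',        // value the document itself carries
      'fields.lvl': 'debug', // flattened nested key produced by flattenHits
    };

    // Same remap as in the diff: the configured field wins, overwriting the
    // document's own `level` ('error' becomes 'debug' here).
    doc['level'] = doc[logLevelField];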
@@ -522,3 +461,110 @@ export class ElasticResponse {
     return { data: dataFrame };
   }
 }
+
+type Doc = {
+  _id: string;
+  _type: string;
+  _index: string;
+  _source?: any;
+};
+
+/**
+ * Flatten the docs from the response, mainly the _source part, which can be nested. This flattens it so that it is
+ * one level deep and the keys are: `level1Name.level2Name...`. Also returns a list of all properties from all the
+ * docs (not all docs have to have the same keys).
+ * @param hits
+ */
+const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames: string[] } => {
+  const docs: any[] = [];
+  // We keep a list of all props so that we can create all the fields in the dataFrame, this can lead
+  // to wide sparse dataframes in case the scheme is different per document.
+  let propNames: string[] = [];
+
+  for (const hit of hits) {
+    const flattened = hit._source ? flatten(hit._source, null) : {};
+    const doc = {
+      _id: hit._id,
+      _type: hit._type,
+      _index: hit._index,
+      _source: { ...flattened },
+      ...flattened,
+    };
+
+    for (const propName of Object.keys(doc)) {
+      if (propNames.indexOf(propName) === -1) {
+        propNames.push(propName);
+      }
+    }
+
+    docs.push(doc);
+  }
+
+  propNames.sort();
+  return { docs, propNames };
+};
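Based on the implementation above, a single hit flattens to a doc that carries both a flattened `_source` copy and the same keys spread to the top level. A sketch of the expected shape (the sample values are illustrative):

    const { docs, propNames } = flattenHits([
      {
        _id: 'fdsfs',
        _type: '_doc',
        _index: 'mock-index',
        _source: { message: 'hello', fields: { lvl: 'debug' } },
      },
    ]);
    // docs[0] ~ {
    //   _id: 'fdsfs', _type: '_doc', _index: 'mock-index',
    //   _source: { message: 'hello', 'fields.lvl': 'debug' }, // flattened copy
    //   message: 'hello', 'fields.lvl': 'debug',              // spread to top level
    // }
    // propNames ~ ['_id', '_index', '_source', '_type', 'fields.lvl', 'message'] (sorted)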
+/**
+ * Create an empty data frame, but with its fields already created. Fields are based on propNames (which should come
+ * from the response) and on the configured fields for message, time, and level.
+ * @param propNames
+ * @param timeField
+ * @param logMessageField
+ * @param logLevelField
+ */
+const createEmptyDataFrame = (
+  propNames: string[],
+  timeField: string,
+  logMessageField?: string,
+  logLevelField?: string
+): MutableDataFrame => {
+  const series = new MutableDataFrame({ fields: [] });
+
+  series.addField({
+    name: timeField,
+    type: FieldType.time,
+  });
+
+  if (logMessageField) {
+    series.addField({
+      name: logMessageField,
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return v || '';
+    };
+  } else {
+    series.addField({
+      name: '_source',
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return JSON.stringify(v, null, 2);
+    };
+  }
+
+  if (logLevelField) {
+    series.addField({
+      name: 'level',
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return v || '';
+    };
+  }
+
+  const fieldNames = series.fields.map(field => field.name);
+
+  for (const propName of propNames) {
+    // Do not duplicate fields. This can mean that we will shadow some fields.
+    if (fieldNames.includes(propName)) {
+      continue;
+    }
+
+    series.addField({
+      name: propName,
+      type: FieldType.string,
+    }).parse = (v: any) => {
+      return v || '';
+    };
+  }
+
+  return series;
+};
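A hypothetical call mirroring how getLogs() uses this helper above; the argument values are illustrative:

    const series = createEmptyDataFrame(
      ['@timestamp', 'message', 'fields.lvl'], // propNames from flattenHits
      '@timestamp',                            // timeField from the target
      'message',                               // logMessageField from the datasource config
      'fields.lvl'                             // logLevelField from the datasource config
    );
    // Fields created: '@timestamp' (time), 'message' (string), 'level' (string),
    // and 'fields.lvl' (string). '@timestamp' and 'message' from propNames are
    // skipped as duplicates; 'fields.lvl' is not, since only exact name matches
    // with already-created fields are filtered out.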
@@ -1,5 +1,6 @@
-import { DataFrameView, KeyValue, MutableDataFrame } from '@grafana/data';
+import { DataFrameView, FieldCache, KeyValue, MutableDataFrame } from '@grafana/data';
 import { ElasticResponse } from '../elastic_response';
+import flatten from 'app/core/utils/flatten';

 describe('ElasticResponse', () => {
   let targets;
@@ -827,71 +828,76 @@ describe('ElasticResponse', () => {
   });

   describe('simple logs query and count', () => {
-    beforeEach(() => {
-      targets = [
-        {
-          refId: 'A',
-          metrics: [{ type: 'count', id: '1' }],
-          bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
-          context: 'explore',
-          interval: '10s',
-          isLogsQuery: true,
-          key: 'Q-1561369883389-0.7611823271062786-0',
-          liveStreaming: false,
-          maxDataPoints: 1620,
-          query: '',
-          timeField: '@timestamp',
-        },
-      ];
-      response = {
-        responses: [
-          {
-            aggregations: {
-              '2': {
-                buckets: [
-                  {
-                    doc_count: 10,
-                    key: 1000,
-                  },
-                  {
-                    doc_count: 15,
-                    key: 2000,
-                  },
-                ],
-              },
-            },
-            hits: {
-              hits: [
-                {
-                  _id: 'fdsfs',
-                  _type: '_doc',
-                  _index: 'mock-index',
-                  _source: {
-                    '@timestamp': '2019-06-24T09:51:19.765Z',
-                    host: 'djisaodjsoad',
-                    message: 'hello, i am a message',
-                  },
-                },
-                {
-                  _id: 'kdospaidopa',
-                  _type: '_doc',
-                  _index: 'mock-index',
-                  _source: {
-                    '@timestamp': '2019-06-24T09:52:19.765Z',
-                    host: 'dsalkdakdop',
-                    message: 'hello, i am also message',
-                  },
-                },
-              ],
-            },
-          },
-        ],
-      };
-
-      result = new ElasticResponse(targets, response).getLogs();
-    });
+    const targets: any = [
+      {
+        refId: 'A',
+        metrics: [{ type: 'count', id: '1' }],
+        bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
+        context: 'explore',
+        interval: '10s',
+        isLogsQuery: true,
+        key: 'Q-1561369883389-0.7611823271062786-0',
+        liveStreaming: false,
+        maxDataPoints: 1620,
+        query: '',
+        timeField: '@timestamp',
+      },
+    ];
+    const response = {
+      responses: [
+        {
+          aggregations: {
+            '2': {
+              buckets: [
+                {
+                  doc_count: 10,
+                  key: 1000,
+                },
+                {
+                  doc_count: 15,
+                  key: 2000,
+                },
+              ],
+            },
+          },
+          hits: {
+            hits: [
+              {
+                _id: 'fdsfs',
+                _type: '_doc',
+                _index: 'mock-index',
+                _source: {
+                  '@timestamp': '2019-06-24T09:51:19.765Z',
+                  host: 'djisaodjsoad',
+                  message: 'hello, i am a message',
+                  level: 'debug',
+                  fields: {
+                    lvl: 'debug',
+                  },
+                },
+              },
+              {
+                _id: 'kdospaidopa',
+                _type: '_doc',
+                _index: 'mock-index',
+                _source: {
+                  '@timestamp': '2019-06-24T09:52:19.765Z',
+                  host: 'dsalkdakdop',
+                  message: 'hello, i am also message',
+                  level: 'error',
+                  fields: {
+                    lvl: 'info',
+                  },
+                },
+              },
+            ],
+          },
+        },
+      ],
+    };

     it('should return histogram aggregation and documents', () => {
+      const result = new ElasticResponse(targets, response).getLogs();
       expect(result.data.length).toBe(2);
       const logResults = result.data[0] as MutableDataFrame;
       const fields = logResults.fields.map(f => {
@@ -911,7 +917,7 @@ describe('ElasticResponse', () => {
         expect(r._id).toEqual(response.responses[0].hits.hits[i]._id);
         expect(r._type).toEqual(response.responses[0].hits.hits[i]._type);
         expect(r._index).toEqual(response.responses[0].hits.hits[i]._index);
-        expect(r._source).toEqual(response.responses[0].hits.hits[i]._source);
+        expect(r._source).toEqual(flatten(response.responses[0].hits.hits[i]._source, null));
       }

       // Make a map from the histogram results
@@ -927,5 +933,19 @@ describe('ElasticResponse', () => {
         expect(hist[bucket.key]).toEqual(bucket.doc_count);
       });
     });
+
+    it('should map levels field', () => {
+      const result = new ElasticResponse(targets, response).getLogs(undefined, 'level');
+      const fieldCache = new FieldCache(result.data[0]);
+      const field = fieldCache.getFieldByName('level');
+      expect(field.values.toArray()).toEqual(['debug', 'error']);
+    });
+
+    it('should re map levels field to new field', () => {
+      const result = new ElasticResponse(targets, response).getLogs(undefined, 'fields.lvl');
+      const fieldCache = new FieldCache(result.data[0]);
+      const field = fieldCache.getFieldByName('level');
+      expect(field.values.toArray()).toEqual(['debug', 'info']);
+    });
   });
 });
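Outside Jest, the same inspection the two tests perform could look like the sketch below, reusing the `targets` and `response` fixtures from this file (FieldCache is the @grafana/data helper the new import brings in):

    const result = new ElasticResponse(targets, response).getLogs(undefined, 'fields.lvl');
    const fieldCache = new FieldCache(result.data[0]);
    const level = fieldCache.getFieldByName('level');
    // 'fields.lvl' was copied over each doc's own 'level' before the rows were
    // added, so the second row reads 'info' rather than the raw 'error'.
    console.log(level.values.toArray()); // ['debug', 'info']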