Mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00
Elasticsearch: Create Raw Doc metric to render raw JSON docs in columns in the new table panel (#26233)
* test
* WIP: Create v2 version
* Update tests, remove console logs, refactor
* Remove incorrect types
* Update type
* Rename legacy and new metrics
* Update
* Run request when switching from Raw Data to Raw Document
* Fix size updating
* Remove _source field from table results as we are showing each source field as column
* Remove _source just for metrics, not logs
* Revert "Remove _source just for metrics, not logs"
  This reverts commit 611b6922f7.
* Revert "Remove _source field from table results as we are showing each source field as column"
  This reverts commit 31a9d5f81b.
* Add vis preference for logs
* Update visualisation to logs
* Revert "Revert "Remove _source just for metrics""
  This reverts commit a102ab2894.

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
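For context on how raw JSON docs end up as one column per field, here is a minimal, hypothetical sketch of a flattening helper in the spirit of the flattenHits function referenced in the diff below. The Hit type, the sketch's name, and the way nested _source fields are spread onto the top level are assumptions for illustration, not code from this commit:

// Hypothetical sketch only; not the Grafana implementation.
type Hit = {
  _id: string;
  _type: string;
  _index: string;
  _source?: Record<string, any>;
};

const flattenHitsSketch = (hits: Hit[]): { docs: Array<Record<string, any>>; propNames: string[] } => {
  const docs: Array<Record<string, any>> = [];
  const propNames: string[] = [];

  for (const hit of hits) {
    // Spread the _source fields onto the top level so each one can become its
    // own column, while keeping the raw _source around for the logs view.
    const doc: Record<string, any> = {
      _id: hit._id,
      _type: hit._type,
      _index: hit._index,
      _source: { ...hit._source },
      ...hit._source,
    };

    for (const propName of Object.keys(doc)) {
      if (!propNames.includes(propName)) {
        propNames.push(propName);
      }
    }

    docs.push(doc);
  }

  return { docs, propNames: propNames.sort() };
};

With docs shaped like this, the table path can skip the _source column (as the last hunk below does for non-logs requests) and still render every source field, while the logs path keeps _source intact.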
@@ -391,6 +391,88 @@ export class ElasticResponse {
  }

  getTimeSeries() {
    if (this.targets.some((target: any) => target.metrics.some((metric: any) => metric.type === 'raw_data'))) {
      return this.processResponseToDataFrames(false);
    }
    return this.processResponseToSeries();
  }

  getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
    return this.processResponseToDataFrames(true, logMessageField, logLevelField);
  }

  processResponseToDataFrames(
    isLogsRequest: boolean,
    logMessageField?: string,
    logLevelField?: string
  ): DataQueryResponse {
    const dataFrame: DataFrame[] = [];

    for (let n = 0; n < this.response.responses.length; n++) {
      const response = this.response.responses[n];
      if (response.error) {
        throw this.getErrorFromElasticResponse(this.response, response.error);
      }

      if (response.hits && response.hits.hits.length > 0) {
        const { propNames, docs } = flattenHits(response.hits.hits);
        if (docs.length > 0) {
          let series = createEmptyDataFrame(
            propNames,
            this.targets[0].timeField,
            isLogsRequest,
            logMessageField,
            logLevelField
          );

          // Add a row for each document
          for (const doc of docs) {
            if (logLevelField) {
              // Remap level field based on the datasource config. This field is then used in explore to figure out the
              // log level. We may rewrite some actual data in the level field if they are different.
              doc['level'] = doc[logLevelField];
            }

            series.add(doc);
          }
          if (isLogsRequest) {
            series = addPreferredVisualisationType(series, 'logs');
          }
          dataFrame.push(series);
        }
      }

      if (response.aggregations) {
        const aggregations = response.aggregations;
        const target = this.targets[n];
        const tmpSeriesList: any[] = [];
        const table = new TableModel();

        this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
        this.trimDatapoints(tmpSeriesList, target);
        this.nameSeries(tmpSeriesList, target);

        if (table.rows.length > 0) {
          dataFrame.push(toDataFrame(table));
        }

        for (let y = 0; y < tmpSeriesList.length; y++) {
          let series = toDataFrame(tmpSeriesList[y]);

          // When log results, show aggregations only in graph. Log fields are then going to be shown in table.
          if (isLogsRequest) {
            series = addPreferredVisualisationType(series, 'graph');
          }

          dataFrame.push(series);
        }
      }
    }

    return { data: dataFrame };
  }

  processResponseToSeries = () => {
    const seriesList = [];

    for (let i = 0; i < this.response.responses.length; i++) {
@@ -424,59 +506,7 @@ export class ElasticResponse {
    }

    return { data: seriesList };
  }

  getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
    const dataFrame: DataFrame[] = [];

    for (let n = 0; n < this.response.responses.length; n++) {
      const response = this.response.responses[n];
      if (response.error) {
        throw this.getErrorFromElasticResponse(this.response, response.error);
      }

      const { propNames, docs } = flattenHits(response.hits.hits);
      if (docs.length > 0) {
        let series = createEmptyDataFrame(propNames, this.targets[0].timeField, logMessageField, logLevelField);

        // Add a row for each document
        for (const doc of docs) {
          if (logLevelField) {
            // Remap level field based on the datasource config. This field is then used in explore to figure out the
            // log level. We may rewrite some actual data in the level field if they are different.
            doc['level'] = doc[logLevelField];
          }

          series.add(doc);
        }

        series = addPreferredVisualisationType(series, 'logs');
        dataFrame.push(series);
      }

      if (response.aggregations) {
        const aggregations = response.aggregations;
        const target = this.targets[n];
        const tmpSeriesList: any[] = [];
        const table = new TableModel();

        this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
        this.trimDatapoints(tmpSeriesList, target);
        this.nameSeries(tmpSeriesList, target);

        for (let y = 0; y < tmpSeriesList.length; y++) {
          let series = toDataFrame(tmpSeriesList[y]);

          // When log results, show aggregations only in graph. Log fields are then going to be shown in table.
          series = addPreferredVisualisationType(series, 'graph');

          dataFrame.push(series);
        }
      }
    }

    return { data: dataFrame };
  }
  };
}

type Doc = {
@@ -532,6 +562,7 @@ const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames
const createEmptyDataFrame = (
  propNames: string[],
  timeField: string,
  isLogsRequest: boolean,
  logMessageField?: string,
  logLevelField?: string
): MutableDataFrame => {
@@ -549,13 +580,6 @@ const createEmptyDataFrame = (
    }).parse = (v: any) => {
      return v || '';
    };
  } else {
    series.addField({
      name: '_source',
      type: FieldType.string,
    }).parse = (v: any) => {
      return JSON.stringify(v, null, 2);
    };
  }

  if (logLevelField) {
@@ -574,6 +598,10 @@ const createEmptyDataFrame = (
    if (fieldNames.includes(propName)) {
      continue;
    }
    // Do not add _source field (besides logs) as we are showing each _source field in table instead.
    if (!isLogsRequest && propName === '_source') {
      continue;
    }

    series.addField({
      name: propName,
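The addPreferredVisualisationType helper called in the hunks above is not shown in this diff. As a rough, assumed sketch of what such a helper could do, it tags the frame's meta with a visualisation hint; the meta field name and merge behaviour here are assumptions, not taken from this commit:

import { DataFrame } from '@grafana/data';

// Assumed helper sketch: mark a frame so Explore prefers rendering it as
// logs or as a graph. Not the actual Grafana implementation.
const addPreferredVisualisationTypeSketch = (series: DataFrame, type: 'graph' | 'logs'): DataFrame => {
  series.meta = { ...series.meta, preferredVisualisationType: type };
  return series;
};

This mirrors how the diff uses the helper: the document frame gets 'logs', while aggregation series produced for a logs request get 'graph'.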