mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
logs: handle dataplane-compatible logs-frames (#70318)
* logs: handle dataplane-compatible logs-frames * simpler tests * simpler code * simpler code * switch to null from undefined * updated tests * added test for simplest case * more direct comparisons * simplified code * removed by-mistake added code
This commit is contained in:
parent
8e9ec9dcef
commit
e9b9a08dd0
@ -14,7 +14,6 @@ import {
|
||||
FieldCache,
|
||||
FieldColorModeId,
|
||||
FieldType,
|
||||
FieldWithIndex,
|
||||
findCommonLabels,
|
||||
findUniqueLabels,
|
||||
getTimeField,
|
||||
@ -41,6 +40,7 @@ import { BarAlignment, GraphDrawStyle, StackingMode } from '@grafana/schema';
|
||||
import { ansicolor, colors } from '@grafana/ui';
|
||||
import { getThemeColor } from 'app/core/utils/colors';
|
||||
|
||||
import { Attributes, LogsFrame, parseLogsFrame } from '../features/logs/logsFrame';
|
||||
import { getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from '../features/logs/utils';
|
||||
|
||||
export const LIMIT_LABEL = 'Line limit';
|
||||
@ -310,42 +310,20 @@ function separateLogsAndMetrics(dataFrames: DataFrame[]) {
|
||||
return { logSeries, metricSeries };
|
||||
}
|
||||
|
||||
interface LogFields {
|
||||
series: DataFrame;
|
||||
|
||||
timeField: FieldWithIndex;
|
||||
stringField: FieldWithIndex;
|
||||
labelsField?: FieldWithIndex;
|
||||
timeNanosecondField?: FieldWithIndex;
|
||||
logLevelField?: FieldWithIndex;
|
||||
idField?: FieldWithIndex;
|
||||
interface LogInfo {
|
||||
rawFrame: DataFrame;
|
||||
logsFrame: LogsFrame;
|
||||
frameLabels?: Labels[];
|
||||
}
|
||||
|
||||
function getAllLabels(fields: LogFields): Labels[] {
|
||||
// there are two types of dataframes we handle:
|
||||
// 1. labels are in a separate field (more efficient when labels change by every log-row)
|
||||
// 2. labels are in in the string-field's `.labels` attribute
|
||||
export function attributesToLabels(attributes: Attributes): Labels {
|
||||
const result: Labels = {};
|
||||
|
||||
const { stringField, labelsField } = fields;
|
||||
Object.entries(attributes).forEach(([k, v]) => {
|
||||
result[k] = typeof v === 'string' ? v : JSON.stringify(v);
|
||||
});
|
||||
|
||||
if (labelsField !== undefined) {
|
||||
return labelsField.values;
|
||||
} else {
|
||||
return [stringField.labels ?? {}];
|
||||
}
|
||||
}
|
||||
|
||||
function getLabelsForFrameRow(fields: LogFields, index: number): Labels {
|
||||
// there are two types of dataframes we handle.
|
||||
// either labels-on-the-string-field, or labels-in-the-labels-field
|
||||
|
||||
const { stringField, labelsField } = fields;
|
||||
|
||||
if (labelsField !== undefined) {
|
||||
return labelsField.values[index];
|
||||
} else {
|
||||
return stringField.labels ?? {};
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -359,7 +337,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
|
||||
const allLabels: Labels[][] = [];
|
||||
|
||||
// Find the fields we care about and collect all labels
|
||||
let allSeries: LogFields[] = [];
|
||||
let allSeries: LogInfo[] = [];
|
||||
|
||||
// We are sometimes passing data frames with no fields because we want to calculate correct meta stats.
|
||||
// Therefore we need to filter out series with no fields. These series are used only for meta stats calculation.
|
||||
@ -367,31 +345,19 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
|
||||
|
||||
if (seriesWithFields.length) {
|
||||
seriesWithFields.forEach((series) => {
|
||||
const fieldCache = new FieldCache(series);
|
||||
const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
|
||||
const timeField = fieldCache.getFirstFieldOfType(FieldType.time);
|
||||
// NOTE: this is experimental, please do not use in your code.
|
||||
// we will get this custom-frame-type into the "real" frame-type list soon,
|
||||
// but the name might change, so please do not use it until then.
|
||||
const labelsField =
|
||||
series.meta?.custom?.frameType === 'LabeledTimeValues' ? fieldCache.getFieldByName('labels') : undefined;
|
||||
|
||||
if (stringField !== undefined && timeField !== undefined) {
|
||||
const logsFrame = parseLogsFrame(series);
|
||||
if (logsFrame != null) {
|
||||
// for now we ignore the nested-ness of attributes, and just stringify-them
|
||||
const frameLabels = logsFrame.getAttributesAsLabels() ?? undefined;
|
||||
const info = {
|
||||
series,
|
||||
timeField,
|
||||
labelsField,
|
||||
timeNanosecondField: fieldCache.getFieldByName('tsNs'),
|
||||
stringField,
|
||||
logLevelField: fieldCache.getFieldByName('level'),
|
||||
idField: getIdField(fieldCache),
|
||||
rawFrame: series,
|
||||
logsFrame: logsFrame,
|
||||
frameLabels,
|
||||
};
|
||||
|
||||
allSeries.push(info);
|
||||
|
||||
const labels = getAllLabels(info);
|
||||
if (labels.length > 0) {
|
||||
allLabels.push(labels);
|
||||
if (frameLabels && frameLabels.length > 0) {
|
||||
allLabels.push(frameLabels);
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -404,7 +370,8 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
|
||||
let hasUniqueLabels = false;
|
||||
|
||||
for (const info of allSeries) {
|
||||
const { timeField, timeNanosecondField, stringField, logLevelField, idField, series } = info;
|
||||
const { logsFrame, rawFrame: series, frameLabels } = info;
|
||||
const { timeField, timeNanosecondField, bodyField: stringField, severityField: logLevelField, idField } = logsFrame;
|
||||
|
||||
for (let j = 0; j < series.length; j++) {
|
||||
const ts = timeField.values[j];
|
||||
@ -426,7 +393,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
|
||||
const searchWords = series.meta?.custom?.searchWords ?? series.meta?.searchWords ?? [];
|
||||
const entry = hasAnsi ? ansicolor.strip(message) : message;
|
||||
|
||||
const labels = getLabelsForFrameRow(info, j);
|
||||
const labels = frameLabels?.[j];
|
||||
const uniqueLabels = findUniqueLabels(labels, commonLabels);
|
||||
if (Object.keys(uniqueLabels).length > 0) {
|
||||
hasUniqueLabels = true;
|
||||
@ -540,17 +507,6 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
|
||||
};
|
||||
}
|
||||
|
||||
function getIdField(fieldCache: FieldCache): FieldWithIndex | undefined {
|
||||
const idFieldNames = ['id'];
|
||||
for (const fieldName of idFieldNames) {
|
||||
const idField = fieldCache.getFieldByName(fieldName);
|
||||
if (idField) {
|
||||
return idField;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Used to add additional information to Line limit meta info
|
||||
function adjustMetaInfo(logsModel: LogsModel, visibleRangeMs?: number, requestedRangeMs?: number): LogsMetaItem[] {
|
||||
let logsModelMeta = [...logsModel.meta!];
|
||||
|
53
public/app/features/logs/legacyLogsFrame.ts
Normal file
53
public/app/features/logs/legacyLogsFrame.ts
Normal file
@ -0,0 +1,53 @@
|
||||
import { DataFrame, FieldCache, FieldType, Field, Labels } from '@grafana/data';
|
||||
|
||||
import type { LogsFrame } from './logsFrame';
|
||||
|
||||
function getLabels(frame: DataFrame, cache: FieldCache, lineField: Field): Labels[] | null {
|
||||
const useLabelsField = frame.meta?.custom?.frameType === 'LabeledTimeValues';
|
||||
|
||||
if (!useLabelsField) {
|
||||
const lineLabels = lineField.labels;
|
||||
if (lineLabels !== undefined) {
|
||||
const result = new Array(frame.length);
|
||||
result.fill(lineLabels);
|
||||
return result;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const labelsField = cache.getFieldByName('labels');
|
||||
|
||||
if (labelsField === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return labelsField.values;
|
||||
}
|
||||
|
||||
export function parseLegacyLogsFrame(frame: DataFrame): LogsFrame | null {
|
||||
const cache = new FieldCache(frame);
|
||||
const timeField = cache.getFields(FieldType.time)[0];
|
||||
const bodyField = cache.getFields(FieldType.string)[0];
|
||||
|
||||
// these two are mandatory
|
||||
if (timeField === undefined || bodyField === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const timeNanosecondField = cache.getFieldByName('tsNs') ?? null;
|
||||
const severityField = cache.getFieldByName('level') ?? null;
|
||||
const idField = cache.getFieldByName('id') ?? null;
|
||||
|
||||
const labels = getLabels(frame, cache, bodyField);
|
||||
|
||||
return {
|
||||
timeField,
|
||||
bodyField,
|
||||
timeNanosecondField,
|
||||
severityField,
|
||||
idField,
|
||||
attributes: labels,
|
||||
getAttributesAsLabels: () => labels,
|
||||
};
|
||||
}
|
209
public/app/features/logs/logsFrame.test.ts
Normal file
209
public/app/features/logs/logsFrame.test.ts
Normal file
@ -0,0 +1,209 @@
|
||||
import { FieldType, DataFrameType, Field, Labels } from '@grafana/data';
|
||||
|
||||
import { parseLogsFrame, attributesToLabels } from './logsFrame';
|
||||
|
||||
function makeString(name: string, values: string[], labels?: Labels): Field {
|
||||
return {
|
||||
name,
|
||||
type: FieldType.string,
|
||||
config: {},
|
||||
values,
|
||||
labels,
|
||||
};
|
||||
}
|
||||
|
||||
function makeTime(name: string, values: number[], nanos?: number[]): Field {
|
||||
return {
|
||||
name,
|
||||
type: FieldType.time,
|
||||
config: {},
|
||||
values,
|
||||
};
|
||||
}
|
||||
|
||||
function makeObject(name: string, values: Object[]): Field {
|
||||
return {
|
||||
name,
|
||||
type: FieldType.other,
|
||||
config: {},
|
||||
values,
|
||||
};
|
||||
}
|
||||
|
||||
// End-to-end coverage for parseLogsFrame: each test feeds one of the supported
// dataframe shapes (dataplane, old multi-frame Loki, single-frame Loki with a
// labels-field, elastic-style, minimal time+line) and verifies which LogsFrame
// fields get populated and which stay null.
describe('parseLogsFrame should parse different logs-dataframe formats', () => {
  it('should parse a dataplane-complaint logs frame', () => {
    const time = makeTime('timestamp', [1687185711795, 1687185711995]);
    const body = makeString('body', ['line1', 'line2']);
    const severity = makeString('severity', ['info', 'debug']);
    const id = makeString('id', ['id1', 'id2']);
    const attributes = makeObject('attributes', [
      { counter: '38141', label: 'val2', level: 'warning' },
      { counter: '38143', label: 'val2', level: 'info' },
    ]);

    // meta.type=LogLines selects the dataplane parsing path;
    // field order is deliberately shuffled — lookup is by name, not position
    const result = parseLogsFrame({
      meta: {
        type: DataFrameType.LogLines,
      },
      fields: [id, body, attributes, severity, time],
      length: 2,
    });

    expect(result).not.toBeNull();

    expect(result!.timeField.values[0]).toBe(time.values[0]);
    expect(result!.bodyField.values[0]).toBe(body.values[0]);
    expect(result!.idField?.values[0]).toBe(id.values[0]);
    expect(result!.timeNanosecondField).toBeNull();
    expect(result!.severityField?.values[0]).toBe(severity.values[0]);
    expect(result!.attributes).toStrictEqual([
      { counter: '38141', label: 'val2', level: 'warning' },
      { counter: '38143', label: 'val2', level: 'info' },
    ]);
  });

  it('should parse old Loki-style (grafana8.x) frames ( multi-frame, but here we only parse a single frame )', () => {
    const time = makeTime('ts', [1687185711795, 1687185711995]);
    // labels live on the line-field itself, so every row shares the same set
    const line = makeString('line', ['line1', 'line2'], { counter: '34543', lable: 'val3', level: 'info' });
    const id = makeString('id', ['id1', 'id2']);
    const ns = makeString('tsNs', ['1687185711795123456', '1687185711995987654']);

    // no meta.type => legacy parsing path
    const result = parseLogsFrame({
      fields: [time, line, ns, id],
      length: 2,
    });

    expect(result).not.toBeNull();

    expect(result!.timeField.values[0]).toBe(time.values[0]);
    expect(result!.bodyField.values[0]).toBe(line.values[0]);
    expect(result!.idField?.values[0]).toBe(id.values[0]);
    expect(result!.timeNanosecondField?.values[0]).toBe(ns.values[0]);
    expect(result!.severityField).toBeNull();
    // the shared line-field labels are repeated once per row
    expect(result!.attributes).toStrictEqual([
      { counter: '34543', lable: 'val3', level: 'info' },
      { counter: '34543', lable: 'val3', level: 'info' },
    ]);
  });

  it('should parse a Loki-style frame (single-frame, labels-in-json)', () => {
    const time = makeTime('Time', [1687185711795, 1687185711995]);
    const line = makeString('Line', ['line1', 'line2']);
    const id = makeString('id', ['id1', 'id2']);
    const ns = makeString('tsNs', ['1687185711795123456', '1687185711995987654']);
    const labels = makeObject('labels', [
      { counter: '38141', label: 'val2', level: 'warning' },
      { counter: '38143', label: 'val2', level: 'info' },
    ]);

    // frameType=LabeledTimeValues => per-row labels come from the `labels` field
    const result = parseLogsFrame({
      meta: {
        custom: {
          frameType: 'LabeledTimeValues',
        },
      },
      fields: [labels, time, line, ns, id],
      length: 2,
    });

    expect(result).not.toBeNull();

    expect(result!.timeField.values[0]).toBe(time.values[0]);
    expect(result!.bodyField.values[0]).toBe(line.values[0]);
    expect(result!.idField?.values[0]).toBe(id.values[0]);
    expect(result!.timeNanosecondField?.values[0]).toBe(ns.values[0]);
    expect(result!.severityField).toBeNull();
    expect(result!.attributes).toStrictEqual([
      { counter: '38141', label: 'val2', level: 'warning' },
      { counter: '38143', label: 'val2', level: 'info' },
    ]);
  });

  it('should parse elastic-style frame (has level-field, no labels parsed, extra fields ignored)', () => {
    const time = makeTime('Time', [1687185711795, 1687185711995]);
    const line = makeString('Line', ['line1', 'line2']);
    const source = makeObject('_source', [
      { counter: '38141', label: 'val2', level: 'warning' },
      { counter: '38143', label: 'val2', level: 'info' },
    ]);
    const host = makeString('hostname', ['h1', 'h2']);
    const level = makeString('level', ['info', 'error']);

    // frameType=LabeledTimeValues but there is no field named `labels`,
    // so attributes end up null; `_source` and `hostname` are ignored
    const result = parseLogsFrame({
      meta: {
        custom: {
          frameType: 'LabeledTimeValues',
        },
      },
      fields: [time, line, source, level, host],
      length: 2,
    });

    expect(result).not.toBeNull();

    expect(result!.timeField.values[0]).toBe(time.values[0]);
    expect(result!.bodyField.values[0]).toBe(line.values[0]);
    expect(result!.severityField?.values[0]).toBe(level.values[0]);
    expect(result!.idField).toBeNull();
    expect(result!.timeNanosecondField).toBeNull();
    expect(result!.attributes).toBeNull();
  });

  it('should parse a minimal old-style frame (only two fields, time and line)', () => {
    const time = makeTime('Time', [1687185711795, 1687185711995]);
    const line = makeString('Line', ['line1', 'line2']);

    // only the two mandatory fields; every optional LogsFrame member stays null
    const result = parseLogsFrame({
      fields: [time, line],
      length: 2,
    });

    expect(result).not.toBeNull();

    expect(result!.timeField.values[0]).toBe(time.values[0]);
    expect(result!.bodyField.values[0]).toBe(line.values[0]);
    expect(result!.severityField).toBeNull();
    expect(result!.idField).toBeNull();
    expect(result!.timeNanosecondField).toBeNull();
    expect(result!.attributes).toBeNull();
  });
});
|
||||
|
||||
// attributesToLabels: string values pass through, everything else is
// JSON-stringified into the resulting Labels map.
describe('attributesToLabels', () => {
  it('should convert nested structures correctly', () => {
    expect(
      attributesToLabels({
        key1: 'val1',
        key2: ['k2v1', 'k2v2', 'k2v3'],
        key3: {
          k3k1: 'v1',
          k3k2: 'v2',
          k3k3: [
            'k3k3v1',
            {
              k3k3k1: 'one',
              k3k3k2: 'two',
            },
          ],
        },
      })
    ).toStrictEqual({
      key1: 'val1',
      key2: '["k2v1","k2v2","k2v3"]',
      key3: '{"k3k1":"v1","k3k2":"v2","k3k3":["k3k3v1",{"k3k3k1":"one","k3k3k2":"two"}]}',
    });
  });

  it('should convert not-nested structures correctly', () => {
    expect(
      attributesToLabels({
        key1: 'val1',
        key2: 'val2',
      })
    ).toStrictEqual({
      key1: 'val1',
      key2: 'val2',
    });
  });
});
|
81
public/app/features/logs/logsFrame.ts
Normal file
81
public/app/features/logs/logsFrame.ts
Normal file
@ -0,0 +1,81 @@
|
||||
import { DataFrame, FieldCache, FieldType, FieldWithIndex, DataFrameType, Labels } from '@grafana/data';
|
||||
|
||||
import { parseLegacyLogsFrame } from './legacyLogsFrame';
|
||||
|
||||
// These are like Labels, but their values can be
// arbitrary structures, not just strings.
export type Attributes = Record<string, unknown>;

// A normalized view over the various logs-dataframe layouts.
// The attributes-access is a little awkward, but it is necessary because
// there are multiple, very different dataframe-representations.
export type LogsFrame = {
  timeField: FieldWithIndex; // mandatory: log-row timestamp
  bodyField: FieldWithIndex; // mandatory: log-line text
  timeNanosecondField: FieldWithIndex | null; // legacy `tsNs` field, when present
  severityField: FieldWithIndex | null; // log level, when present
  idField: FieldWithIndex | null; // unique row id, when present
  attributes: Attributes[] | null; // per-row structured labels, when present
  getAttributesAsLabels: () => Labels[] | null; // temporarily exists to make the labels=>attributes migration simpler
};
||||
|
||||
function getField(cache: FieldCache, name: string, fieldType: FieldType): FieldWithIndex | undefined {
|
||||
const field = cache.getFieldByName(name);
|
||||
if (field === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return field.type === fieldType ? field : undefined;
|
||||
}
|
||||
|
||||
// Well-known field names used by dataplane-compatible logs frames.
const DATAPLANE_TIMESTAMP_NAME = 'timestamp';
const DATAPLANE_BODY_NAME = 'body';
const DATAPLANE_SEVERITY_NAME = 'severity';
const DATAPLANE_ID_NAME = 'id';
const DATAPLANE_ATTRIBUTES_NAME = 'attributes';
||||
|
||||
export function attributesToLabels(attributes: Attributes): Labels {
|
||||
const result: Labels = {};
|
||||
|
||||
Object.entries(attributes).forEach(([k, v]) => {
|
||||
result[k] = typeof v === 'string' ? v : JSON.stringify(v);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function parseDataplaneLogsFrame(frame: DataFrame): LogsFrame | null {
|
||||
const cache = new FieldCache(frame);
|
||||
|
||||
const timestampField = getField(cache, DATAPLANE_TIMESTAMP_NAME, FieldType.time);
|
||||
const bodyField = getField(cache, DATAPLANE_BODY_NAME, FieldType.string);
|
||||
|
||||
// these two are mandatory
|
||||
if (timestampField === undefined || bodyField === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const severityField = getField(cache, DATAPLANE_SEVERITY_NAME, FieldType.string) ?? null;
|
||||
const idField = getField(cache, DATAPLANE_ID_NAME, FieldType.string) ?? null;
|
||||
const attributesField = getField(cache, DATAPLANE_ATTRIBUTES_NAME, FieldType.other) ?? null;
|
||||
|
||||
const attributes = attributesField === null ? null : attributesField.values;
|
||||
|
||||
return {
|
||||
timeField: timestampField,
|
||||
bodyField,
|
||||
severityField,
|
||||
idField,
|
||||
attributes,
|
||||
timeNanosecondField: null,
|
||||
getAttributesAsLabels: () => (attributes !== null ? attributes.map(attributesToLabels) : null),
|
||||
};
|
||||
return null;
|
||||
}
|
||||
|
||||
export function parseLogsFrame(frame: DataFrame): LogsFrame | null {
|
||||
if (frame.meta?.type === DataFrameType.LogLines) {
|
||||
return parseDataplaneLogsFrame(frame);
|
||||
} else {
|
||||
return parseLegacyLogsFrame(frame);
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue
Block a user