Logs: Update logic to process logs dataPlane frame with labels field (#77708)
* Logs: Update dataplane logic to use labels instead of attributes
* Update Loki logs data plane data frame according to specs
* Remove only in test

Parent: f9fffd3ff1
Commit: d4a257bc8e
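In dataplane terms, this commit renames the structured-metadata field of a logs frame from attributes to labels. As a rough sketch (not part of the commit), a Loki dataplane logs frame now has the shape below; the values and refId are illustrative, adapted from getMockLokiFrameDataPlane and the golden frames later in this diff.

import { DataFrame, DataFrameType, FieldType } from '@grafana/data';

// Sketch of a dataplane logs frame after this change: the structured metadata
// field is named 'labels' (FieldType.other) next to 'timestamp', 'body' and 'id'.
// Values and refId are illustrative, not taken from real data.
const dataplaneLogsFrame: DataFrame = {
  refId: 'A',
  meta: { type: DataFrameType.LogLines },
  fields: [
    { name: 'labels', type: FieldType.other, config: {}, values: [{ app: 'grafana', cluster: 'dev-us-central-0' }] },
    { name: 'timestamp', type: FieldType.time, config: {}, values: [1700000000000] },
    { name: 'body', type: FieldType.string, config: {}, values: ['log line 1'] },
    { name: 'id', type: FieldType.string, config: {}, values: ['id1'] },
  ],
  length: 1,
};

console.log(dataplaneLogsFrame.fields.map((f) => f.name)); // ['labels', 'timestamp', 'body', 'id']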
@@ -173,7 +173,7 @@ func adjustDataplaneLogsFrame(frame *data.Frame, query *lokiQuery) error {
 	}

 	timeField.Name = "timestamp"
-	labelsField.Name = "attributes"
+	labelsField.Name = "labels"
 	lineField.Name = "body"

 	if frame.Meta == nil {
@@ -11,7 +11,7 @@
 // Name:
 // Dimensions: 4 Fields by 4 Rows
 // +------------------------------------------------+-------------------------------+----------------+------------------------------+
-// | Name: attributes | Name: timestamp | Name: body | Name: id |
+// | Name: labels | Name: timestamp | Name: body | Name: id |
 // | Labels: | Labels: | Labels: | Labels: |
 // | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string |
 // +------------------------------------------------+-------------------------------+----------------+------------------------------+
@@ -38,7 +38,7 @@
     },
     "fields": [
       {
-        "name": "attributes",
+        "name": "labels",
         "type": "other",
         "typeInfo": {
          "frame": "json.RawMessage"
@@ -119,7 +119,7 @@
 // Name:
 // Dimensions: 4 Fields by 6 Rows
 // +---------------------------------------+-----------------------------------------+------------------+--------------------------------+
-// | Name: attributes | Name: timestamp | Name: body | Name: id |
+// | Name: labels | Name: timestamp | Name: body | Name: id |
 // | Labels: | Labels: | Labels: | Labels: |
 // | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string |
 // +---------------------------------------+-----------------------------------------+------------------+--------------------------------+
@@ -256,7 +256,7 @@
     },
     "fields": [
       {
-        "name": "attributes",
+        "name": "labels",
         "type": "other",
         "typeInfo": {
          "frame": "json.RawMessage"
@@ -11,7 +11,7 @@
 // Name:
 // Dimensions: 4 Fields by 4 Rows
 // +------------------------------------------------+-------------------------------+----------------+------------------------------+
-// | Name: attributes | Name: timestamp | Name: body | Name: id |
+// | Name: labels | Name: timestamp | Name: body | Name: id |
 // | Labels: | Labels: | Labels: | Labels: |
 // | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string |
 // +------------------------------------------------+-------------------------------+----------------+------------------------------+
@@ -38,7 +38,7 @@
     },
     "fields": [
      {
-        "name": "attributes",
+        "name": "labels",
         "type": "other",
         "typeInfo": {
          "frame": "json.RawMessage"
@@ -119,7 +119,7 @@
 // Name:
 // Dimensions: 4 Fields by 6 Rows
 // +---------------------------------------+-----------------------------------------+------------------+--------------------------------+
-// | Name: attributes | Name: timestamp | Name: body | Name: id |
+// | Name: labels | Name: timestamp | Name: body | Name: id |
 // | Labels: | Labels: | Labels: | Labels: |
 // | Type: []json.RawMessage | Type: []time.Time | Type: []string | Type: []string |
 // +---------------------------------------+-----------------------------------------+------------------+--------------------------------+
@@ -256,7 +256,7 @@
     },
     "fields": [
      {
-        "name": "attributes",
+        "name": "labels",
         "type": "other",
         "typeInfo": {
          "frame": "json.RawMessage"
@@ -205,11 +205,11 @@ describe('LogsTable', () => {
     });
   });

-  it('should not render `attributes`', async () => {
+  it('should not render `labels`', async () => {
    setup(undefined, getMockLokiFrameDataPlane());

    await waitFor(() => {
-      const columns = screen.queryAllByRole('columnheader', { name: 'attributes' });
+      const columns = screen.queryAllByRole('columnheader', { name: 'labels' });

      expect(columns.length).toBe(0);
    });
@@ -181,12 +181,12 @@ const isFieldFilterable = (field: Field, logsFrame?: LogsFrame | undefined) => {
 function extractFieldsAndExclude(dataFrame: DataFrame) {
   return dataFrame.fields
     .filter((field: Field & { typeInfo?: { frame: string } }) => {
-      const isFieldLokiLabels = field.typeInfo?.frame === 'json.RawMessage' && field.name === 'labels';
+      const isFieldLokiLabels =
+        field.typeInfo?.frame === 'json.RawMessage' &&
+        field.name === 'labels' &&
+        dataFrame?.meta?.type !== DataFrameType.LogLines;
       const isFieldDataplaneLabels =
-        field.name === 'attributes' &&
-        field.type === FieldType.other &&
-        dataFrame?.meta?.type === DataFrameType.LogLines;
-
+        field.name === 'labels' && field.type === FieldType.other && dataFrame?.meta?.type === DataFrameType.LogLines;
       return isFieldLokiLabels || isFieldDataplaneLabels;
     })
     .flatMap((field: Field) => {
@@ -241,7 +241,7 @@ function buildLabelFilters(columnsWithMeta: Record<string, fieldNameMeta>, logsF

   // We could be getting fresh data
   const uniqueLabels = new Set<string>();
-  const logFrameLabels = logsFrame?.getAttributesAsLabels();
+  const logFrameLabels = logsFrame?.getLogFrameLabelsAsLabels();

   // Populate the set with all labels from latest dataframe
   logFrameLabels?.forEach((labels) => {
@@ -98,7 +98,7 @@ export function LogsTableWrap(props: Props) {
   useEffect(() => {
     const numberOfLogLines = dataFrame ? dataFrame.length : 0;
     const logsFrame = parseLogsFrame(dataFrame);
-    const labels = logsFrame?.getAttributesAsLabels();
+    const labels = logsFrame?.getLogFrameLabelsAsLabels();

     const otherFields = logsFrame ? logsFrame.extraFields.filter((field) => !field?.config?.custom?.hidden) : [];
     if (logsFrame?.severityField) {
@@ -60,7 +60,7 @@ export const getMockLokiFrameDataPlane = (override?: Partial<DataFrame>): DataFr
   fields: [
     {
       config: {},
-      name: 'attributes',
+      name: 'labels',
       type: FieldType.other,
       values: [
         { app: 'grafana', cluster: 'dev-us-central-0', container: 'hg-plugins' },
@@ -267,7 +267,7 @@ describe('logParser', () => {
        },
        {
          config: {},
-          name: 'attributes',
+          name: 'labels',
          type: FieldType.other,
          values: [{ a: 1, b: 2 }],
        },
@@ -315,7 +315,7 @@ describe('logParser', () => {
        },
        {
          config: { links },
-          name: 'attributes',
+          name: 'labels',
          type: FieldType.other,
          values: [{ a: 1, b: 2 }],
        },
@@ -335,7 +335,7 @@ describe('logParser', () => {
    expectHasField(output, 'timestamp');
    expectHasField(output, 'body');
    expectHasField(output, 'id');
-    expectHasField(output, 'attributes');
+    expectHasField(output, 'labels');
    expectHasField(output, 'severity');
  });

@@ -68,8 +68,8 @@ export function parseLegacyLogsFrame(frame: DataFrame): LogsFrame | null {
     timeNanosecondField,
     severityField,
     idField,
-    getAttributes: getL,
-    getAttributesAsLabels: getL,
+    getLogFrameLabels: getL,
+    getLogFrameLabelsAsLabels: getL,
     extraFields,
   };
 }
@@ -1,6 +1,6 @@
 import { FieldType, DataFrameType, Field, Labels } from '@grafana/data';

-import { parseLogsFrame, attributesToLabels } from './logsFrame';
+import { parseLogsFrame, logFrameLabelsToLabels } from './logsFrame';

 function makeString(name: string, values: string[], labels?: Labels): Field {
   return {
@@ -36,7 +36,7 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
     const body = makeString('body', ['line1', 'line2']);
     const severity = makeString('severity', ['info', 'debug']);
     const id = makeString('id', ['id1', 'id2']);
-    const attributes = makeObject('attributes', [
+    const labels = makeObject('labels', [
       { counter: '38141', label: 'val2', level: 'warning', nested: { a: '1', b: ['2', '3'] } },
       { counter: '38143', label: 'val2', level: 'info', nested: { a: '11', b: ['12', '13'] } },
     ]);
@@ -45,7 +45,7 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
       meta: {
         type: DataFrameType.LogLines,
       },
-      fields: [id, body, attributes, severity, time],
+      fields: [id, body, labels, severity, time],
       length: 2,
     });

@@ -56,11 +56,11 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
     expect(result!.idField?.values[0]).toBe(id.values[0]);
     expect(result!.timeNanosecondField).toBeNull();
     expect(result!.severityField?.values[0]).toBe(severity.values[0]);
-    expect(result!.getAttributes()).toStrictEqual([
+    expect(result!.getLogFrameLabels()).toStrictEqual([
       { counter: '38141', label: 'val2', level: 'warning', nested: { a: '1', b: ['2', '3'] } },
       { counter: '38143', label: 'val2', level: 'info', nested: { a: '11', b: ['12', '13'] } },
     ]);
-    expect(result!.getAttributesAsLabels()).toStrictEqual([
+    expect(result!.getLogFrameLabelsAsLabels()).toStrictEqual([
       { counter: '38141', label: 'val2', level: 'warning', nested: `{"a":"1","b":["2","3"]}` },
       { counter: '38143', label: 'val2', level: 'info', nested: `{"a":"11","b":["12","13"]}` },
     ]);
@@ -85,11 +85,11 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
     expect(result!.idField?.values[0]).toBe(id.values[0]);
     expect(result!.timeNanosecondField?.values[0]).toBe(ns.values[0]);
     expect(result!.severityField).toBeNull();
-    expect(result!.getAttributes()).toStrictEqual([
+    expect(result!.getLogFrameLabels()).toStrictEqual([
       { counter: '34543', lable: 'val3', level: 'info' },
       { counter: '34543', lable: 'val3', level: 'info' },
     ]);
-    expect(result!.getAttributesAsLabels()).toStrictEqual([
+    expect(result!.getLogFrameLabelsAsLabels()).toStrictEqual([
       { counter: '34543', lable: 'val3', level: 'info' },
       { counter: '34543', lable: 'val3', level: 'info' },
     ]);
@@ -123,11 +123,11 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
     expect(result!.idField?.values[0]).toBe(id.values[0]);
     expect(result!.timeNanosecondField?.values[0]).toBe(ns.values[0]);
     expect(result!.severityField).toBeNull();
-    expect(result!.getAttributes()).toStrictEqual([
+    expect(result!.getLogFrameLabels()).toStrictEqual([
       { counter: '38141', label: 'val2', level: 'warning' },
       { counter: '38143', label: 'val2', level: 'info' },
     ]);
-    expect(result!.getAttributesAsLabels()).toStrictEqual([
+    expect(result!.getLogFrameLabelsAsLabels()).toStrictEqual([
       { counter: '38141', label: 'val2', level: 'warning' },
       { counter: '38143', label: 'val2', level: 'info' },
     ]);
@@ -161,8 +161,8 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
     expect(result!.severityField?.values[0]).toBe(level.values[0]);
     expect(result!.idField).toBeNull();
     expect(result!.timeNanosecondField).toBeNull();
-    expect(result!.getAttributesAsLabels()).toBeNull();
-    expect(result!.getAttributes()).toBeNull();
+    expect(result!.getLogFrameLabelsAsLabels()).toBeNull();
+    expect(result!.getLogFrameLabels()).toBeNull();
     expect(result?.extraFields.map((f) => f.name)).toStrictEqual(['_source', 'hostname']);
   });

@@ -182,8 +182,8 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
     expect(result!.severityField).toBeNull();
     expect(result!.idField).toBeNull();
     expect(result!.timeNanosecondField).toBeNull();
-    expect(result!.getAttributesAsLabels()).toBeNull();
-    expect(result!.getAttributes()).toBeNull();
+    expect(result!.getLogFrameLabelsAsLabels()).toBeNull();
+    expect(result!.getLogFrameLabels()).toBeNull();
     expect(result?.extraFields).toStrictEqual([]);
   });

@@ -208,15 +208,15 @@ describe('parseLogsFrame should parse different logs-dataframe formats', () => {
     expect(result!.severityField).toBeNull();
     expect(result!.idField).toBeNull();
     expect(result!.timeNanosecondField).toBeNull();
-    expect(result!.getAttributesAsLabels()).toBeNull();
-    expect(result!.getAttributes()).toBeNull();
+    expect(result!.getLogFrameLabelsAsLabels()).toBeNull();
+    expect(result!.getLogFrameLabels()).toBeNull();
   });
 });

-describe('attributesToLabels', () => {
+describe('logFrameLabelsToLabels', () => {
   it('should convert nested structures correctly', () => {
     expect(
-      attributesToLabels({
+      logFrameLabelsToLabels({
         key1: 'val1',
         key2: ['k2v1', 'k2v2', 'k2v3'],
         key3: {
@@ -240,7 +240,7 @@ describe('attributesToLabels', () => {

   it('should convert not-nested structures correctly', () => {
     expect(
-      attributesToLabels({
+      logFrameLabelsToLabels({
         key1: 'val1',
         key2: 'val2',
       })
@@ -4,18 +4,18 @@ import { parseLegacyLogsFrame } from './legacyLogsFrame';

 // these are like Labels, but their values can be
 // arbitrary structures, not just strings
-export type Attributes = Record<string, unknown>;
+export type LogFrameLabels = Record<string, unknown>;

 // the attributes-access is a little awkward, but it's necessary
-// because there are multiple,very different dataframe-represenations.
+// because there are multiple,very different dataFrame-representations.
 export type LogsFrame = {
   timeField: FieldWithIndex;
   bodyField: FieldWithIndex;
   timeNanosecondField: FieldWithIndex | null;
   severityField: FieldWithIndex | null;
   idField: FieldWithIndex | null;
-  getAttributes: () => Attributes[] | null; // may be slow, so we only do it when asked for it explicitly
-  getAttributesAsLabels: () => Labels[] | null; // temporarily exists to make the labels=>attributes migration simpler
+  getLogFrameLabels: () => LogFrameLabels[] | null; // may be slow, so we only do it when asked for it explicitly
+  getLogFrameLabelsAsLabels: () => Labels[] | null; // temporarily exists to make the labels=>attributes migration simpler
   extraFields: FieldWithIndex[];
 };

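For orientation only (not part of the diff): a minimal sketch of how a consumer reads per-row labels through the renamed accessors, assuming the snippet sits next to logsFrame.ts; the stub frame and its values are illustrative.

import { DataFrame, DataFrameType, FieldType } from '@grafana/data';
import { parseLogsFrame } from './logsFrame';

// Illustrative consumer of the renamed accessors; the frame below is a stub.
const frame: DataFrame = {
  meta: { type: DataFrameType.LogLines },
  fields: [
    { name: 'timestamp', type: FieldType.time, config: {}, values: [1700000000000] },
    { name: 'body', type: FieldType.string, config: {}, values: ['log line 1'] },
    { name: 'labels', type: FieldType.other, config: {}, values: [{ level: 'info', nested: { a: '1' } }] },
  ],
  length: 1,
};

const logsFrame = parseLogsFrame(frame);
// Raw, possibly nested per-row values (computed lazily; call only when needed):
const logFrameLabels = logsFrame?.getLogFrameLabels();
// Flattened to Labels: string values pass through, everything else is JSON-stringified.
const labels = logsFrame?.getLogFrameLabelsAsLabels();
console.log(logFrameLabels, labels);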
@@ -32,12 +32,12 @@ const DATAPLANE_TIMESTAMP_NAME = 'timestamp';
 const DATAPLANE_BODY_NAME = 'body';
 const DATAPLANE_SEVERITY_NAME = 'severity';
 const DATAPLANE_ID_NAME = 'id';
-const DATAPLANE_ATTRIBUTES_NAME = 'attributes';
+const DATAPLANE_LABELS_NAME = 'labels';

-export function attributesToLabels(attributes: Attributes): Labels {
+export function logFrameLabelsToLabels(logFrameLabels: LogFrameLabels): Labels {
   const result: Labels = {};

-  Object.entries(attributes).forEach(([k, v]) => {
+  Object.entries(logFrameLabels).forEach(([k, v]) => {
     result[k] = typeof v === 'string' ? v : JSON.stringify(v);
   });

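The conversion rule in logFrameLabelsToLabels above is simply: string values pass through, anything else is JSON-stringified, which matches the test expectations earlier in this diff. A small illustrative call, assuming the './logsFrame' import path:

import { logFrameLabelsToLabels } from './logsFrame';

// Strings are kept as-is; arrays and objects are JSON.stringify-ed.
const flattened = logFrameLabelsToLabels({
  level: 'warning',
  nested: { a: '1', b: ['2', '3'] },
});
console.log(flattened); // { level: 'warning', nested: '{"a":"1","b":["2","3"]}' }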
@@ -57,9 +57,9 @@ function parseDataplaneLogsFrame(frame: DataFrame): LogsFrame | null {

   const severityField = getField(cache, DATAPLANE_SEVERITY_NAME, FieldType.string) ?? null;
   const idField = getField(cache, DATAPLANE_ID_NAME, FieldType.string) ?? null;
-  const attributesField = getField(cache, DATAPLANE_ATTRIBUTES_NAME, FieldType.other) ?? null;
+  const labelsField = getField(cache, DATAPLANE_LABELS_NAME, FieldType.other) ?? null;

-  const attributes = attributesField === null ? null : attributesField.values;
+  const labels = labelsField === null ? null : labelsField.values;

   const extraFields = cache.fields.filter(
     (_, i) =>
@@ -67,7 +67,7 @@ function parseDataplaneLogsFrame(frame: DataFrame): LogsFrame | null {
       i !== bodyField.index &&
       i !== severityField?.index &&
       i !== idField?.index &&
-      i !== attributesField?.index
+      i !== labelsField?.index
   );

   return {
@@ -75,9 +75,9 @@ function parseDataplaneLogsFrame(frame: DataFrame): LogsFrame | null {
     bodyField,
     severityField,
     idField,
-    getAttributes: () => attributes,
+    getLogFrameLabels: () => labels,
     timeNanosecondField: null,
-    getAttributesAsLabels: () => (attributes !== null ? attributes.map(attributesToLabels) : null),
+    getLogFrameLabelsAsLabels: () => (labels !== null ? labels.map(logFrameLabelsToLabels) : null),
     extraFields,
   };
 }
@@ -366,7 +366,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[
     const logsFrame = parseLogsFrame(series);
     if (logsFrame != null) {
       // for now we ignore the nested-ness of attributes, and just stringify-them
-      const frameLabels = logsFrame.getAttributesAsLabels() ?? undefined;
+      const frameLabels = logsFrame.getLogFrameLabelsAsLabels() ?? undefined;
       const info = {
         rawFrame: series,
         logsFrame: logsFrame,
@@ -11,7 +11,7 @@ jest.mock('@grafana/data', () => ({
 }));

 describe('logSeriesToLogsModel should parse different logs-dataframe formats', () => {
-  it('should parse a dataplane-formatted logs-frame)', () => {
+  it('should parse a dataplane-formatted logs-frame', () => {
     const frames: DataFrame[] = [
       {
         refId: 'A',
@@ -23,7 +23,7 @@ describe('logSeriesToLogsModel should parse different logs-dataframe formats', (
         values: ['info', 'debug', 'error'],
       },
       {
-        name: 'attributes',
+        name: 'labels',
         type: FieldType.other,
         config: {},
         values: [