Refactor: Move LogLevel and Labels utils to @grafana/ui (#16285)
* rename Tags to Labels in SeriesData
* move some logs stuff to grafana/ui
* add roundtrip tests
committed by Torkel Ödegaard
parent d0d5b38572
commit bfba47c6c4
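After this change the label and log-level helpers live in @grafana/ui instead of the Loki result transformer. A minimal usage sketch, assuming the helpers are imported from the package exactly as in the diff below; variable names and selector strings are illustrative, taken from the unit tests in this commit:

import { findCommonLabels, findUniqueLabels, formatLabels, getLogLevel, parseLabels } from '@grafana/ui';

// Selector strings become label maps
const a = parseLabels('{job="loki", instance="a"}'); // { job: 'loki', instance: 'a' }
const b = parseLabels('{job="loki", instance="b"}'); // { job: 'loki', instance: 'b' }

// Labels shared by all sets, and the per-set remainder
const common = findCommonLabels([a, b]); // { job: 'loki' }
const unique = findUniqueLabels(a, common); // { instance: 'a' }

// Back to a selector string (keys are sorted)
formatLabels(common); // '{job="loki"}'

// Log-level detection on a raw line
getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad'); // LogLevel.warn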
@@ -1,92 +1,6 @@
import { LogLevel, LogsStream } from 'app/core/logs_model';
import { LogsStream } from 'app/core/logs_model';

import {
  findCommonLabels,
  findUniqueLabels,
  formatLabels,
  getLogLevel,
  mergeStreamsToLogs,
  parseLabels,
} from './result_transformer';

describe('getLogLevel()', () => {
  it('returns no log level on empty line', () => {
    expect(getLogLevel('')).toBe(LogLevel.unknown);
  });

  it('returns no log level when level is part of a word', () => {
    expect(getLogLevel('this is information')).toBe(LogLevel.unknown);
  });

  it('returns same log level for long and short version', () => {
    expect(getLogLevel('[Warn]')).toBe(LogLevel.warning);
    expect(getLogLevel('[Warning]')).toBe(LogLevel.warning);
  });

  it('returns log level on line containing a log level', () => {
    expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn);
    expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn);
  });

  it('returns first log level found', () => {
    expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn);
  });
});

describe('parseLabels()', () => {
  it('returns no labels on empty labels string', () => {
    expect(parseLabels('')).toEqual({});
    expect(parseLabels('{}')).toEqual({});
  });

  it('returns labels on labels string', () => {
    expect(parseLabels('{foo="bar", baz="42"}')).toEqual({ foo: 'bar', baz: '42' });
  });
});

describe('formatLabels()', () => {
  it('returns no labels on empty label set', () => {
    expect(formatLabels({})).toEqual('');
    expect(formatLabels({}, 'foo')).toEqual('foo');
  });

  it('returns label string on label set', () => {
    expect(formatLabels({ foo: 'bar', baz: '42' })).toEqual('{baz="42", foo="bar"}');
  });
});

describe('findCommonLabels()', () => {
  it('returns no common labels on empty sets', () => {
    expect(findCommonLabels([{}])).toEqual({});
    expect(findCommonLabels([{}, {}])).toEqual({});
  });

  it('returns no common labels on differing sets', () => {
    expect(findCommonLabels([{ foo: 'bar' }, {}])).toEqual({});
    expect(findCommonLabels([{}, { foo: 'bar' }])).toEqual({});
    expect(findCommonLabels([{ baz: '42' }, { foo: 'bar' }])).toEqual({});
    expect(findCommonLabels([{ foo: '42', baz: 'bar' }, { foo: 'bar' }])).toEqual({});
  });

  it('returns the single labels set as common labels', () => {
    expect(findCommonLabels([{ foo: 'bar' }])).toEqual({ foo: 'bar' });
  });
});

describe('findUniqueLabels()', () => {
  it('returns no uncommon labels on empty sets', () => {
    expect(findUniqueLabels({}, {})).toEqual({});
  });

  it('returns all labels given no common labels', () => {
    expect(findUniqueLabels({ foo: '"bar"' }, {})).toEqual({ foo: '"bar"' });
  });

  it('returns all labels except the common labels', () => {
    expect(findUniqueLabels({ foo: '"bar"', baz: '"42"' }, { foo: '"bar"' })).toEqual({ baz: '"42"' });
  });
});
import { mergeStreamsToLogs, logStreamToSeriesData, seriesDataToLogStream } from './result_transformer';

describe('mergeStreamsToLogs()', () => {
  it('returns empty logs given no streams', () => {
@@ -201,3 +115,37 @@ describe('mergeStreamsToLogs()', () => {
    ]);
  });
});

describe('convert SeriesData to/from LogStream', () => {
  const streams = [
    {
      labels: '{foo="bar"}',
      entries: [
        {
          line: "foo: [32m'bar'[39m",
          ts: '1970-01-01T00:00:00Z',
        },
      ],
    },
    {
      labels: '{bar="foo"}',
      entries: [
        {
          line: "bar: 'foo'",
          ts: '1970-01-01T00:00:00Z',
        },
      ],
    },
  ];
  it('converts streams to series', () => {
    const data = streams.map(stream => logStreamToSeriesData(stream));

    expect(data.length).toBe(2);
    expect(data[0].labels['foo']).toEqual('bar');
    expect(data[0].rows[0][0]).toEqual(streams[0].entries[0].ts);

    const roundtrip = data.map(series => seriesDataToLogStream(series));
    expect(roundtrip.length).toBe(2);
    expect(roundtrip[0].labels).toEqual(streams[0].labels);
  });
});

@@ -2,120 +2,27 @@ import ansicolor from 'vendor/ansicolor/ansicolor';
import _ from 'lodash';
import moment from 'moment';

import {
  LogLevel,
  LogsMetaItem,
  LogsModel,
  LogRowModel,
  LogsStream,
  LogsStreamEntry,
  LogsStreamLabels,
  LogsMetaKind,
} from 'app/core/logs_model';
import { LogsMetaItem, LogsModel, LogRowModel, LogsStream, LogsStreamEntry, LogsMetaKind } from 'app/core/logs_model';
import { hasAnsiCodes } from 'app/core/utils/text';
import { DEFAULT_MAX_LINES } from './datasource';

/**
 * Returns the log level of a log line.
 * Parse the line for level words. If no level is found, it returns `LogLevel.unknown`.
 *
 * Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn`
 */
export function getLogLevel(line: string): LogLevel {
  if (!line) {
    return LogLevel.unknown;
  }
  let level: LogLevel;
  Object.keys(LogLevel).forEach(key => {
    if (!level) {
      const regexp = new RegExp(`\\b${key}\\b`, 'i');
      if (regexp.test(line)) {
        level = LogLevel[key];
      }
    }
  });
  if (!level) {
    level = LogLevel.unknown;
  }
  return level;
}
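// Example behaviour, mirroring the unit tests above:
//   getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad') // LogLevel.warn
//   getLogLevel('this is information') // LogLevel.unknown ("info" only appears inside a word)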

/**
 * Regexp to extract Prometheus-style labels
 */
const labelRegexp = /\b(\w+)(!?=~?)"([^"\n]*?)"/g;

/**
 * Returns a map of label keys to value from an input selector string.
 *
 * Example: `parseLabels('{job="foo", instance="bar"}') // {job: "foo", instance: "bar"}`
 */
export function parseLabels(labels: string): LogsStreamLabels {
  const labelsByKey: LogsStreamLabels = {};
  labels.replace(labelRegexp, (_, key, operator, value) => {
    labelsByKey[key] = value;
    return '';
  });
  return labelsByKey;
}
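// Example, mirroring the unit tests above:
//   parseLabels('{foo="bar", baz="42"}') // { foo: 'bar', baz: '42' }
//   parseLabels('{}') // {}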

/**
 * Returns a map of labels that are common to the given label sets.
 */
export function findCommonLabels(labelsSets: LogsStreamLabels[]): LogsStreamLabels {
  return labelsSets.reduce((acc, labels) => {
    if (!labels) {
      throw new Error('Need parsed labels to find common labels.');
    }
    if (!acc) {
      // Initial set
      acc = { ...labels };
    } else {
      // Remove incoming labels that are missing or not matching in value
      Object.keys(labels).forEach(key => {
        if (acc[key] === undefined || acc[key] !== labels[key]) {
          delete acc[key];
        }
      });
      // Remove common labels that are missing from incoming label set
      Object.keys(acc).forEach(key => {
        if (labels[key] === undefined) {
          delete acc[key];
        }
      });
    }
    return acc;
  }, undefined);
}
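// Example, mirroring the unit tests above: a label survives only if every set has the same key and value.
//   findCommonLabels([{ foo: 'bar' }]) // { foo: 'bar' }
//   findCommonLabels([{ foo: '42', baz: 'bar' }, { foo: 'bar' }]) // {}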

/**
 * Returns a map of labels that are in `labels`, but not in `commonLabels`.
 */
export function findUniqueLabels(labels: LogsStreamLabels, commonLabels: LogsStreamLabels): LogsStreamLabels {
  const uncommonLabels: LogsStreamLabels = { ...labels };
  Object.keys(commonLabels).forEach(key => {
    delete uncommonLabels[key];
  });
  return uncommonLabels;
}
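// Example, mirroring the unit tests above:
//   findUniqueLabels({ foo: '"bar"', baz: '"42"' }, { foo: '"bar"' }) // { baz: '"42"' }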

/**
 * Serializes the given labels to a string.
 */
export function formatLabels(labels: LogsStreamLabels, defaultValue = ''): string {
  if (!labels || Object.keys(labels).length === 0) {
    return defaultValue;
  }
  const labelKeys = Object.keys(labels).sort();
  const cleanSelector = labelKeys.map(key => `${key}="${labels[key]}"`).join(', ');
  return ['{', cleanSelector, '}'].join('');
}
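// Example, mirroring the unit tests above: keys are emitted in sorted order.
//   formatLabels({ foo: 'bar', baz: '42' }) // '{baz="42", foo="bar"}'
//   formatLabels({}, 'foo') // 'foo' (the default value for an empty set)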
import {
  parseLabels,
  SeriesData,
  findUniqueLabels,
  Labels,
  findCommonLabels,
  getLogLevel,
  FieldType,
  formatLabels,
  guessFieldTypeFromSeries,
} from '@grafana/ui';

export function processEntry(
  entry: LogsStreamEntry,
  labels: string,
  parsedLabels: LogsStreamLabels,
  uniqueLabels: LogsStreamLabels,
  parsedLabels: Labels,
  uniqueLabels: Labels,
  search: string
): LogRowModel {
  const { line } = entry;
@@ -201,3 +108,48 @@ export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_MAX_LI
    rows: sortedRows,
  };
}

export function logStreamToSeriesData(stream: LogsStream): SeriesData {
  let labels: Labels = stream.parsedLabels;
  if (!labels && stream.labels) {
    labels = parseLabels(stream.labels);
  }
  return {
    labels,
    fields: [{ name: 'ts', type: FieldType.time }, { name: 'line', type: FieldType.string }],
    rows: stream.entries.map(entry => {
      return [entry.ts || entry.timestamp, entry.line];
    }),
  };
}
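// Shape of the conversion, based on the roundtrip test above: a stream with labels '{foo="bar"}' and a
// single entry becomes a SeriesData with parsed labels { foo: 'bar' }, a 'ts' time field, a 'line'
// string field, and one row [entry.ts, entry.line].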

export function seriesDataToLogStream(series: SeriesData): LogsStream {
  let timeIndex = -1;
  let lineIndex = -1;
  for (let i = 0; i < series.fields.length; i++) {
    const field = series.fields[i];
    const type = field.type || guessFieldTypeFromSeries(series, i);
    if (timeIndex < 0 && type === FieldType.time) {
      timeIndex = i;
    }
    if (lineIndex < 0 && type === FieldType.string) {
      lineIndex = i;
    }
  }
  if (timeIndex < 0) {
    throw new Error('Series does not have a time field');
  }
  if (lineIndex < 0) {
    throw new Error('Series does not have a line field');
  }
  return {
    labels: formatLabels(series.labels),
    parsedLabels: series.labels,
    entries: series.rows.map(row => {
      return {
        line: row[lineIndex],
        ts: row[timeIndex],
      };
    }),
  };
}
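// Based on the roundtrip test above: feeding the SeriesData produced by logStreamToSeriesData back in
// yields labels '{foo="bar"}', parsedLabels { foo: 'bar' } and the original { ts, line } entries, so
// stream -> series -> stream preserves the labels string.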