mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Logs: move logs-functions from grafana-data to grafana-main (#55457)
* logs: move utils from grafana-data to grafana * logs: adjust imports * updated betterer checksum file
This commit is contained in:
@@ -4646,6 +4646,13 @@ exports[`better eslint`] = {
|
||||
"public/app/features/logs/components/logParser.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
],
|
||||
"public/app/features/logs/utils.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "2"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "3"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "4"]
|
||||
],
|
||||
"public/app/features/manage-dashboards/DashboardImportPage.tsx:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
|
||||
|
@@ -18,8 +18,6 @@ import {
|
||||
FieldWithIndex,
|
||||
findCommonLabels,
|
||||
findUniqueLabels,
|
||||
getLogLevel,
|
||||
getLogLevelFromKey,
|
||||
Labels,
|
||||
LoadingState,
|
||||
LogLevel,
|
||||
@@ -31,7 +29,6 @@ import {
|
||||
MutableDataFrame,
|
||||
rangeUtil,
|
||||
ScopedVars,
|
||||
sortInAscendingOrder,
|
||||
textUtil,
|
||||
TimeRange,
|
||||
toDataFrame,
|
||||
@@ -42,6 +39,7 @@ import { BarAlignment, GraphDrawStyle, StackingMode } from '@grafana/schema';
|
||||
import { ansicolor, colors } from '@grafana/ui';
|
||||
import { getThemeColor } from 'app/core/utils/colors';
|
||||
|
||||
import { getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from '../features/logs/utils';
|
||||
export const LIMIT_LABEL = 'Line limit';
|
||||
export const COMMON_LABELS = 'Common labels';
|
||||
|
||||
|
@@ -1,16 +1,10 @@
|
||||
import { AnyAction, createAction, PayloadAction } from '@reduxjs/toolkit';
|
||||
|
||||
import {
|
||||
AbsoluteTimeRange,
|
||||
dateTimeForTimeZone,
|
||||
LoadingState,
|
||||
RawTimeRange,
|
||||
sortLogsResult,
|
||||
TimeRange,
|
||||
} from '@grafana/data';
|
||||
import { AbsoluteTimeRange, dateTimeForTimeZone, LoadingState, RawTimeRange, TimeRange } from '@grafana/data';
|
||||
import { getTemplateSrv } from '@grafana/runtime';
|
||||
import { RefreshPicker } from '@grafana/ui';
|
||||
import { getTimeRange, refreshIntervalToSortOrder, stopQueryState } from 'app/core/utils/explore';
|
||||
import { sortLogsResult } from 'app/features/logs/utils';
|
||||
import { getFiscalYearStartMonth, getTimeZone } from 'app/features/profile/state/selectors';
|
||||
import { ExploreItemState, ThunkResult } from 'app/types';
|
||||
import { ExploreId } from 'app/types/explore';
|
||||
|
@@ -8,7 +8,6 @@ import {
|
||||
FieldType,
|
||||
getDisplayProcessor,
|
||||
PanelData,
|
||||
sortLogsResult,
|
||||
standardTransformers,
|
||||
DataQuery,
|
||||
} from '@grafana/data';
|
||||
@@ -16,6 +15,7 @@ import { config } from '@grafana/runtime';
|
||||
|
||||
import { dataFrameToLogsModel } from '../../../core/logsModel';
|
||||
import { refreshIntervalToSortOrder } from '../../../core/utils/explore';
|
||||
import { sortLogsResult } from '../../../features/logs/utils';
|
||||
import { ExplorePanelData } from '../../../types';
|
||||
import { preProcessPanelData } from '../../query/state/runRequest';
|
||||
|
||||
|
@@ -2,18 +2,11 @@ import { css, cx } from '@emotion/css';
|
||||
import memoizeOne from 'memoize-one';
|
||||
import React, { PureComponent } from 'react';
|
||||
|
||||
import {
|
||||
calculateFieldStats,
|
||||
calculateLogsLabelStats,
|
||||
calculateStats,
|
||||
Field,
|
||||
getParser,
|
||||
LinkModel,
|
||||
LogRowModel,
|
||||
GrafanaTheme2,
|
||||
} from '@grafana/data';
|
||||
import { Field, LinkModel, LogRowModel, GrafanaTheme2 } from '@grafana/data';
|
||||
import { withTheme2, Themeable2, Icon, Tooltip } from '@grafana/ui';
|
||||
|
||||
import { calculateFieldStats, calculateLogsLabelStats, calculateStats, getParser } from '../utils';
|
||||
|
||||
import { LogDetailsRow } from './LogDetailsRow';
|
||||
import { getLogRowStyles } from './getLogRowStyles';
|
||||
import { getAllFields } from './logParser';
|
||||
|
@@ -9,13 +9,13 @@ import {
|
||||
TimeZone,
|
||||
DataQueryResponse,
|
||||
dateTimeFormat,
|
||||
checkLogsError,
|
||||
escapeUnescapedString,
|
||||
GrafanaTheme2,
|
||||
CoreApp,
|
||||
} from '@grafana/data';
|
||||
import { styleMixins, withTheme2, Themeable2, Icon, Tooltip } from '@grafana/ui';
|
||||
|
||||
import { checkLogsError, escapeUnescapedString } from '../utils';
|
||||
|
||||
import { LogDetails } from './LogDetails';
|
||||
import { LogLabels } from './LogLabels';
|
||||
import {
|
||||
|
@@ -1,18 +1,11 @@
|
||||
import memoizeOne from 'memoize-one';
|
||||
import React, { PureComponent } from 'react';
|
||||
|
||||
import {
|
||||
TimeZone,
|
||||
LogsDedupStrategy,
|
||||
LogRowModel,
|
||||
Field,
|
||||
LinkModel,
|
||||
LogsSortOrder,
|
||||
sortLogRows,
|
||||
CoreApp,
|
||||
} from '@grafana/data';
|
||||
import { TimeZone, LogsDedupStrategy, LogRowModel, Field, LinkModel, LogsSortOrder, CoreApp } from '@grafana/data';
|
||||
import { withTheme2, Themeable2 } from '@grafana/ui';
|
||||
|
||||
import { sortLogRows } from '../utils';
|
||||
|
||||
//Components
|
||||
import { LogRow } from './LogRow';
|
||||
import { RowContextOptions } from './LogRowContextProvider';
|
||||
|
@@ -1,6 +1,8 @@
|
||||
import memoizeOne from 'memoize-one';
|
||||
|
||||
import { Field, FieldType, getParser, LinkModel, LogRowModel } from '@grafana/data';
|
||||
import { Field, FieldType, LinkModel, LogRowModel } from '@grafana/data';
|
||||
|
||||
import { getParser } from '../utils';
|
||||
|
||||
import { MAX_CHARACTERS } from './LogRowMessage';
|
||||
|
||||
|
370
public/app/features/logs/utils.test.ts
Normal file
370
public/app/features/logs/utils.test.ts
Normal file
@@ -0,0 +1,370 @@
|
||||
import { Labels, LogLevel, LogsModel, LogRowModel, LogsSortOrder, MutableDataFrame } from '@grafana/data';
|
||||
|
||||
import {
|
||||
getLogLevel,
|
||||
calculateLogsLabelStats,
|
||||
calculateFieldStats,
|
||||
getParser,
|
||||
LogsParsers,
|
||||
calculateStats,
|
||||
getLogLevelFromKey,
|
||||
sortLogsResult,
|
||||
checkLogsError,
|
||||
} from './utils';
|
||||
|
||||
describe('getLoglevel()', () => {
|
||||
it('returns no log level on empty line', () => {
|
||||
expect(getLogLevel('')).toBe(LogLevel.unknown);
|
||||
});
|
||||
|
||||
it('returns no log level on when level is part of a word', () => {
|
||||
expect(getLogLevel('who warns us')).toBe(LogLevel.unknown);
|
||||
});
|
||||
|
||||
it('returns same log level for long and short version', () => {
|
||||
expect(getLogLevel('[Warn]')).toBe(LogLevel.warning);
|
||||
expect(getLogLevel('[Warning]')).toBe(LogLevel.warning);
|
||||
expect(getLogLevel('[Warn]')).toBe('warning');
|
||||
});
|
||||
|
||||
it('returns correct log level when level is capitalized', () => {
|
||||
expect(getLogLevel('WARN')).toBe(LogLevel.warn);
|
||||
});
|
||||
|
||||
it('returns log level on line contains a log level', () => {
|
||||
expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn);
|
||||
expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn);
|
||||
});
|
||||
|
||||
it('returns first log level found', () => {
|
||||
expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn);
|
||||
expect(getLogLevel('WARN this is a non-critical message')).toBe(LogLevel.warn);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLogLevelFromKey()', () => {
|
||||
it('returns correct log level', () => {
|
||||
expect(getLogLevelFromKey('info')).toBe(LogLevel.info);
|
||||
});
|
||||
it('returns correct log level when level is capitalized', () => {
|
||||
expect(getLogLevelFromKey('INFO')).toBe(LogLevel.info);
|
||||
});
|
||||
it('returns unknown log level when level is integer', () => {
|
||||
expect(getLogLevelFromKey(1)).toBe(LogLevel.unknown);
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateLogsLabelStats()', () => {
|
||||
test('should return no stats for empty rows', () => {
|
||||
expect(calculateLogsLabelStats([], '')).toEqual([]);
|
||||
});
|
||||
|
||||
test('should return no stats of label is not found', () => {
|
||||
const rows = [
|
||||
{
|
||||
entry: 'foo 1',
|
||||
labels: {
|
||||
foo: 'bar',
|
||||
} as Labels,
|
||||
},
|
||||
] as LogRowModel[];
|
||||
|
||||
expect(calculateLogsLabelStats(rows, 'baz')).toEqual([]);
|
||||
});
|
||||
|
||||
test('should return stats for found labels', () => {
|
||||
const rows = [
|
||||
{
|
||||
entry: 'foo 1',
|
||||
labels: {
|
||||
foo: 'bar',
|
||||
} as Labels,
|
||||
},
|
||||
{
|
||||
entry: 'foo 0',
|
||||
labels: {
|
||||
foo: 'xxx',
|
||||
} as Labels,
|
||||
},
|
||||
{
|
||||
entry: 'foo 2',
|
||||
labels: {
|
||||
foo: 'bar',
|
||||
} as Labels,
|
||||
},
|
||||
] as LogRowModel[];
|
||||
|
||||
expect(calculateLogsLabelStats(rows, 'foo')).toMatchObject([
|
||||
{
|
||||
value: 'bar',
|
||||
count: 2,
|
||||
},
|
||||
{
|
||||
value: 'xxx',
|
||||
count: 1,
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('LogsParsers', () => {
|
||||
describe('logfmt', () => {
|
||||
const parser = LogsParsers.logfmt;
|
||||
|
||||
test('should detect format', () => {
|
||||
expect(parser.test('foo')).toBeFalsy();
|
||||
expect(parser.test('foo=bar')).toBeTruthy();
|
||||
});
|
||||
|
||||
test('should return detected fields', () => {
|
||||
expect(
|
||||
parser.getFields(
|
||||
'foo=bar baz="42 + 1" msg="[resolver] received A record \\"127.0.0.1\\" for \\"localhost.\\" from udp:192.168.65.1" time(ms)=50 label{foo}=bar'
|
||||
)
|
||||
).toEqual([
|
||||
'foo=bar',
|
||||
'baz="42 + 1"',
|
||||
'msg="[resolver] received A record \\"127.0.0.1\\" for \\"localhost.\\" from udp:192.168.65.1"',
|
||||
'time(ms)=50',
|
||||
'label{foo}=bar',
|
||||
]);
|
||||
});
|
||||
|
||||
test('should return label for field', () => {
|
||||
expect(parser.getLabelFromField('foo=bar')).toBe('foo');
|
||||
expect(parser.getLabelFromField('time(ms)=50')).toBe('time(ms)');
|
||||
});
|
||||
|
||||
test('should return value for field', () => {
|
||||
expect(parser.getValueFromField('foo=bar')).toBe('bar');
|
||||
expect(parser.getValueFromField('time(ms)=50')).toBe('50');
|
||||
expect(
|
||||
parser.getValueFromField(
|
||||
'msg="[resolver] received A record \\"127.0.0.1\\" for \\"localhost.\\" from udp:192.168.65.1"'
|
||||
)
|
||||
).toBe('"[resolver] received A record \\"127.0.0.1\\" for \\"localhost.\\" from udp:192.168.65.1"');
|
||||
});
|
||||
|
||||
test('should build a valid value matcher', () => {
|
||||
const matcher = parser.buildMatcher('foo');
|
||||
const match = 'foo=bar'.match(matcher);
|
||||
expect(match).toBeDefined();
|
||||
expect(match![1]).toBe('bar');
|
||||
});
|
||||
|
||||
test('should build a valid complex value matcher', () => {
|
||||
const matcher = parser.buildMatcher('time(ms)');
|
||||
const match = 'time(ms)=50'.match(matcher);
|
||||
expect(match).toBeDefined();
|
||||
expect(match![1]).toBe('50');
|
||||
});
|
||||
});
|
||||
|
||||
describe('JSON', () => {
|
||||
const parser = LogsParsers.JSON;
|
||||
|
||||
test('should detect format', () => {
|
||||
expect(parser.test('foo')).toBeFalsy();
|
||||
expect(parser.test('"foo"')).toBeFalsy();
|
||||
expect(parser.test('{"foo":"bar"}')).toBeTruthy();
|
||||
});
|
||||
|
||||
test('should return detected fields', () => {
|
||||
expect(parser.getFields('{ "foo" : "bar", "baz" : 42 }')).toEqual(['"foo":"bar"', '"baz":42']);
|
||||
});
|
||||
|
||||
test('should return detected fields for nested quotes', () => {
|
||||
expect(parser.getFields(`{"foo":"bar: '[value=\\"42\\"]'"}`)).toEqual([`"foo":"bar: '[value=\\"42\\"]'"`]);
|
||||
});
|
||||
|
||||
test('should return label for field', () => {
|
||||
expect(parser.getLabelFromField('"foo" : "bar"')).toBe('foo');
|
||||
expect(parser.getLabelFromField('"docker.memory.fail.count":0')).toBe('docker.memory.fail.count');
|
||||
});
|
||||
|
||||
test('should return value for field', () => {
|
||||
expect(parser.getValueFromField('"foo" : "bar"')).toBe('"bar"');
|
||||
expect(parser.getValueFromField('"foo" : 42')).toBe('42');
|
||||
expect(parser.getValueFromField('"foo" : 42.1')).toBe('42.1');
|
||||
});
|
||||
|
||||
test('should build a valid value matcher for strings', () => {
|
||||
const matcher = parser.buildMatcher('foo');
|
||||
const match = '{"foo":"bar"}'.match(matcher);
|
||||
expect(match).toBeDefined();
|
||||
expect(match![1]).toBe('bar');
|
||||
});
|
||||
|
||||
test('should build a valid value matcher for integers', () => {
|
||||
const matcher = parser.buildMatcher('foo');
|
||||
const match = '{"foo":42.1}'.match(matcher);
|
||||
expect(match).toBeDefined();
|
||||
expect(match![1]).toBe('42.1');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateFieldStats()', () => {
|
||||
test('should return no stats for empty rows', () => {
|
||||
expect(calculateFieldStats([], /foo=(.*)/)).toEqual([]);
|
||||
});
|
||||
|
||||
test('should return no stats if extractor does not match', () => {
|
||||
const rows = [
|
||||
{
|
||||
entry: 'foo=bar',
|
||||
},
|
||||
] as LogRowModel[];
|
||||
|
||||
expect(calculateFieldStats(rows, /baz=(.*)/)).toEqual([]);
|
||||
});
|
||||
|
||||
test('should return stats for found field', () => {
|
||||
const rows = [
|
||||
{
|
||||
entry: 'foo="42 + 1"',
|
||||
},
|
||||
{
|
||||
entry: 'foo=503 baz=foo',
|
||||
},
|
||||
{
|
||||
entry: 'foo="42 + 1"',
|
||||
},
|
||||
{
|
||||
entry: 't=2018-12-05T07:44:59+0000 foo=503',
|
||||
},
|
||||
] as LogRowModel[];
|
||||
|
||||
expect(calculateFieldStats(rows, /foo=("[^"]*"|\S+)/)).toMatchObject([
|
||||
{
|
||||
value: '"42 + 1"',
|
||||
count: 2,
|
||||
},
|
||||
{
|
||||
value: '503',
|
||||
count: 2,
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateStats()', () => {
|
||||
test('should return no stats for empty array', () => {
|
||||
expect(calculateStats([])).toEqual([]);
|
||||
});
|
||||
|
||||
test('should return correct stats', () => {
|
||||
const values = ['one', 'one', null, undefined, 'two'];
|
||||
expect(calculateStats(values)).toMatchObject([
|
||||
{
|
||||
value: 'one',
|
||||
count: 2,
|
||||
proportion: 2 / 3,
|
||||
},
|
||||
{
|
||||
value: 'two',
|
||||
count: 1,
|
||||
proportion: 1 / 3,
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getParser()', () => {
|
||||
test('should return no parser on empty line', () => {
|
||||
expect(getParser('')).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should return no parser on unknown line pattern', () => {
|
||||
expect(getParser('To Be or not to be')).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should return logfmt parser on key value patterns', () => {
|
||||
expect(getParser('foo=bar baz="41 + 1')).toEqual(LogsParsers.logfmt);
|
||||
});
|
||||
|
||||
test('should return JSON parser on JSON log lines', () => {
|
||||
// TODO implement other JSON value types than string
|
||||
expect(getParser('{"foo": "bar", "baz": "41 + 1"}')).toEqual(LogsParsers.JSON);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sortLogsResult', () => {
|
||||
const firstRow: LogRowModel = {
|
||||
rowIndex: 0,
|
||||
entryFieldIndex: 0,
|
||||
dataFrame: new MutableDataFrame(),
|
||||
entry: '',
|
||||
hasAnsi: false,
|
||||
hasUnescapedContent: false,
|
||||
labels: {},
|
||||
logLevel: LogLevel.info,
|
||||
raw: '',
|
||||
timeEpochMs: 0,
|
||||
timeEpochNs: '0',
|
||||
timeFromNow: '',
|
||||
timeLocal: '',
|
||||
timeUtc: '',
|
||||
uid: '1',
|
||||
};
|
||||
const sameAsFirstRow = firstRow;
|
||||
const secondRow: LogRowModel = {
|
||||
rowIndex: 1,
|
||||
entryFieldIndex: 0,
|
||||
dataFrame: new MutableDataFrame(),
|
||||
entry: '',
|
||||
hasAnsi: false,
|
||||
hasUnescapedContent: false,
|
||||
labels: {},
|
||||
logLevel: LogLevel.info,
|
||||
raw: '',
|
||||
timeEpochMs: 10,
|
||||
timeEpochNs: '10000000',
|
||||
timeFromNow: '',
|
||||
timeLocal: '',
|
||||
timeUtc: '',
|
||||
uid: '2',
|
||||
};
|
||||
|
||||
describe('when called with LogsSortOrder.Descending', () => {
|
||||
it('then it should sort descending', () => {
|
||||
const logsResult: LogsModel = {
|
||||
rows: [firstRow, sameAsFirstRow, secondRow],
|
||||
hasUniqueLabels: false,
|
||||
};
|
||||
const result = sortLogsResult(logsResult, LogsSortOrder.Descending);
|
||||
|
||||
expect(result).toEqual({
|
||||
rows: [secondRow, firstRow, sameAsFirstRow],
|
||||
hasUniqueLabels: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when called with LogsSortOrder.Ascending', () => {
|
||||
it('then it should sort ascending', () => {
|
||||
const logsResult: LogsModel = {
|
||||
rows: [secondRow, firstRow, sameAsFirstRow],
|
||||
hasUniqueLabels: false,
|
||||
};
|
||||
const result = sortLogsResult(logsResult, LogsSortOrder.Ascending);
|
||||
|
||||
expect(result).toEqual({
|
||||
rows: [firstRow, sameAsFirstRow, secondRow],
|
||||
hasUniqueLabels: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkLogsError()', () => {
|
||||
const log = {
|
||||
labels: {
|
||||
__error__: 'Error Message',
|
||||
foo: 'boo',
|
||||
} as Labels,
|
||||
} as LogRowModel;
|
||||
test('should return correct error if error is present', () => {
|
||||
expect(checkLogsError(log)).toStrictEqual({ hasError: true, errorMessage: 'Error Message' });
|
||||
});
|
||||
});
|
240
public/app/features/logs/utils.ts
Normal file
240
public/app/features/logs/utils.ts
Normal file
@@ -0,0 +1,240 @@
|
||||
import { countBy, chain, escapeRegExp } from 'lodash';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
FieldType,
|
||||
LogLevel,
|
||||
LogRowModel,
|
||||
LogLabelStatsModel,
|
||||
LogsParser,
|
||||
LogsModel,
|
||||
LogsSortOrder,
|
||||
} from '@grafana/data';
|
||||
|
||||
// Single-field logfmt matcher, used both for detection and field extraction.
// This matches:
// first a label from start of the string or first white space, then any word chars until "="
// second either an empty quotes, or anything that starts with quote and ends with unescaped quote,
// or any non whitespace chars that do not start with quote
const LOGFMT_REGEXP = /(?:^|\s)([\w\(\)\[\]\{\}]+)=(""|(?:".*?[^\\]"|[^"\s]\S*))/;
|
||||
|
||||
/**
|
||||
* Returns the log level of a log line.
|
||||
* Parse the line for level words. If no level is found, it returns `LogLevel.unknown`.
|
||||
*
|
||||
* Example: `getLogLevel('WARN 1999-12-31 this is great') // LogLevel.warn`
|
||||
*/
|
||||
export function getLogLevel(line: string): LogLevel {
|
||||
if (!line) {
|
||||
return LogLevel.unknown;
|
||||
}
|
||||
let level = LogLevel.unknown;
|
||||
let currentIndex: number | undefined = undefined;
|
||||
|
||||
for (const key of Object.keys(LogLevel)) {
|
||||
const regexp = new RegExp(`\\b${key}\\b`, 'i');
|
||||
const result = regexp.exec(line);
|
||||
|
||||
if (result) {
|
||||
if (currentIndex === undefined || result.index < currentIndex) {
|
||||
level = (LogLevel as any)[key];
|
||||
currentIndex = result.index;
|
||||
}
|
||||
}
|
||||
}
|
||||
return level;
|
||||
}
|
||||
|
||||
export function getLogLevelFromKey(key: string | number): LogLevel {
|
||||
const level = (LogLevel as any)[key.toString().toLowerCase()];
|
||||
if (level) {
|
||||
return level;
|
||||
}
|
||||
|
||||
return LogLevel.unknown;
|
||||
}
|
||||
|
||||
export function addLogLevelToSeries(series: DataFrame, lineIndex: number): DataFrame {
|
||||
const levels = new ArrayVector<LogLevel>();
|
||||
const lines = series.fields[lineIndex];
|
||||
for (let i = 0; i < lines.values.length; i++) {
|
||||
const line = lines.values.get(lineIndex);
|
||||
levels.buffer.push(getLogLevel(line));
|
||||
}
|
||||
|
||||
return {
|
||||
...series, // Keeps Tags, RefID etc
|
||||
fields: [
|
||||
...series.fields,
|
||||
{
|
||||
name: 'LogLevel',
|
||||
type: FieldType.string,
|
||||
values: levels,
|
||||
config: {},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
export const LogsParsers: { [name: string]: LogsParser } = {
|
||||
JSON: {
|
||||
buildMatcher: (label) => new RegExp(`(?:{|,)\\s*"${label}"\\s*:\\s*"?([\\d\\.]+|[^"]*)"?`),
|
||||
getFields: (line) => {
|
||||
try {
|
||||
const parsed = JSON.parse(line);
|
||||
return Object.keys(parsed).map((key) => {
|
||||
return `"${key}":${JSON.stringify(parsed[key])}`;
|
||||
});
|
||||
} catch {}
|
||||
return [];
|
||||
},
|
||||
getLabelFromField: (field) => (field.match(/^"([^"]+)"\s*:/) || [])[1],
|
||||
getValueFromField: (field) => (field.match(/:\s*(.*)$/) || [])[1],
|
||||
test: (line) => {
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(line);
|
||||
} catch (error) {}
|
||||
// The JSON parser should only be used for log lines that are valid serialized JSON objects.
|
||||
// If it would be used for a string, detected fields would include each letter as a separate field.
|
||||
return typeof parsed === 'object';
|
||||
},
|
||||
},
|
||||
|
||||
logfmt: {
|
||||
buildMatcher: (label) => new RegExp(`(?:^|\\s)${escapeRegExp(label)}=("[^"]*"|\\S+)`),
|
||||
getFields: (line) => {
|
||||
const fields: string[] = [];
|
||||
line.replace(new RegExp(LOGFMT_REGEXP, 'g'), (substring) => {
|
||||
fields.push(substring.trim());
|
||||
return '';
|
||||
});
|
||||
return fields;
|
||||
},
|
||||
getLabelFromField: (field) => (field.match(LOGFMT_REGEXP) || [])[1],
|
||||
getValueFromField: (field) => (field.match(LOGFMT_REGEXP) || [])[2],
|
||||
test: (line) => LOGFMT_REGEXP.test(line),
|
||||
},
|
||||
};
|
||||
|
||||
export function calculateFieldStats(rows: LogRowModel[], extractor: RegExp): LogLabelStatsModel[] {
|
||||
// Consider only rows that satisfy the matcher
|
||||
const rowsWithField = rows.filter((row) => extractor.test(row.entry));
|
||||
const rowCount = rowsWithField.length;
|
||||
|
||||
// Get field value counts for eligible rows
|
||||
const countsByValue = countBy(rowsWithField, (r) => {
|
||||
const row: LogRowModel = r;
|
||||
const match = row.entry.match(extractor);
|
||||
|
||||
return match ? match[1] : null;
|
||||
});
|
||||
return getSortedCounts(countsByValue, rowCount);
|
||||
}
|
||||
|
||||
export function calculateLogsLabelStats(rows: LogRowModel[], label: string): LogLabelStatsModel[] {
|
||||
// Consider only rows that have the given label
|
||||
const rowsWithLabel = rows.filter((row) => row.labels[label] !== undefined);
|
||||
const rowCount = rowsWithLabel.length;
|
||||
|
||||
// Get label value counts for eligible rows
|
||||
const countsByValue = countBy(rowsWithLabel, (row) => (row as LogRowModel).labels[label]);
|
||||
return getSortedCounts(countsByValue, rowCount);
|
||||
}
|
||||
|
||||
export function calculateStats(values: unknown[]): LogLabelStatsModel[] {
|
||||
const nonEmptyValues = values.filter((value) => value !== undefined && value !== null);
|
||||
const countsByValue = countBy(nonEmptyValues);
|
||||
return getSortedCounts(countsByValue, nonEmptyValues.length);
|
||||
}
|
||||
|
||||
const getSortedCounts = (countsByValue: { [value: string]: number }, rowCount: number) => {
|
||||
return chain(countsByValue)
|
||||
.map((count, value) => ({ count, value, proportion: count / rowCount }))
|
||||
.sortBy('count')
|
||||
.reverse()
|
||||
.value();
|
||||
};
|
||||
|
||||
export function getParser(line: string): LogsParser | undefined {
|
||||
let parser;
|
||||
try {
|
||||
if (LogsParsers.JSON.test(line)) {
|
||||
parser = LogsParsers.JSON;
|
||||
}
|
||||
} catch (error) {}
|
||||
|
||||
if (!parser && LogsParsers.logfmt.test(line)) {
|
||||
parser = LogsParsers.logfmt;
|
||||
}
|
||||
|
||||
return parser;
|
||||
}
|
||||
|
||||
export const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
|
||||
// compare milliseconds
|
||||
if (a.timeEpochMs < b.timeEpochMs) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a.timeEpochMs > b.timeEpochMs) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// if milliseconds are equal, compare nanoseconds
|
||||
if (a.timeEpochNs < b.timeEpochNs) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a.timeEpochNs > b.timeEpochNs) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
};
|
||||
|
||||
export const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => {
|
||||
// compare milliseconds
|
||||
if (a.timeEpochMs > b.timeEpochMs) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a.timeEpochMs < b.timeEpochMs) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// if milliseconds are equal, compare nanoseconds
|
||||
if (a.timeEpochNs > b.timeEpochNs) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a.timeEpochNs < b.timeEpochNs) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
};
|
||||
|
||||
export const sortLogsResult = (logsResult: LogsModel | null, sortOrder: LogsSortOrder): LogsModel => {
|
||||
const rows = logsResult ? sortLogRows(logsResult.rows, sortOrder) : [];
|
||||
return logsResult ? { ...logsResult, rows } : { hasUniqueLabels: false, rows };
|
||||
};
|
||||
|
||||
export const sortLogRows = (logRows: LogRowModel[], sortOrder: LogsSortOrder) =>
|
||||
sortOrder === LogsSortOrder.Ascending ? logRows.sort(sortInAscendingOrder) : logRows.sort(sortInDescendingOrder);
|
||||
|
||||
// Currently supports only error condition in Loki logs
|
||||
export const checkLogsError = (logRow: LogRowModel): { hasError: boolean; errorMessage?: string } => {
|
||||
if (logRow.labels.__error__) {
|
||||
return {
|
||||
hasError: true,
|
||||
errorMessage: logRow.labels.__error__,
|
||||
};
|
||||
}
|
||||
return {
|
||||
hasError: false,
|
||||
};
|
||||
};
|
||||
|
||||
export const escapeUnescapedString = (string: string) =>
|
||||
string.replace(/\\r\\n|\\n|\\t|\\r/g, (match: string) => (match.slice(1) === 't' ? '\t' : '\n'));
|
@@ -17,7 +17,6 @@ import {
|
||||
Field,
|
||||
getDefaultTimeRange,
|
||||
AbstractQuery,
|
||||
getLogLevelFromKey,
|
||||
LogLevel,
|
||||
LogRowModel,
|
||||
MetricFindValue,
|
||||
@@ -32,6 +31,7 @@ import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
|
||||
import { RowContextOptions } from '../../../features/logs/components/LogRowContextProvider';
|
||||
import { getLogLevelFromKey } from '../../../features/logs/utils';
|
||||
|
||||
import { ElasticResponse } from './ElasticResponse';
|
||||
import { IndexPattern } from './IndexPattern';
|
||||
|
@@ -21,7 +21,6 @@ import {
|
||||
FieldCache,
|
||||
AbstractQuery,
|
||||
FieldType,
|
||||
getLogLevelFromKey,
|
||||
Labels,
|
||||
LoadingState,
|
||||
LogLevel,
|
||||
@@ -42,6 +41,7 @@ import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_sr
|
||||
|
||||
import { serializeParams } from '../../../core/utils/fetch';
|
||||
import { RowContextOptions } from '../../../features/logs/components/LogRowContextProvider';
|
||||
import { getLogLevelFromKey } from '../../../features/logs/utils';
|
||||
import { renderLegendFormat } from '../prometheus/legend';
|
||||
import { replaceVariables, returnVariables } from '../prometheus/querybuilder/shared/parsingUtils';
|
||||
|
||||
|
@@ -1,4 +1,6 @@
|
||||
import { DataFrame, FieldType, getParser, Labels, LogsParsers } from '@grafana/data';
|
||||
import { DataFrame, FieldType, Labels } from '@grafana/data';
|
||||
|
||||
import { getParser, LogsParsers } from '../../../features/logs/utils';
|
||||
|
||||
export function dataFrameHasLokiError(frame: DataFrame): boolean {
|
||||
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values.toArray() ?? [];
|
||||
|
Reference in New Issue
Block a user