DataFrame: convert from row based to a columnar value format (#18391)

This commit is contained in:
Ryan McKinley
2019-08-15 09:18:51 -07:00
committed by GitHub
parent 350b9a9494
commit e59bae55d9
63 changed files with 1856 additions and 995 deletions

View File

@@ -10,7 +10,6 @@ import {
findCommonLabels,
findUniqueLabels,
getLogLevel,
FieldCache,
FieldType,
getLogLevelFromKey,
LogRowModel,
@@ -20,11 +19,13 @@ import {
LogsParser,
LogLabelStatsModel,
LogsDedupStrategy,
DataFrameHelper,
GraphSeriesXY,
LoadingState,
dateTime,
toUtc,
NullValueMode,
toDataFrame,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
import { hasAnsiCodes } from 'app/core/utils/text';
@@ -245,10 +246,11 @@ export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): Grap
return a[1] - b[1];
});
// EEEP: converts GraphSeriesXY to DataFrame and back again!
const data = toDataFrame(series);
const points = getFlotPairs({
rows: series.datapoints,
xIndex: 1,
yIndex: 0,
xField: data.fields[1],
yField: data.fields[0],
nullValueMode: NullValueMode.Null,
});
@@ -336,14 +338,56 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
for (let i = 0; i < logSeries.length; i++) {
const series = logSeries[i];
const fieldCache = new FieldCache(series.fields);
const data = new DataFrameHelper(series);
const uniqueLabels = findUniqueLabels(series.labels, commonLabels);
if (Object.keys(uniqueLabels).length > 0) {
hasUniqueLabels = true;
}
for (let j = 0; j < series.rows.length; j++) {
rows.push(processLogSeriesRow(series, fieldCache, j, uniqueLabels));
const timeFieldIndex = data.getFirstFieldOfType(FieldType.time);
const stringField = data.getFirstFieldOfType(FieldType.string);
const logLevelField = data.getFieldByName('level');
let seriesLogLevel: LogLevel | undefined = undefined;
if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
seriesLogLevel = getLogLevelFromKey(series.labels['level']);
}
for (let j = 0; j < data.length; j++) {
const ts = timeFieldIndex.values.get(j);
const time = dateTime(ts);
const timeEpochMs = time.valueOf();
const timeFromNow = time.fromNow();
const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
const timeUtc = toUtc(ts).format('YYYY-MM-DD HH:mm:ss');
const message = stringField.values.get(j);
let logLevel = LogLevel.unknown;
if (logLevelField) {
logLevel = getLogLevelFromKey(logLevelField.values.get(j));
} else if (seriesLogLevel) {
logLevel = seriesLogLevel;
} else {
logLevel = getLogLevel(message);
}
const hasAnsi = hasAnsiCodes(message);
const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
rows.push({
logLevel,
timeFromNow,
timeEpochMs,
timeLocal,
timeUtc,
uniqueLabels,
hasAnsi,
searchWords,
entry: hasAnsi ? ansicolor.strip(message) : message,
raw: message,
labels: series.labels,
timestamp: ts,
});
}
}
@@ -373,49 +417,3 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
rows,
};
}
/**
 * Builds a LogRowModel from one row of a row-based DataFrame.
 *
 * Extracts the timestamp from the first time-typed field and the log line
 * from the first string-typed field, derives display-formatted times, and
 * resolves the log level from (in priority order) a 'level' field, a
 * 'level' series label, or by scanning the message text.
 *
 * @param series      source frame; `series.rows` holds row-based values
 * @param fieldCache  field lookup helper built from `series.fields`
 * @param rowIndex    index of the row within `series.rows`
 * @param uniqueLabels labels unique to this series vs. the common label set
 */
export function processLogSeriesRow(
  series: DataFrame,
  fieldCache: FieldCache,
  rowIndex: number,
  uniqueLabels: Labels
): LogRowModel {
  const row = series.rows[rowIndex];
  // NOTE(review): assumes the frame has at least one time field and one
  // string field — getFirstFieldOfType(...) is dereferenced unguarded.
  const timeFieldIndex = fieldCache.getFirstFieldOfType(FieldType.time).index;
  const ts = row[timeFieldIndex];
  const stringFieldIndex = fieldCache.getFirstFieldOfType(FieldType.string).index;
  const message = row[stringFieldIndex];
  const time = dateTime(ts);
  const timeEpochMs = time.valueOf();
  const timeFromNow = time.fromNow();
  const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
  const timeUtc = toUtc(ts).format('YYYY-MM-DD HH:mm:ss');
  let logLevel = LogLevel.unknown;
  // Level resolution priority: per-row 'level' field > series-level
  // 'level' label > heuristic scan of the message text.
  const logLevelField = fieldCache.getFieldByName('level');
  if (logLevelField) {
    logLevel = getLogLevelFromKey(row[logLevelField.index]);
  } else if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
    logLevel = getLogLevelFromKey(series.labels['level']);
  } else {
    logLevel = getLogLevel(message);
  }
  const hasAnsi = hasAnsiCodes(message);
  const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
  return {
    logLevel,
    timeFromNow,
    timeEpochMs,
    timeLocal,
    timeUtc,
    uniqueLabels,
    hasAnsi,
    searchWords,
    // 'entry' is the display text: ANSI escape codes are stripped so the
    // UI renders clean text; 'raw' preserves the original message.
    entry: hasAnsi ? ansicolor.strip(message) : message,
    raw: message,
    labels: series.labels,
    timestamp: ts,
  };
}

View File

@@ -1,4 +1,13 @@
import { DataFrame, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/data';
import {
DataFrame,
FieldType,
LogsModel,
LogsMetaKind,
LogsDedupStrategy,
LogLevel,
DataFrameHelper,
toDataFrame,
} from '@grafana/data';
import {
dedupLogRows,
calculateFieldStats,
@@ -344,47 +353,46 @@ describe('dataFrameToLogsModel', () => {
it('given series without correct series name should return empty logs model', () => {
const series: DataFrame[] = [
{
toDataFrame({
fields: [],
rows: [],
},
}),
];
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given series without a time field should return empty logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
fields: [
{
name: 'message',
type: FieldType.string,
values: [],
},
],
rows: [],
},
}),
];
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given series without a string field should return empty logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
fields: [
{
name: 'time',
type: FieldType.time,
values: [],
},
],
rows: [],
},
}),
];
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given one series should return expected logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
labels: {
filename: '/var/log/grafana/grafana.log',
job: 'grafana',
@@ -393,26 +401,21 @@ describe('dataFrameToLogsModel', () => {
{
name: 'time',
type: FieldType.time,
values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
},
{
name: 'message',
type: FieldType.string,
values: [
't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
],
},
],
rows: [
[
'2019-04-26T09:28:11.352440161Z',
't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
],
[
'2019-04-26T14:42:50.991981292Z',
't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
],
],
meta: {
limit: 1000,
},
},
}),
];
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeFalsy();
@@ -450,23 +453,25 @@ describe('dataFrameToLogsModel', () => {
it('given one series without labels should return expected logs model', () => {
const series: DataFrame[] = [
{
new DataFrameHelper({
fields: [
{
name: 'time',
type: FieldType.time,
values: ['1970-01-01T00:00:01Z'],
},
{
name: 'message',
type: FieldType.string,
values: ['WARN boooo'],
},
{
name: 'level',
type: FieldType.string,
values: ['dbug'],
},
],
rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']],
},
}),
];
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.rows).toHaveLength(1);
@@ -482,7 +487,7 @@ describe('dataFrameToLogsModel', () => {
it('given multiple series should return expected logs model', () => {
const series: DataFrame[] = [
{
toDataFrame({
labels: {
foo: 'bar',
baz: '1',
@@ -492,15 +497,16 @@ describe('dataFrameToLogsModel', () => {
{
name: 'ts',
type: FieldType.time,
values: ['1970-01-01T00:00:01Z'],
},
{
name: 'line',
type: FieldType.string,
values: ['WARN boooo'],
},
],
rows: [['1970-01-01T00:00:01Z', 'WARN boooo']],
},
{
}),
toDataFrame({
name: 'logs',
labels: {
foo: 'bar',
@@ -511,14 +517,15 @@ describe('dataFrameToLogsModel', () => {
{
name: 'time',
type: FieldType.time,
values: ['1970-01-01T00:00:00Z', '1970-01-01T00:00:02Z'],
},
{
name: 'message',
type: FieldType.string,
values: ['INFO 1', 'INFO 2'],
},
],
rows: [['1970-01-01T00:00:00Z', 'INFO 1'], ['1970-01-01T00:00:02Z', 'INFO 2']],
},
}),
];
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeTruthy();