Explore: Use SeriesData format for loki/logs (#16793)

This is the first step towards Explore supporting logs for more
datasources than Loki: in this step, all log processing moves out of
the Loki datasource and into Explore.
- Explore now converts log results returned from datasources to SeriesData
if needed, and for now converts SeriesData to LogsModel (a minimal
conversion sketch follows this list).
- The Loki datasource query now returns SeriesData; all log processing
has been moved into Explore instead.
- Removed key from LogRowModel; log row indexes are used as the
unique key instead.
- Removed id from LogsModel since it appears to be unused.
- Introduced a new FieldCache class that makes it convenient to look up
multiple fields and/or field types and series values (a rough usage
sketch follows below).
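
For reference, here is a minimal sketch of the stream-to-SeriesData shape that
the updated tests below assert (timestamp in column 0, log line in column 1,
parsed labels attached to the series). The types and the parseLabels helper are
simplified stand-ins invented for this illustration, not the actual
result_transformer implementation.

// Illustrative sketch only: simplified stand-ins, not Grafana's real types or code.
interface LogsStreamEntry {
  ts: string;
  line: string;
}

interface LogsStream {
  labels: string; // e.g. '{foo="bar"}'
  entries: LogsStreamEntry[];
}

interface SeriesDataLike {
  fields: Array<{ name: string }>;
  rows: any[][];
  labels: Record<string, string>;
}

// Parse a Loki label selector such as '{foo="bar", baz="1"}' into a plain map.
function parseLabels(labels: string): Record<string, string> {
  const result: Record<string, string> = {};
  const re = /(\w+)="([^"]*)"/g;
  let match = re.exec(labels);
  while (match) {
    result[match[1]] = match[2];
    match = re.exec(labels);
  }
  return result;
}

// Convert one stream into a table-like series: column 0 holds the timestamp,
// column 1 holds the raw log line, and the stream labels sit on the series.
function streamToSeriesDataLike(stream: LogsStream): SeriesDataLike {
  return {
    fields: [{ name: 'ts' }, { name: 'line' }],
    rows: stream.entries.map(entry => [entry.ts, entry.line]),
    labels: parseLabels(stream.labels),
  };
}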

Closes #16287
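
The FieldCache mentioned in the last item can be pictured roughly as below.
This is a hedged sketch: the class name ExampleFieldCache, the method names,
and the simplified Field/FieldType types are invented for illustration, and the
actual class introduced by this commit may expose a different API.

// Illustrative sketch only: simplified stand-ins for Grafana's field types.
enum FieldType {
  time = 'time',
  string = 'string',
  number = 'number',
}

interface Field {
  name: string;
  type?: FieldType;
}

// A FieldCache-style helper: index the fields of a series once,
// then answer repeated name/type lookups without rescanning.
class ExampleFieldCache {
  private indexByName = new Map<string, number>();
  private indexesByType = new Map<FieldType, number[]>();

  constructor(private fields: Field[]) {
    fields.forEach((field, index) => {
      this.indexByName.set(field.name, index);
      const type = field.type || FieldType.string;
      this.indexesByType.set(type, [...(this.indexesByType.get(type) || []), index]);
    });
  }

  getFirstFieldOfType(type: FieldType): Field | undefined {
    const indexes = this.indexesByType.get(type);
    return indexes && indexes.length ? this.fields[indexes[0]] : undefined;
  }

  getFieldIndexByName(name: string): number | undefined {
    return this.indexByName.get(name);
  }
}

// Usage: locate the time and line columns of a logs series, then read row values by index.
const cache = new ExampleFieldCache([
  { name: 'ts', type: FieldType.time },
  { name: 'line', type: FieldType.string },
]);
const timeField = cache.getFirstFieldOfType(FieldType.time); // { name: 'ts', type: 'time' }
const lineIndex = cache.getFieldIndexByName('line'); // 1
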
Author: Marcus Efraimsson
Date: 2019-04-30 18:21:22 +02:00 (committed by GitHub)
Parent: 26bd76b4c2
Commit: fe20dde5db
17 changed files with 682 additions and 455 deletions

@@ -1,122 +1,6 @@
import { LogsStream } from 'app/core/logs_model';
import { logStreamToSeriesData } from './result_transformer';
import { mergeStreamsToLogs, logStreamToSeriesData, seriesDataToLogStream } from './result_transformer';
describe('mergeStreamsToLogs()', () => {
  it('returns empty logs given no streams', () => {
    expect(mergeStreamsToLogs([]).rows).toEqual([]);
  });
  it('returns processed logs from single stream', () => {
    const stream1: LogsStream = {
      labels: '{foo="bar"}',
      entries: [
        {
          line: 'WARN boooo',
          ts: '1970-01-01T00:00:00Z',
        },
      ],
    };
    expect(mergeStreamsToLogs([stream1]).rows).toMatchObject([
      {
        entry: 'WARN boooo',
        labels: { foo: 'bar' },
        key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
        logLevel: 'warning',
        uniqueLabels: {},
      },
    ]);
  });
  it('returns merged logs from multiple streams sorted by time and with unique labels', () => {
    const stream1: LogsStream = {
      labels: '{foo="bar", baz="1"}',
      entries: [
        {
          line: 'WARN boooo',
          ts: '1970-01-01T00:00:01Z',
        },
      ],
    };
    const stream2: LogsStream = {
      labels: '{foo="bar", baz="2"}',
      entries: [
        {
          line: 'INFO 1',
          ts: '1970-01-01T00:00:00Z',
        },
        {
          line: 'INFO 2',
          ts: '1970-01-01T00:00:02Z',
        },
      ],
    };
    expect(mergeStreamsToLogs([stream1, stream2]).rows).toMatchObject([
      {
        entry: 'INFO 2',
        labels: { foo: 'bar', baz: '2' },
        logLevel: 'info',
        uniqueLabels: { baz: '2' },
      },
      {
        entry: 'WARN boooo',
        labels: { foo: 'bar', baz: '1' },
        logLevel: 'warning',
        uniqueLabels: { baz: '1' },
      },
      {
        entry: 'INFO 1',
        labels: { foo: 'bar', baz: '2' },
        logLevel: 'info',
        uniqueLabels: { baz: '2' },
      },
    ]);
  });
  it('detects ANSI codes', () => {
    expect(
      mergeStreamsToLogs([
        {
          labels: '{foo="bar"}',
          entries: [
            {
              line: "foo: 'bar'",
              ts: '1970-01-01T00:00:00Z',
            },
          ],
        },
        {
          labels: '{bar="foo"}',
          entries: [
            {
              line: "bar: 'foo'",
              ts: '1970-01-01T00:00:00Z',
            },
          ],
        },
      ]).rows
    ).toMatchObject([
      {
        entry: "bar: 'foo'",
        hasAnsi: false,
        key: 'EK1970-01-01T00:00:00Z{bar="foo"}',
        labels: { bar: 'foo' },
        logLevel: 'unknown',
        raw: "bar: 'foo'",
      },
      {
        entry: "foo: 'bar'",
        hasAnsi: true,
        key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
        labels: { foo: 'bar' },
        logLevel: 'unknown',
        raw: "foo: 'bar'",
      },
    ]);
  });
});
describe('convert SeriesData to/from LogStream', () => {
describe('convert loki response to SeriesData', () => {
  const streams = [
    {
      labels: '{foo="bar"}',
@@ -143,9 +27,8 @@ describe('convert SeriesData to/from LogStream', () => {
    expect(data.length).toBe(2);
    expect(data[0].labels['foo']).toEqual('bar');
    expect(data[0].rows[0][0]).toEqual(streams[0].entries[0].ts);
    const roundtrip = data.map(series => seriesDataToLogStream(series));
    expect(roundtrip.length).toBe(2);
    expect(roundtrip[0].labels).toEqual(streams[0].labels);
    expect(data[0].rows[0][1]).toEqual(streams[0].entries[0].line);
    expect(data[1].rows[0][0]).toEqual(streams[1].entries[0].ts);
    expect(data[1].rows[0][1]).toEqual(streams[1].entries[0].line);
  });
});