import { LokiLogsStream, LokiResponse } from './types';
import {
  parseLabels,
  FieldType,
  Labels,
  DataFrame,
  ArrayVector,
  MutableDataFrame,
  findUniqueLabels,
} from '@grafana/data';

/**
 * Transforms a LokiLogsStream structure into a DataFrame. Used when doing standard queries.
 */
export function logStreamToDataFrame(stream: LokiLogsStream, reverse?: boolean, refId?: string): DataFrame {
  let labels: Labels = stream.parsedLabels;
  if (!labels && stream.labels) {
    labels = parseLabels(stream.labels);
  }

  const times = new ArrayVector<string>([]);
  const lines = new ArrayVector<string>([]);
  const uids = new ArrayVector<string>([]);

  for (const entry of stream.entries) {
    // The timestamp may arrive as either `ts` or `timestamp` depending on the response shape
    const ts = entry.ts || entry.timestamp;
    times.add(ts);
    lines.add(entry.line);
    // Row id built from the timestamp and the stream's label string
    uids.add(`${ts}_${stream.labels}`);
  }

  if (reverse) {
    times.buffer = times.buffer.reverse();
    lines.buffer = lines.buffer.reverse();
    // Keep the row ids aligned with the reversed times/lines
    uids.buffer = uids.buffer.reverse();
  }

  return {
    refId,
    labels,
    fields: [
      { name: 'ts', type: FieldType.time, config: { title: 'Time' }, values: times }, // Time
      { name: 'line', type: FieldType.string, config: {}, values: lines }, // Line
      { name: 'id', type: FieldType.string, config: {}, values: uids },
    ],
    length: times.length,
  };
}
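
/*
 * Usage sketch (illustrative only). The stream/entry shape below is an assumption
 * inferred from how this function reads `stream.labels` and `stream.entries`:
 *
 *   const frame = logStreamToDataFrame(
 *     {
 *       labels: '{job="varlogs"}',
 *       entries: [{ ts: '2019-09-05T12:04:01.000Z', line: 'level=info msg="hello"' }],
 *     },
 *     false,
 *     'A'
 *   );
 *   // frame.fields => ts, line and id columns; frame.length === 1
 */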

/**
 * Transforms LokiResponse data and appends it to a MutableDataFrame. Used for streaming, where the dataFrame can be
 * a CircularDataFrame that creates a fixed-size rolling buffer.
 * TODO: Probably could be unified with the logStreamToDataFrame function.
 * @param response Loki response containing one or more log streams
 * @param data Needs to have ts, line, labels, id as fields
 */
export function appendResponseToBufferedData(response: LokiResponse, data: MutableDataFrame) {
  // Should we do anything with: response.dropped_entries?

  const streams: LokiLogsStream[] = response.streams;
  if (streams && streams.length) {
    for (const stream of streams) {
      // Find unique labels
      const labels = parseLabels(stream.labels);
      const unique = findUniqueLabels(labels, data.labels || {});

      // Add each line
      for (const entry of stream.entries) {
        const ts = entry.ts || entry.timestamp;
        data.values.ts.add(ts);
        data.values.line.add(entry.line);
        data.values.labels.add(unique);
        data.values.id.add(`${ts}_${stream.labels}`);
      }
    }
  }
}
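
/*
 * Usage sketch (illustrative only). As the doc comment above notes, the target frame
 * must already expose `ts`, `line`, `labels` and `id` fields. A CircularDataFrame from
 * '@grafana/data' (API assumed here) can serve as the fixed-size rolling buffer:
 *
 *   const buffer = new CircularDataFrame({ capacity: 1000, append: 'tail' });
 *   buffer.addField({ name: 'ts', type: FieldType.time });
 *   buffer.addField({ name: 'line', type: FieldType.string });
 *   buffer.addField({ name: 'labels', type: FieldType.other });
 *   buffer.addField({ name: 'id', type: FieldType.string });
 *
 *   appendResponseToBufferedData(response, buffer); // response: a LokiResponse from the live stream
 */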