Explore/Loki: Display live tailed logs in correct order (#18031)

Closes #18027
Author: kay delaney (2019-07-10 13:57:23 +01:00, committed by GitHub)
Parent: ffa9429c68
Commit: bf7fb67f73
4 changed files with 24 additions and 27 deletions


@@ -324,10 +324,6 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
}
}
-const sortedRows = rows.sort((a, b) => {
-return a.timestamp > b.timestamp ? -1 : 1;
-});
// Meta data to display in status
const meta: LogsMetaItem[] = [];
if (_.size(commonLabels) > 0) {
@@ -343,7 +339,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
if (limits.length > 0) {
meta.push({
label: 'Limit',
-value: `${limits[0].meta.limit} (${sortedRows.length} returned)`,
+value: `${limits[0].meta.limit} (${rows.length} returned)`,
kind: LogsMetaKind.String,
});
}
@@ -351,7 +347,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
return {
hasUniqueLabels,
meta,
-rows: sortedRows,
+rows,
};
}


@@ -418,13 +418,6 @@ describe('dataFrameToLogsModel', () => {
expect(logsModel.hasUniqueLabels).toBeFalsy();
expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([
-{
-timestamp: '2019-04-26T14:42:50.991981292Z',
-entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
-labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
-logLevel: 'error',
-uniqueLabels: {},
-},
{
timestamp: '2019-04-26T09:28:11.352440161Z',
entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
@@ -432,6 +425,13 @@ describe('dataFrameToLogsModel', () => {
logLevel: 'info',
uniqueLabels: {},
},
+{
+timestamp: '2019-04-26T14:42:50.991981292Z',
+entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
+labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
+logLevel: 'error',
+uniqueLabels: {},
+},
]);
expect(logsModel.series).toHaveLength(2);
@@ -524,12 +524,6 @@ describe('dataFrameToLogsModel', () => {
expect(logsModel.hasUniqueLabels).toBeTruthy();
expect(logsModel.rows).toHaveLength(3);
expect(logsModel.rows).toMatchObject([
-{
-entry: 'INFO 2',
-labels: { foo: 'bar', baz: '2' },
-logLevel: LogLevel.error,
-uniqueLabels: { baz: '2' },
-},
{
entry: 'WARN boooo',
labels: { foo: 'bar', baz: '1' },
@@ -542,6 +536,12 @@ describe('dataFrameToLogsModel', () => {
logLevel: LogLevel.error,
uniqueLabels: { baz: '2' },
},
+{
+entry: 'INFO 2',
+labels: { foo: 'bar', baz: '2' },
+logLevel: LogLevel.error,
+uniqueLabels: { baz: '2' },
+},
]);
expect(logsModel.series).toHaveLength(2);


@@ -487,11 +487,11 @@ export const getRefIds = (value: any): string[] => {
};
const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
-if (a.timeEpochMs < b.timeEpochMs) {
+if (a.timestamp < b.timestamp) {
return -1;
}
-if (a.timeEpochMs > b.timeEpochMs) {
+if (a.timestamp > b.timestamp) {
return 1;
}
@@ -499,11 +499,11 @@ const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
};
const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => {
-if (a.timeEpochMs > b.timeEpochMs) {
+if (a.timestamp > b.timestamp) {
return -1;
}
-if (a.timeEpochMs < b.timeEpochMs) {
+if (a.timestamp < b.timestamp) {
return 1;
}
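
Both comparators above now order rows by the nanosecond-precision timestamp string instead of the millisecond timeEpochMs number. The following is a minimal TypeScript sketch (not Grafana source) of why that matters for live-tailed Loki logs, which can arrive within the same millisecond; RowLike is a simplified stand-in for LogRowModel and the sample values are made up.

// Sketch only: two rows that land in the same millisecond.
interface RowLike {
  timestamp: string; // RFC3339 with nanosecond precision, as Loki returns it
  timeEpochMs: number; // millisecond precision only
}

const earlier: RowLike = { timestamp: '2019-04-26T14:42:50.991981292Z', timeEpochMs: 1556289770991 };
const later: RowLike = { timestamp: '2019-04-26T14:42:50.991999999Z', timeEpochMs: 1556289770991 };

// The millisecond epochs collide, so a timeEpochMs comparator treats the rows as equal
// and their relative order depends on whatever order they happened to arrive in:
console.log(earlier.timeEpochMs < later.timeEpochMs); // false
console.log(earlier.timeEpochMs > later.timeEpochMs); // false

// Lexicographic comparison of the equal-length UTC timestamps preserves sub-millisecond order:
console.log(earlier.timestamp < later.timestamp); // true

Note that plain string comparison only works here because both timestamps share the same fixed-width UTC format.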


@@ -72,9 +72,10 @@ export class ResultProcessor {
const graphInterval = this.state.queryIntervals.intervalMs;
const dataFrame = this.rawData.map(result => guessFieldTypes(toDataFrame(result)));
const newResults = this.rawData ? dataFrameToLogsModel(dataFrame, graphInterval) : null;
+const sortedNewResults = sortLogsResult(newResults, this.state.refreshInterval);
if (this.replacePreviousResults) {
-return newResults;
+return sortedNewResults;
}
const prevLogsResult: LogsModel = this.state.logsResult || { hasUniqueLabels: false, rows: [] };
@@ -86,17 +87,17 @@ export class ResultProcessor {
for (const row of rowsInState) {
processedRows.push({ ...row, fresh: false });
}
-for (const row of newResults.rows) {
+for (const row of sortedNewResults.rows) {
processedRows.push({ ...row, fresh: true });
}
-const processedSeries = this.mergeGraphResults(newResults.series, seriesInState);
+const processedSeries = this.mergeGraphResults(sortedNewResults.series, seriesInState);
const slice = -1000;
const rows = processedRows.slice(slice);
const series = processedSeries.slice(slice);
-return { ...newResults, rows, series };
+return { ...sortedNewResults, rows, series };
};
private makeTimeSeriesList = (rawData: any[]) => {
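
The change above routes fresh rows through sortLogsResult(newResults, this.state.refreshInterval) before they are marked fresh and merged with the rows already in state. The helper's body is not part of this diff; the sketch below is only an assumption about how such a helper could combine the comparators from the previous file with the refresh interval. LogRowLike, LogsModelLike, sortLogsResultSketch, and the 'LIVE' marker are illustrative names, not Grafana APIs.

// Hypothetical sketch, not the Grafana implementation of sortLogsResult.
interface LogRowLike {
  timestamp: string;
}

interface LogsModelLike {
  rows: LogRowLike[];
}

const ascending = (a: LogRowLike, b: LogRowLike) =>
  a.timestamp < b.timestamp ? -1 : a.timestamp > b.timestamp ? 1 : 0;

const descending = (a: LogRowLike, b: LogRowLike) =>
  a.timestamp > b.timestamp ? -1 : a.timestamp < b.timestamp ? 1 : 0;

function sortLogsResultSketch(result: LogsModelLike, refreshInterval?: string): LogsModelLike {
  // Assumption: a live-tail session is flagged by a special refresh interval value,
  // and live tailing appends rows at the bottom, so oldest-first order is wanted there,
  // while a regular query keeps newest-first order.
  const isLive = refreshInterval === 'LIVE';
  const rows = [...result.rows].sort(isLive ? ascending : descending);
  return { ...result, rows };
}

// Shape of the call site in the diff, with the sketch standing in for sortLogsResult:
// const sortedNewResults = sortLogsResultSketch(newResults, this.state.refreshInterval);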