Infinite scroll: update deduplication method and only run log queries when scrolling (#80821)

* Logs deduplication: replace array lookup with map

* runLoadMoreLogs: only execute log queries

* Formatting

* Rename variable
This commit is contained in:
Matias Chomicki 2024-01-22 12:42:18 +01:00 committed by GitHub
parent 60ece3b21b
commit 0c9265f59b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 40 additions and 30 deletions

View File

@@ -724,14 +724,17 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
let newQuerySource: Observable<ExplorePanelData>; let newQuerySource: Observable<ExplorePanelData>;
const logQueries = queryResponse.logsResult?.queries || []; const queries = queryResponse.logsResult?.queries || [];
const queries = logQueries.map((query: DataQuery) => ({ const logRefIds = queryResponse.logsFrames.map((frame) => frame.refId);
...query, const logQueries = queries
datasource: query.datasource || datasourceInstance?.getRef(), .filter((query) => logRefIds.includes(query.refId))
refId: `${infiniteScrollRefId}${query.refId}`, .map((query: DataQuery) => ({
})); ...query,
datasource: query.datasource || datasourceInstance?.getRef(),
refId: `${infiniteScrollRefId}${query.refId}`,
}));
if (!hasNonEmptyQuery(queries) || !datasourceInstance) { if (!hasNonEmptyQuery(logQueries) || !datasourceInstance) {
return; return;
} }
@@ -749,7 +752,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
}, },
getFiscalYearStartMonth(getState().user) getFiscalYearStartMonth(getState().user)
); );
const transaction = buildQueryTransaction(exploreId, queries, queryOptions, range, false, timeZone, scopedVars); const transaction = buildQueryTransaction(exploreId, logQueries, queryOptions, range, false, timeZone, scopedVars);
dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Loading })); dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Loading }));
@@ -764,7 +767,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
queryResponse, queryResponse,
absoluteRange, absoluteRange,
undefined, undefined,
queries, logQueries,
correlations, correlations,
showCorrelationEditorLinks, showCorrelationEditorLinks,
defaultCorrelationEditorDatasource defaultCorrelationEditorDatasource

View File

@@ -46,7 +46,7 @@ import { ansicolor, colors } from '@grafana/ui';
import { getThemeColor } from 'app/core/utils/colors'; import { getThemeColor } from 'app/core/utils/colors';
import { LogsFrame, parseLogsFrame } from './logsFrame'; import { LogsFrame, parseLogsFrame } from './logsFrame';
import { findMatchingRow, getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from './utils'; import { createLogRowsMap, getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from './utils';
export const LIMIT_LABEL = 'Line limit'; export const LIMIT_LABEL = 'Line limit';
export const COMMON_LABELS = 'Common labels'; export const COMMON_LABELS = 'Common labels';
@@ -415,6 +415,7 @@ export function logSeriesToLogsModel(
let rows: LogRowModel[] = []; let rows: LogRowModel[] = [];
let hasUniqueLabels = false; let hasUniqueLabels = false;
const findMatchingRow = createLogRowsMap();
for (const info of allSeries) { for (const info of allSeries) {
const { logsFrame, rawFrame: series, frameLabels } = info; const { logsFrame, rawFrame: series, frameLabels } = info;
const { timeField, timeNanosecondField, bodyField: stringField, severityField: logLevelField, idField } = logsFrame; const { timeField, timeNanosecondField, bodyField: stringField, severityField: logLevelField, idField } = logsFrame;
@@ -478,7 +479,7 @@ export function logSeriesToLogsModel(
row.rowId = idField.values[j]; row.rowId = idField.values[j];
} }
if (filterDuplicateRows && findMatchingRow(row, rows)) { if (filterDuplicateRows && findMatchingRow(row)) {
continue; continue;
} }

View File

@@ -17,7 +17,7 @@ import {
calculateStats, calculateStats,
checkLogsError, checkLogsError,
escapeUnescapedString, escapeUnescapedString,
findMatchingRow, createLogRowsMap,
getLogLevel, getLogLevel,
getLogLevelFromKey, getLogLevelFromKey,
getLogsVolumeMaximumRange, getLogsVolumeMaximumRange,
@@ -485,41 +485,46 @@ describe('escapeUnescapedString', () => {
describe('findMatchingRow', () => { describe('findMatchingRow', () => {
function setup(frames: DataFrame[]) { function setup(frames: DataFrame[]) {
return logSeriesToLogsModel(frames); const logsModel = logSeriesToLogsModel(frames);
const rows = logsModel?.rows || [];
const findMatchingRow = createLogRowsMap();
for (const row of rows) {
expect(findMatchingRow(row)).toBeFalsy();
}
return { rows, findMatchingRow };
} }
it('ignores rows from different queries', () => { it('ignores rows from different queries', () => {
const { logFrameA, logFrameB } = getMockFrames(); const { logFrameA, logFrameB } = getMockFrames();
logFrameA.refId = 'A'; logFrameA.refId = 'A';
logFrameB.refId = 'B'; logFrameB.refId = 'B';
const logsModel = setup([logFrameA, logFrameB]); const { rows, findMatchingRow } = setup([logFrameA, logFrameB]);
const rows = logsModel?.rows || [];
for (const row of rows) { for (const row of rows) {
const targetRow = { ...row, dataFrame: { ...logFrameA, refId: 'Z' } }; const targetRow = { ...row, dataFrame: { ...logFrameA, refId: 'Z' } };
expect(findMatchingRow(targetRow, rows)).toBe(undefined); expect(findMatchingRow(targetRow)).toBeFalsy();
} }
}); });
it('matches rows by rowId', () => { it('matches rows by rowId', () => {
const { logFrameA, logFrameB } = getMockFrames(); const { logFrameA, logFrameB } = getMockFrames();
const logsModel = setup([logFrameA, logFrameB]); const { rows, findMatchingRow } = setup([logFrameA, logFrameB]);
const rows = logsModel?.rows || [];
for (const row of rows) { for (const row of rows) {
const targetRow = { ...row, entry: `${Math.random()}`, timeEpochNs: `${Math.ceil(Math.random() * 1000000)}` }; const targetRow = { ...row, entry: `${Math.random()}`, timeEpochNs: `${Math.ceil(Math.random() * 1000000)}` };
expect(findMatchingRow(targetRow, rows)).toBeDefined(); expect(findMatchingRow(targetRow)).toBeTruthy();
} }
}); });
it('matches rows by entry and nanosecond time', () => { it('matches rows by entry and nanosecond time', () => {
const { logFrameA, logFrameB } = getMockFrames(); const { logFrameA, logFrameB } = getMockFrames();
const logsModel = setup([logFrameA, logFrameB]); logFrameA.fields[4].values = [];
const rows = logsModel?.rows || []; logFrameB.fields[4].values = [];
const { rows, findMatchingRow } = setup([logFrameA, logFrameB]);
for (const row of rows) { for (const row of rows) {
const targetRow = { ...row, rowId: undefined }; const targetRow = { ...row, rowId: undefined };
expect(findMatchingRow(targetRow, rows)).toBeDefined(); expect(findMatchingRow(targetRow)).toBeTruthy();
} }
}); });
}); });

View File

@@ -298,13 +298,14 @@ export function targetIsElement(target: EventTarget | null): target is Element {
return target instanceof Element; return target instanceof Element;
} }
export function findMatchingRow(target: LogRowModel, rows: LogRowModel[]) { export function createLogRowsMap() {
return rows.find((row) => { const logRowsSet = new Set();
if (target.dataFrame.refId !== row.dataFrame.refId) { return function (target: LogRowModel): boolean {
return false; let id = `${target.dataFrame.refId}_${target.rowId ? target.rowId : `${target.timeEpochNs}_${target.entry}`}`;
if (logRowsSet.has(id)) {
return true;
} }
const sameId = target.rowId && row.rowId && target.rowId === row.rowId; logRowsSet.add(id);
const sameSignature = row.entry === target.entry && row.timeEpochNs === target.timeEpochNs; return false;
return sameId || sameSignature; };
});
} }