Mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00
Infinite scroll: update deduplication method and only run log queries when scrolling (#80821)
* Logs deduplication: replace array lookup with map
* runLoadMoreLogs: only execute log queries
* Formatting
* Rename variable
This commit is contained in:
commit 0c9265f59b
parent 60ece3b21b
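The first bullet refers to the deduplication change at the bottom of this diff: the old findMatchingRow scanned the accumulated rows array for every incoming row, while the new createLogRowsMap closes over a Set of row identifiers, turning each duplicate check into a constant-time lookup. A minimal, self-contained usage sketch (MinimalLogRow is an assumption standing in for LogRowModel, reduced to the fields the helper actually reads; the helper body is copied from the diff below):

// Stand-in for LogRowModel with only the fields the helper reads (assumption for this sketch).
type MinimalLogRow = {
  dataFrame: { refId?: string };
  rowId?: string;
  timeEpochNs: string;
  entry: string;
};

// Same shape as the createLogRowsMap helper added in this commit: a closure over a Set that
// reports whether a row with the same identity has been seen before.
function createLogRowsMap() {
  const logRowsSet = new Set<string>();
  return function (target: MinimalLogRow): boolean {
    const id = `${target.dataFrame.refId}_${target.rowId ? target.rowId : `${target.timeEpochNs}_${target.entry}`}`;
    if (logRowsSet.has(id)) {
      return true;
    }
    logRowsSet.add(id);
    return false;
  };
}

// Usage: one map per pass over incoming rows; duplicates are skipped in O(1) per row.
const findMatchingRow = createLogRowsMap();
const incoming: MinimalLogRow[] = [
  { dataFrame: { refId: 'A' }, rowId: '1', timeEpochNs: '1700000000000000000', entry: 'line one' },
  { dataFrame: { refId: 'A' }, rowId: '1', timeEpochNs: '1700000000000000000', entry: 'line one' }, // duplicate
];
const deduplicated = incoming.filter((row) => !findMatchingRow(row));
console.log(deduplicated.length); // 1

Note that the returned function also registers any identity it has not seen yet, so a negative check records the row for later comparisons.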
@@ -724,14 +724,17 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
 
     let newQuerySource: Observable<ExplorePanelData>;
 
-    const logQueries = queryResponse.logsResult?.queries || [];
-    const queries = logQueries.map((query: DataQuery) => ({
-      ...query,
-      datasource: query.datasource || datasourceInstance?.getRef(),
-      refId: `${infiniteScrollRefId}${query.refId}`,
-    }));
+    const queries = queryResponse.logsResult?.queries || [];
+    const logRefIds = queryResponse.logsFrames.map((frame) => frame.refId);
+    const logQueries = queries
+      .filter((query) => logRefIds.includes(query.refId))
+      .map((query: DataQuery) => ({
+        ...query,
+        datasource: query.datasource || datasourceInstance?.getRef(),
+        refId: `${infiniteScrollRefId}${query.refId}`,
+      }));
 
-    if (!hasNonEmptyQuery(queries) || !datasourceInstance) {
+    if (!hasNonEmptyQuery(logQueries) || !datasourceInstance) {
       return;
     }
 
@@ -749,7 +752,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
       },
       getFiscalYearStartMonth(getState().user)
    );
-    const transaction = buildQueryTransaction(exploreId, queries, queryOptions, range, false, timeZone, scopedVars);
+    const transaction = buildQueryTransaction(exploreId, logQueries, queryOptions, range, false, timeZone, scopedVars);
 
     dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Loading }));
 
@@ -764,7 +767,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
         queryResponse,
         absoluteRange,
         undefined,
-        queries,
+        logQueries,
         correlations,
         showCorrelationEditorLinks,
         defaultCorrelationEditorDatasource
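The hunks above are the "only execute log queries" part of the change: when the user scrolls for more logs, only the queries whose refId produced a logs frame are rebuilt and re-run, so other queries in the same Explore pane are left untouched. A small illustration under assumed data (the refIds and object shapes below are made up for the example):

// Hypothetical pane state: query A produced a logs frame, query B did not.
const queries = [{ refId: 'A' }, { refId: 'B' }];
const logsFrames = [{ refId: 'A' }];

// Same selection step as in the hunk above: keep only queries backed by a logs frame.
const logRefIds = logsFrames.map((frame) => frame.refId);
const logQueries = queries.filter((query) => logRefIds.includes(query.refId));

console.log(logQueries); // [{ refId: 'A' }], so only the log query is re-run on scroll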
@@ -46,7 +46,7 @@ import { ansicolor, colors } from '@grafana/ui';
 import { getThemeColor } from 'app/core/utils/colors';
 
 import { LogsFrame, parseLogsFrame } from './logsFrame';
-import { findMatchingRow, getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from './utils';
+import { createLogRowsMap, getLogLevel, getLogLevelFromKey, sortInAscendingOrder } from './utils';
 
 export const LIMIT_LABEL = 'Line limit';
 export const COMMON_LABELS = 'Common labels';
@@ -415,6 +415,7 @@ export function logSeriesToLogsModel(
   let rows: LogRowModel[] = [];
   let hasUniqueLabels = false;
 
+  const findMatchingRow = createLogRowsMap();
   for (const info of allSeries) {
     const { logsFrame, rawFrame: series, frameLabels } = info;
     const { timeField, timeNanosecondField, bodyField: stringField, severityField: logLevelField, idField } = logsFrame;
@@ -478,7 +479,7 @@ export function logSeriesToLogsModel(
         row.rowId = idField.values[j];
       }
 
-      if (filterDuplicateRows && findMatchingRow(row, rows)) {
+      if (filterDuplicateRows && findMatchingRow(row)) {
         continue;
       }
 
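As the hunks above show, logSeriesToLogsModel creates the closure once per call and consults it per row, so deduplication state lives only for that single model build. A short sketch of that lifetime, reusing the MinimalLogRow type and createLogRowsMap copy from the first snippet above (buildRows is an illustrative name, not a Grafana function):

// Each call builds its own map, so duplicate detection never leaks across invocations.
function buildRows(incoming: MinimalLogRow[]): MinimalLogRow[] {
  const findMatchingRow = createLogRowsMap();
  const rows: MinimalLogRow[] = [];
  for (const row of incoming) {
    if (findMatchingRow(row)) {
      continue; // mirrors the filterDuplicateRows skip in logSeriesToLogsModel
    }
    rows.push(row);
  }
  return rows;
}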
@@ -17,7 +17,7 @@ import {
   calculateStats,
   checkLogsError,
   escapeUnescapedString,
-  findMatchingRow,
+  createLogRowsMap,
   getLogLevel,
   getLogLevelFromKey,
   getLogsVolumeMaximumRange,
@@ -485,41 +485,46 @@ describe('escapeUnescapedString', () => {
 
 describe('findMatchingRow', () => {
   function setup(frames: DataFrame[]) {
-    return logSeriesToLogsModel(frames);
+    const logsModel = logSeriesToLogsModel(frames);
+    const rows = logsModel?.rows || [];
+    const findMatchingRow = createLogRowsMap();
+    for (const row of rows) {
+      expect(findMatchingRow(row)).toBeFalsy();
+    }
+    return { rows, findMatchingRow };
   }
 
   it('ignores rows from different queries', () => {
     const { logFrameA, logFrameB } = getMockFrames();
     logFrameA.refId = 'A';
     logFrameB.refId = 'B';
-    const logsModel = setup([logFrameA, logFrameB]);
-    const rows = logsModel?.rows || [];
+    const { rows, findMatchingRow } = setup([logFrameA, logFrameB]);
 
     for (const row of rows) {
       const targetRow = { ...row, dataFrame: { ...logFrameA, refId: 'Z' } };
-      expect(findMatchingRow(targetRow, rows)).toBe(undefined);
+      expect(findMatchingRow(targetRow)).toBeFalsy();
     }
   });
 
   it('matches rows by rowId', () => {
     const { logFrameA, logFrameB } = getMockFrames();
-    const logsModel = setup([logFrameA, logFrameB]);
-    const rows = logsModel?.rows || [];
+    const { rows, findMatchingRow } = setup([logFrameA, logFrameB]);
 
     for (const row of rows) {
      const targetRow = { ...row, entry: `${Math.random()}`, timeEpochNs: `${Math.ceil(Math.random() * 1000000)}` };
-      expect(findMatchingRow(targetRow, rows)).toBeDefined();
+      expect(findMatchingRow(targetRow)).toBeTruthy();
     }
   });
 
   it('matches rows by entry and nanosecond time', () => {
     const { logFrameA, logFrameB } = getMockFrames();
-    const logsModel = setup([logFrameA, logFrameB]);
-    const rows = logsModel?.rows || [];
+    logFrameA.fields[4].values = [];
+    logFrameB.fields[4].values = [];
+    const { rows, findMatchingRow } = setup([logFrameA, logFrameB]);
 
     for (const row of rows) {
       const targetRow = { ...row, rowId: undefined };
-      expect(findMatchingRow(targetRow, rows)).toBeDefined();
+      expect(findMatchingRow(targetRow)).toBeTruthy();
     }
   });
 });
@@ -298,13 +298,14 @@ export function targetIsElement(target: EventTarget | null): target is Element {
   return target instanceof Element;
 }
 
-export function findMatchingRow(target: LogRowModel, rows: LogRowModel[]) {
-  return rows.find((row) => {
-    if (target.dataFrame.refId !== row.dataFrame.refId) {
-      return false;
+export function createLogRowsMap() {
+  const logRowsSet = new Set();
+  return function (target: LogRowModel): boolean {
+    let id = `${target.dataFrame.refId}_${target.rowId ? target.rowId : `${target.timeEpochNs}_${target.entry}`}`;
+    if (logRowsSet.has(id)) {
+      return true;
     }
-    const sameId = target.rowId && row.rowId && target.rowId === row.rowId;
-    const sameSignature = row.entry === target.entry && row.timeEpochNs === target.timeEpochNs;
-    return sameId || sameSignature;
-  });
+    logRowsSet.add(id);
+    return false;
+  };
 }