Logs: improve deduplication supporting code and fix duplicated frames (#88716)

* Logs: change refId approach to deduplicate infinite scrolling rows

* Chore: update tests

* chore: add comment

* Prettier
This commit is contained in:
Matias Chomicki 2024-06-04 18:09:33 +02:00 committed by GitHub
parent a1d41a8932
commit 1cfb21a74c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 34 additions and 46 deletions

View File

@@ -159,9 +159,12 @@ function shouldCombine(frame1: DataFrame, frame2: DataFrame): boolean {
// because we do not have a good "frametype" value for them yet. // because we do not have a good "frametype" value for them yet.
const customType1 = frame1.meta?.custom?.frameType; const customType1 = frame1.meta?.custom?.frameType;
const customType2 = frame2.meta?.custom?.frameType; const customType2 = frame2.meta?.custom?.frameType;
// Legacy frames have this custom type
if (customType1 === 'LabeledTimeValues' && customType2 === 'LabeledTimeValues') { if (customType1 === 'LabeledTimeValues' && customType2 === 'LabeledTimeValues') {
return true; return true;
} else if (customType1 === customType2) {
// Data plane frames don't
return true;
} }
// should never reach here // should never reach here

View File

@@ -37,7 +37,6 @@ import {
} from 'app/core/utils/explore'; } from 'app/core/utils/explore';
import { getShiftedTimeRange } from 'app/core/utils/timePicker'; import { getShiftedTimeRange } from 'app/core/utils/timePicker';
import { getCorrelationsBySourceUIDs } from 'app/features/correlations/utils'; import { getCorrelationsBySourceUIDs } from 'app/features/correlations/utils';
import { infiniteScrollRefId } from 'app/features/logs/logsModel';
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
import { getFiscalYearStartMonth, getTimeZone } from 'app/features/profile/state/selectors'; import { getFiscalYearStartMonth, getTimeZone } from 'app/features/profile/state/selectors';
import { SupportingQueryType } from 'app/plugins/datasource/loki/types'; import { SupportingQueryType } from 'app/plugins/datasource/loki/types';
@@ -57,7 +56,7 @@ import { notifyApp } from '../../../core/actions';
import { createErrorNotification } from '../../../core/copy/appNotification'; import { createErrorNotification } from '../../../core/copy/appNotification';
import { runRequest } from '../../query/state/runRequest'; import { runRequest } from '../../query/state/runRequest';
import { visualisationTypeKey } from '../Logs/utils/logs'; import { visualisationTypeKey } from '../Logs/utils/logs';
import { decorateData } from '../utils/decorators'; import { decorateData, decorateWithLogsResult } from '../utils/decorators';
import { import {
getSupplementaryQueryProvider, getSupplementaryQueryProvider,
storeSupplementaryQueryEnabled, storeSupplementaryQueryEnabled,
@@ -583,8 +582,7 @@ export const runQueries = createAsyncThunk<void, RunQueriesOptions>(
decorateData( decorateData(
data, data,
queryResponse, queryResponse,
absoluteRange, decorateWithLogsResult({ absoluteRange, refreshInterval, queries }),
refreshInterval,
queries, queries,
correlations, correlations,
showCorrelationEditorLinks, showCorrelationEditorLinks,
@@ -646,8 +644,7 @@ export const runQueries = createAsyncThunk<void, RunQueriesOptions>(
decorateData( decorateData(
data, data,
queryResponse, queryResponse,
absoluteRange, decorateWithLogsResult({ absoluteRange, refreshInterval, queries }),
refreshInterval,
queries, queries,
correlations, correlations,
showCorrelationEditorLinks, showCorrelationEditorLinks,
@@ -754,7 +751,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
.map((query: DataQuery) => ({ .map((query: DataQuery) => ({
...query, ...query,
datasource: query.datasource || datasourceInstance?.getRef(), datasource: query.datasource || datasourceInstance?.getRef(),
refId: `${infiniteScrollRefId}${query.refId}`, refId: query.refId,
supportingQueryType: SupportingQueryType.InfiniteScroll, supportingQueryType: SupportingQueryType.InfiniteScroll,
})); }));
@@ -791,8 +788,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQuer
// This shouldn't be needed after https://github.com/grafana/grafana/issues/57327 is fixed // This shouldn't be needed after https://github.com/grafana/grafana/issues/57327 is fixed
combinePanelData(queryResponse, data), combinePanelData(queryResponse, data),
queryResponse, queryResponse,
absoluteRange, decorateWithLogsResult({ absoluteRange, queries, deduplicate: true }),
undefined,
logQueries, logQueries,
correlations, correlations,
showCorrelationEditorLinks, showCorrelationEditorLinks,

View File

@@ -264,6 +264,7 @@ export const decorateWithLogsResult =
absoluteRange?: AbsoluteTimeRange; absoluteRange?: AbsoluteTimeRange;
refreshInterval?: string; refreshInterval?: string;
queries?: DataQuery[]; queries?: DataQuery[];
deduplicate?: boolean;
} = {} } = {}
) => ) =>
(data: ExplorePanelData): ExplorePanelData => { (data: ExplorePanelData): ExplorePanelData => {
@@ -272,7 +273,13 @@ export const decorateWithLogsResult =
} }
const intervalMs = data.request?.intervalMs; const intervalMs = data.request?.intervalMs;
const newResults = dataFrameToLogsModel(data.logsFrames, intervalMs, options.absoluteRange, options.queries); const newResults = dataFrameToLogsModel(
data.logsFrames,
intervalMs,
options.absoluteRange,
options.queries,
options.deduplicate
);
const sortOrder = refreshIntervalToSortOrder(options.refreshInterval); const sortOrder = refreshIntervalToSortOrder(options.refreshInterval);
const sortedNewResults = sortLogsResult(newResults, sortOrder); const sortedNewResults = sortLogsResult(newResults, sortOrder);
const rows = sortedNewResults.rows; const rows = sortedNewResults.rows;
@@ -286,8 +293,7 @@ export const decorateWithLogsResult =
export function decorateData( export function decorateData(
data: PanelData, data: PanelData,
queryResponse: PanelData, queryResponse: PanelData,
absoluteRange: AbsoluteTimeRange, logsResultDecorator: (data: ExplorePanelData) => ExplorePanelData,
refreshInterval: string | undefined,
queries: DataQuery[] | undefined, queries: DataQuery[] | undefined,
correlations: CorrelationData[] | undefined, correlations: CorrelationData[] | undefined,
showCorrelationEditorLinks: boolean, showCorrelationEditorLinks: boolean,
@@ -305,7 +311,7 @@ export function decorateData(
), ),
map(decorateWithFrameTypeMetadata), map(decorateWithFrameTypeMetadata),
map(decorateWithGraphResult), map(decorateWithGraphResult),
map(decorateWithLogsResult({ absoluteRange, refreshInterval, queries })), map(logsResultDecorator),
mergeMap(decorateWithRawPrometheusResult), mergeMap(decorateWithRawPrometheusResult),
mergeMap(decorateWithTableResult) mergeMap(decorateWithTableResult)
); );

View File

@@ -31,7 +31,6 @@ import {
dedupLogRows, dedupLogRows,
filterLogLevels, filterLogLevels,
getSeriesProperties, getSeriesProperties,
infiniteScrollRefId,
LIMIT_LABEL, LIMIT_LABEL,
logRowToSingleRowDataFrame, logRowToSingleRowDataFrame,
logSeriesToLogsModel, logSeriesToLogsModel,
@@ -1023,12 +1022,12 @@ describe('dataFrameToLogsModel', () => {
let frameA: DataFrame, frameB: DataFrame; let frameA: DataFrame, frameB: DataFrame;
beforeEach(() => { beforeEach(() => {
const { logFrameA, logFrameB } = getMockFrames(); const { logFrameA, logFrameB } = getMockFrames();
logFrameA.refId = `${infiniteScrollRefId}-A`; logFrameA.refId = `A`;
logFrameA.fields[0].values = [1, 1]; logFrameA.fields[0].values = [1, 1];
logFrameA.fields[1].values = ['line', 'line']; logFrameA.fields[1].values = ['line', 'line'];
logFrameA.fields[3].values = ['3000000', '3000000']; logFrameA.fields[3].values = ['3000000', '3000000'];
logFrameA.fields[4].values = ['id', 'id']; logFrameA.fields[4].values = ['id', 'id'];
logFrameB.refId = `${infiniteScrollRefId}-B`; logFrameB.refId = `B`;
logFrameB.fields[0].values = [2, 2]; logFrameB.fields[0].values = [2, 2];
logFrameB.fields[1].values = ['line 2', 'line 2']; logFrameB.fields[1].values = ['line 2', 'line 2'];
logFrameB.fields[3].values = ['4000000', '4000000']; logFrameB.fields[3].values = ['4000000', '4000000'];
@@ -1037,18 +1036,21 @@ describe('dataFrameToLogsModel', () => {
frameB = logFrameB; frameB = logFrameB;
}); });
it('deduplicates repeated log frames when invoked from infinite scrolling results', () => { it('deduplicates repeated log frames when called with deduplicate', () => {
const logsModel = dataFrameToLogsModel([frameA, frameB], 1, { from: 1556270591353, to: 1556289770991 }, [ const logsModel = dataFrameToLogsModel(
{ refId: `${infiniteScrollRefId}-A` }, [frameA, frameB],
{ refId: `${infiniteScrollRefId}-B` }, 1,
]); { from: 1556270591353, to: 1556289770991 },
[{ refId: `A` }, { refId: `B` }],
true
);
expect(logsModel.rows).toHaveLength(2); expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows[0].entry).toBe(frameA.fields[1].values[0]); expect(logsModel.rows[0].entry).toBe(frameA.fields[1].values[0]);
expect(logsModel.rows[1].entry).toBe(frameB.fields[1].values[0]); expect(logsModel.rows[1].entry).toBe(frameB.fields[1].values[0]);
}); });
it('does not remove repeated log frames when invoked from other contexts', () => { it('does not remove repeated log frames when invoked without deduplicate', () => {
frameA.refId = 'A'; frameA.refId = 'A';
frameB.refId = 'B'; frameB.refId = 'B';
const logsModel = dataFrameToLogsModel([frameA, frameB], 1, { from: 1556270591353, to: 1556289770991 }, [ const logsModel = dataFrameToLogsModel([frameA, frameB], 1, { from: 1556270591353, to: 1556289770991 }, [

View File

@@ -201,8 +201,6 @@ function isLogsData(series: DataFrame) {
return series.fields.some((f) => f.type === FieldType.time) && series.fields.some((f) => f.type === FieldType.string); return series.fields.some((f) => f.type === FieldType.time) && series.fields.some((f) => f.type === FieldType.string);
} }
export const infiniteScrollRefId = 'infinite-scroll-';
/** /**
* Convert dataFrame into LogsModel which consists of creating separate array of log rows and metrics series. Metrics * Convert dataFrame into LogsModel which consists of creating separate array of log rows and metrics series. Metrics
* series can be either already included in the dataFrame or will be computed from the log rows. * series can be either already included in the dataFrame or will be computed from the log rows.
@@ -215,29 +213,12 @@ export function dataFrameToLogsModel(
dataFrame: DataFrame[], dataFrame: DataFrame[],
intervalMs?: number, intervalMs?: number,
absoluteRange?: AbsoluteTimeRange, absoluteRange?: AbsoluteTimeRange,
queries?: DataQuery[] queries?: DataQuery[],
deduplicateResults?: boolean
): LogsModel { ): LogsModel {
// Until nanosecond precision for requests is supported, we need to account for possible duplicate rows.
let infiniteScrollingResults = false;
queries = queries?.map((query) => {
if (query.refId.includes(infiniteScrollRefId)) {
infiniteScrollingResults = true;
return {
...query,
refId: query.refId.replace(infiniteScrollRefId, ''),
};
}
return query;
});
if (infiniteScrollingResults) {
dataFrame = dataFrame.map((frame) => ({
...frame,
refId: frame.refId?.replace(infiniteScrollRefId, ''),
}));
}
const { logSeries } = separateLogsAndMetrics(dataFrame); const { logSeries } = separateLogsAndMetrics(dataFrame);
const logsModel = logSeriesToLogsModel(logSeries, queries, infiniteScrollingResults); // Until nanosecond precision for requests is supported, we need to account for possible duplicate rows.
const logsModel = logSeriesToLogsModel(logSeries, queries, Boolean(deduplicateResults));
if (logsModel) { if (logsModel) {
// Create histogram metrics from logs using the interval as bucket size for the line count // Create histogram metrics from logs using the interval as bucket size for the line count