diff --git a/packages/grafana-o11y-ds-frontend/src/combineResponses.ts b/packages/grafana-o11y-ds-frontend/src/combineResponses.ts
index 05e290663ed..0a22f085709 100644
--- a/packages/grafana-o11y-ds-frontend/src/combineResponses.ts
+++ b/packages/grafana-o11y-ds-frontend/src/combineResponses.ts
@@ -159,9 +159,12 @@ function shouldCombine(frame1: DataFrame, frame2: DataFrame): boolean {
   // because we do not have a good "frametype" value for them yet.
   const customType1 = frame1.meta?.custom?.frameType;
   const customType2 = frame2.meta?.custom?.frameType;
-
+  // Legacy frames have this custom type
   if (customType1 === 'LabeledTimeValues' && customType2 === 'LabeledTimeValues') {
     return true;
+  } else if (customType1 === customType2) {
+    // Data plane frames don't
+    return true;
   }
 
   // should never reach here
diff --git a/public/app/features/explore/state/query.ts b/public/app/features/explore/state/query.ts
index 3a05fdc13f0..a4b7e92922b 100644
--- a/public/app/features/explore/state/query.ts
+++ b/public/app/features/explore/state/query.ts
@@ -37,7 +36,6 @@ import {
 } from 'app/core/utils/explore';
 import { getShiftedTimeRange } from 'app/core/utils/timePicker';
 import { getCorrelationsBySourceUIDs } from 'app/features/correlations/utils';
-import { infiniteScrollRefId } from 'app/features/logs/logsModel';
 import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
 import { getFiscalYearStartMonth, getTimeZone } from 'app/features/profile/state/selectors';
 import { SupportingQueryType } from 'app/plugins/datasource/loki/types';
@@ -57,7 +56,7 @@ import { notifyApp } from '../../../core/actions';
 import { createErrorNotification } from '../../../core/copy/appNotification';
 import { runRequest } from '../../query/state/runRequest';
 import { visualisationTypeKey } from '../Logs/utils/logs';
-import { decorateData } from '../utils/decorators';
+import { decorateData, decorateWithLogsResult } from '../utils/decorators';
 import {
   getSupplementaryQueryProvider,
   storeSupplementaryQueryEnabled,
@@ -583,8 +582,7 @@ export const runQueries = createAsyncThunk(
           decorateData(
             data,
             queryResponse,
-            absoluteRange,
-            refreshInterval,
+            decorateWithLogsResult({ absoluteRange, refreshInterval, queries }),
             queries,
             correlations,
             showCorrelationEditorLinks,
@@ -646,8 +644,7 @@ export const runQueries = createAsyncThunk(
           decorateData(
             data,
             queryResponse,
-            absoluteRange,
-            refreshInterval,
+            decorateWithLogsResult({ absoluteRange, refreshInterval, queries }),
             queries,
             correlations,
             showCorrelationEditorLinks,
@@ -754,7 +751,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk
     ({
      ...query,
      datasource: query.datasource || datasourceInstance?.getRef(),
-     refId: `${infiniteScrollRefId}${query.refId}`,
+     refId: query.refId,
      supportingQueryType: SupportingQueryType.InfiniteScroll,
    }));
 
@@ -791,8 +788,7 @@ export const runLoadMoreLogsQueries = createAsyncThunk
diff --git a/public/app/features/explore/utils/decorators.ts b/public/app/features/explore/utils/decorators.ts
--- a/public/app/features/explore/utils/decorators.ts
+++ b/public/app/features/explore/utils/decorators.ts
   (data: ExplorePanelData): ExplorePanelData => {
@@ -272,7 +273,13 @@ export const decorateWithLogsResult =
     }
 
     const intervalMs = data.request?.intervalMs;
-    const newResults = dataFrameToLogsModel(data.logsFrames, intervalMs, options.absoluteRange, options.queries);
+    const newResults = dataFrameToLogsModel(
+      data.logsFrames,
+      intervalMs,
+      options.absoluteRange,
+      options.queries,
+      options.deduplicate
+    );
     const sortOrder = refreshIntervalToSortOrder(options.refreshInterval);
     const sortedNewResults = sortLogsResult(newResults, sortOrder);
     const rows = sortedNewResults.rows;
@@ -286,8 +293,7 @@
 export function decorateData(
   data: PanelData,
   queryResponse: PanelData,
-  absoluteRange: AbsoluteTimeRange,
-  refreshInterval: string | undefined,
+  logsResultDecorator: (data: ExplorePanelData) => ExplorePanelData,
   queries: DataQuery[] | undefined,
   correlations: CorrelationData[] | undefined,
   showCorrelationEditorLinks: boolean,
@@ -305,7 +311,7 @@ export function decorateData(
     ),
     map(decorateWithFrameTypeMetadata),
     map(decorateWithGraphResult),
-    map(decorateWithLogsResult({ absoluteRange, refreshInterval, queries })),
+    map(logsResultDecorator),
     mergeMap(decorateWithRawPrometheusResult),
     mergeMap(decorateWithTableResult)
   );
diff --git a/public/app/features/logs/logsModel.test.ts b/public/app/features/logs/logsModel.test.ts
index 05883b14849..f6922147ddd 100644
--- a/public/app/features/logs/logsModel.test.ts
+++ b/public/app/features/logs/logsModel.test.ts
@@ -31,7 +30,6 @@ import {
   dedupLogRows,
   filterLogLevels,
   getSeriesProperties,
-  infiniteScrollRefId,
   LIMIT_LABEL,
   logRowToSingleRowDataFrame,
   logSeriesToLogsModel,
@@ -1023,12 +1022,12 @@ describe('dataFrameToLogsModel', () => {
     let frameA: DataFrame, frameB: DataFrame;
     beforeEach(() => {
       const { logFrameA, logFrameB } = getMockFrames();
-      logFrameA.refId = `${infiniteScrollRefId}-A`;
+      logFrameA.refId = `A`;
       logFrameA.fields[0].values = [1, 1];
       logFrameA.fields[1].values = ['line', 'line'];
       logFrameA.fields[3].values = ['3000000', '3000000'];
       logFrameA.fields[4].values = ['id', 'id'];
-      logFrameB.refId = `${infiniteScrollRefId}-B`;
+      logFrameB.refId = `B`;
       logFrameB.fields[0].values = [2, 2];
       logFrameB.fields[1].values = ['line 2', 'line 2'];
       logFrameB.fields[3].values = ['4000000', '4000000'];
@@ -1037,18 +1036,21 @@
       frameA = logFrameA;
       frameB = logFrameB;
     });
 
-    it('deduplicates repeated log frames when invoked from infinite scrolling results', () => {
-      const logsModel = dataFrameToLogsModel([frameA, frameB], 1, { from: 1556270591353, to: 1556289770991 }, [
-        { refId: `${infiniteScrollRefId}-A` },
-        { refId: `${infiniteScrollRefId}-B` },
-      ]);
+    it('deduplicates repeated log frames when called with deduplicate', () => {
+      const logsModel = dataFrameToLogsModel(
+        [frameA, frameB],
+        1,
+        { from: 1556270591353, to: 1556289770991 },
+        [{ refId: `A` }, { refId: `B` }],
+        true
+      );
       expect(logsModel.rows).toHaveLength(2);
       expect(logsModel.rows[0].entry).toBe(frameA.fields[1].values[0]);
       expect(logsModel.rows[1].entry).toBe(frameB.fields[1].values[0]);
     });
 
-    it('does not remove repeated log frames when invoked from other contexts', () => {
+    it('does not remove repeated log frames when invoked without deduplicate', () => {
       frameA.refId = 'A';
       frameB.refId = 'B';
       const logsModel = dataFrameToLogsModel([frameA, frameB], 1, { from: 1556270591353, to: 1556289770991 }, [
diff --git a/public/app/features/logs/logsModel.ts b/public/app/features/logs/logsModel.ts
index d4a11460ab6..b76f8f81dc4 100644
--- a/public/app/features/logs/logsModel.ts
+++ b/public/app/features/logs/logsModel.ts
@@ -201,8 +201,6 @@ function isLogsData(series: DataFrame) {
   return series.fields.some((f) => f.type === FieldType.time) && series.fields.some((f) => f.type === FieldType.string);
 }
 
-export const infiniteScrollRefId = 'infinite-scroll-';
-
 /**
  * Convert dataFrame into LogsModel which consists of creating separate array of log rows and metrics series. Metrics
  * series can be either already included in the dataFrame or will be computed from the log rows.
@@ -215,29 +213,12 @@ export function dataFrameToLogsModel(
   dataFrame: DataFrame[],
   intervalMs?: number,
   absoluteRange?: AbsoluteTimeRange,
-  queries?: DataQuery[]
+  queries?: DataQuery[],
+  deduplicateResults?: boolean
 ): LogsModel {
-  // Until nanosecond precision for requests is supported, we need to account for possible duplicate rows.
-  let infiniteScrollingResults = false;
-  queries = queries?.map((query) => {
-    if (query.refId.includes(infiniteScrollRefId)) {
-      infiniteScrollingResults = true;
-      return {
-        ...query,
-        refId: query.refId.replace(infiniteScrollRefId, ''),
-      };
-    }
-    return query;
-  });
-  if (infiniteScrollingResults) {
-    dataFrame = dataFrame.map((frame) => ({
-      ...frame,
-      refId: frame.refId?.replace(infiniteScrollRefId, ''),
-    }));
-  }
-
   const { logSeries } = separateLogsAndMetrics(dataFrame);
-  const logsModel = logSeriesToLogsModel(logSeries, queries, infiniteScrollingResults);
+  // Until nanosecond precision for requests is supported, we need to account for possible duplicate rows.
+  const logsModel = logSeriesToLogsModel(logSeries, queries, Boolean(deduplicateResults));
 
   if (logsModel) {
     // Create histogram metrics from logs using the interval as bucket size for the line count
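
Review note, not part of the diff: a minimal TypeScript sketch of how the reworked pieces fit together. The `deduplicate` option key and the new fifth parameter of `dataFrameToLogsModel` come from the hunks above; the fixture values, and the assumption that the infinite-scroll path passes `deduplicate: true`, are illustrative, since that call site's hunk is not shown in full here.

import { dataFrameToLogsModel } from 'app/features/logs/logsModel';
import { decorateWithLogsResult } from 'app/features/explore/utils/decorators';

const absoluteRange = { from: 1556270591353, to: 1556289770991 };
const queries = [{ refId: 'A' }];

// Regular query runs: build the logs decorator without deduplication and pass it
// to decorateData in place of the old absoluteRange/refreshInterval arguments.
const logsDecorator = decorateWithLogsResult({ absoluteRange, refreshInterval: undefined, queries });

// Infinite scrolling (assumption): opt in to deduplication explicitly now that the
// 'infinite-scroll-' refId prefix no longer exists.
const loadMoreDecorator = decorateWithLogsResult({ absoluteRange, refreshInterval: undefined, queries, deduplicate: true });

// The flag is forwarded as the new optional fifth argument of dataFrameToLogsModel.
const logsModel = dataFrameToLogsModel([], 1, absoluteRange, queries, true);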