Logs: add infinite scrolling to Explore (#76348)
* Explore: propose action, thunk, and decorators for load more
* LogsContainer: add loadMore method
* Query: remove unused var
* Loading more: use navigation to simulate scrolling
* Explore: figure out data combination
* Fix imports
* Explore: deduplicate results when using query splitting
* LogsNavigation: add scroll behavior
* Remove old code
* Scroll: adjust delta value
* Load more: remove refIds from signature
  We can resolve them inside Explore state
* Load more: rename to loadMoreLogs
* Infinite scrolling: use scrollElement to listen to scrolling events
* Explore logs: add fixed height to scrollable logs container
* Logs: make logs container the scrolling element
* Logs: remove dynamic logs container size
  It works very well with 1 query, but breaks with more than 1 query or when Logs is not the last rendered panel
* Logs navigation: revert changes
* Infinite scroll: create component
* Infinite scroll: refactor and clean up effect
* Infinite scroll: support oldest first scrolling direction
* Infinite scroll: support loading oldest logs in ascending and descending order
* Infinite scroll: use scroll to top from logs navigation
* Logs: make logs container smaller
* Logs: make container smaller
* State: integrate explore's loading states
* Infinite scroll: add loading to effect dependency array
* Infinite scroll: display message when scroll limit is reached
* Infinite scroll: add support to scroll in both directions
* Infinite scroll: capture wheel events for top scroll
* scrollableLogsContainer: deprecate in favor of logsInfiniteScrolling
* Infinite scroll: implement timerange limits
* Infinite scroll: pass timezone
* Fix unused variables and imports
* Infinite scroll: implement timerange limits for absolute time
* Infinite scroll: fix timerange limits for absolute and relative times
* Infinite scroll: reset out-of-bounds message
* Logs: make container taller
* Line limit: use "displayed" instead of "returned" for infinite scrolling
* Infinite scrolling: disable behavior when there is no scroll
* Remove console log
* Infinite scroll: hide limit reached message when using relative time
* Logs: migrate styles to object notation
* Prettier formatting
* LogsModel: fix import order
* Update betterer.results
* Logs: remove exploreScrollableLogsContainer test
* Infinite scroll: display loader
* Infinite scroll: improve wheel handling
* Explore: unify correlations code
* Explore: move new function to helpers
* Remove comment
* Fix imports
* Formatting
* Query: add missing awaits in unit test
* Logs model: add unit test
* Combine frames: move code to feature/logs
* Explore: move getCorrelations call back to query
  It was causing a weird test failure
* Fix imports
* Infinite scroll: parametrize scrolling threshold
* Logs: fix overflow css
* Infinite scroll: add basic unit test
* Infinite scroll: add unit test for absolute time ranges
* Formatting
* Explore query: add custom interaction for scrolling
* Query: move correlations before update time
* Fix import in test
* Update comment
* Remove comment
* Remove comment
* Infinite scroll: report interactions from component
* Fix import order
* Rename action
* Infinite scroll: update limit reached message
* Explore logs: remove type assertion
* Update betterer
This commit is contained in:
parent df513c870f
commit aa670280fc
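For orientation before the diff: a minimal sketch of how the new pieces fit together, condensed from the Logs.tsx and LogsContainer.tsx changes below. Prop and action names are taken from this diff; the surrounding state handling is elided.

// Sketch only - how Explore wires infinite scrolling (from the diff below).
// <InfiniteScroll> wraps <LogRows>, listens to scroll/wheel events on the
// logs container, and when the user reaches either edge it computes a new
// absolute time range and calls loadMoreLogs, which dispatches the new
// runLoadMoreLogsQueries thunk; merged results replace the current frames.
<div className={styles.scrollableLogRows} ref={this.onLogsContainerRef}>
  <InfiniteScroll
    loading={loading}
    loadMoreLogs={loadMoreLogs} // LogsContainer -> loadMoreLogs thunk -> runLoadMoreLogsQueries
    range={this.props.range}
    timeZone={timeZone}
    rows={logRows}
    scrollElement={this.state.logsContainer}
    sortOrder={logsSortOrder}
  >
    <LogRows logRows={logRows} /* ...existing props unchanged... */ />
  </InfiniteScroll>
</div>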
@@ -3214,19 +3214,7 @@ exports[`better eslint`] = {
      [0, 0, 0, "Styles should be written using objects.", "4"]
    ],
    "public/app/features/explore/Logs/Logs.tsx:5381": [
      [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
      [0, 0, 0, "Do not use any type assertions.", "1"],
      [0, 0, 0, "Styles should be written using objects.", "2"],
      [0, 0, 0, "Styles should be written using objects.", "3"],
      [0, 0, 0, "Styles should be written using objects.", "4"],
      [0, 0, 0, "Styles should be written using objects.", "5"],
      [0, 0, 0, "Styles should be written using objects.", "6"],
      [0, 0, 0, "Styles should be written using objects.", "7"],
      [0, 0, 0, "Styles should be written using objects.", "8"],
      [0, 0, 0, "Styles should be written using objects.", "9"],
      [0, 0, 0, "Styles should be written using objects.", "10"],
      [0, 0, 0, "Styles should be written using objects.", "11"],
      [0, 0, 0, "Styles should be written using objects.", "12"]
      [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
    ],
    "public/app/features/explore/Logs/LogsMetaRow.tsx:5381": [
      [0, 0, 0, "Styles should be written using objects.", "0"],

@@ -3248,8 +3236,7 @@ exports[`better eslint`] = {
    "public/app/features/explore/Logs/LogsSamplePanel.tsx:5381": [
      [0, 0, 0, "Styles should be written using objects.", "0"],
      [0, 0, 0, "Styles should be written using objects.", "1"],
      [0, 0, 0, "Styles should be written using objects.", "2"],
      [0, 0, 0, "Styles should be written using objects.", "3"]
      [0, 0, 0, "Styles should be written using objects.", "2"]
    ],
    "public/app/features/explore/Logs/LogsVolumePanel.tsx:5381": [
      [0, 0, 0, "Styles should be written using objects.", "0"],
@@ -163,46 +163,6 @@ describe('Logs', () => {
    window.innerHeight = originalInnerHeight;
  });

  describe('when `exploreScrollableLogsContainer` is set', () => {
    let featureToggle: boolean | undefined;
    beforeEach(() => {
      featureToggle = config.featureToggles.exploreScrollableLogsContainer;
      config.featureToggles.exploreScrollableLogsContainer = true;
    });
    afterEach(() => {
      config.featureToggles.exploreScrollableLogsContainer = featureToggle;
      jest.clearAllMocks();
    });

    it('should call `this.state.logsContainer.scroll`', () => {
      const scrollIntoViewSpy = jest.spyOn(window.HTMLElement.prototype, 'scrollIntoView');
      jest.spyOn(window.HTMLElement.prototype, 'scrollTop', 'get').mockReturnValue(920);
      const scrollSpy = jest.spyOn(window.HTMLElement.prototype, 'scroll');

      const logs = [];
      for (let i = 0; i < 50; i++) {
        logs.push(makeLog({ uid: `uid${i}`, rowId: `id${i}`, timeEpochMs: i }));
      }

      setup({ panelState: { logs: { id: 'uid47' } } }, undefined, logs);

      expect(scrollIntoViewSpy).toBeCalledTimes(1);
      // element.getBoundingClientRect().top will always be 0 for jsdom
      // calc will be `this.state.logsContainer.scrollTop - window.innerHeight / 2` -> 920 - 500 = 420
      expect(scrollSpy).toBeCalledWith({ behavior: 'smooth', top: 420 });
    });
  });

  describe('when `exploreScrollableLogsContainer` is not set', () => {
    let featureToggle: boolean | undefined;
    beforeEach(() => {
      featureToggle = config.featureToggles.exploreScrollableLogsContainer;
      config.featureToggles.exploreScrollableLogsContainer = false;
    });
    afterEach(() => {
      config.featureToggles.exploreScrollableLogsContainer = featureToggle;
    });

    it('should call `scrollElement.scroll`', () => {
      const logs = [];
      for (let i = 0; i < 50; i++) {

@@ -223,7 +183,6 @@ describe('Logs', () => {
      expect(scrollElementMock.scroll).toBeCalledWith({ behavior: 'smooth', top: 420 });
    });
  });
});

  it('should render logs', () => {
    setup();
@@ -47,6 +47,8 @@ import {
} from '@grafana/ui';
import store from 'app/core/store';
import { createAndCopyShortLink } from 'app/core/utils/shortLinks';
import { InfiniteScroll } from 'app/features/logs/components/InfiniteScroll';
import { getLogLevelFromKey } from 'app/features/logs/utils';
import { dispatch, getState } from 'app/store/store';

import { ExploreItemState } from '../../../types';

@@ -108,6 +110,7 @@ interface Props extends Themeable2 {
  range: TimeRange;
  onClickFilterValue?: (value: string, refId?: string) => void;
  onClickFilterOutValue?: (value: string, refId?: string) => void;
  loadMoreLogs?(range: AbsoluteTimeRange): void;
}

export type LogsVisualisationType = 'table' | 'logs';

@@ -130,8 +133,6 @@ interface State {
  logsContainer?: HTMLDivElement;
}

const scrollableLogsContainer = config.featureToggles.exploreScrollableLogsContainer;

// we need to define the order of these explicitly
const DEDUP_OPTIONS = [
  LogsDedupStrategy.none,

@@ -203,6 +204,7 @@ class UnthemedLogs extends PureComponent<Props, State> {
      );
    }
  }

  updatePanelState = (logsPanelState: Partial<ExploreLogsPanelState>) => {
    const state: ExploreItemState | undefined = getState().explore.panes[this.props.exploreId];
    if (state?.panelsState) {

@@ -346,7 +348,7 @@ class UnthemedLogs extends PureComponent<Props, State> {
  };

  onToggleLogLevel = (hiddenRawLevels: string[]) => {
    const hiddenLogLevels = hiddenRawLevels.map((level) => LogLevel[level as LogLevel]);
    const hiddenLogLevels = hiddenRawLevels.map((level) => getLogLevelFromKey(level));
    this.setState({ hiddenLogLevels });
  };

@@ -471,7 +473,7 @@ class UnthemedLogs extends PureComponent<Props, State> {
  };

  scrollIntoView = (element: HTMLElement) => {
    if (config.featureToggles.exploreScrollableLogsContainer) {
    if (config.featureToggles.logsInfiniteScrolling) {
      if (this.state.logsContainer) {
        this.topLogsRef.current?.scrollIntoView();
        this.state.logsContainer.scroll({

@@ -521,16 +523,15 @@ class UnthemedLogs extends PureComponent<Props, State> {
  });

  scrollToTopLogs = () => {
    if (config.featureToggles.exploreScrollableLogsContainer) {
    if (config.featureToggles.logsInfiniteScrolling) {
      if (this.state.logsContainer) {
        this.state.logsContainer.scroll({
          behavior: 'auto',
          top: 0,
        });
      }
    } else {
      this.topLogsRef.current?.scrollIntoView();
    }
    this.topLogsRef.current?.scrollIntoView();
  };

  render() {

@@ -560,6 +561,7 @@ class UnthemedLogs extends PureComponent<Props, State> {
      getRowContext,
      getLogRowContextUi,
      getRowContextQuery,
      loadMoreLogs,
    } = this.props;

    const {

@@ -784,7 +786,20 @@ class UnthemedLogs extends PureComponent<Props, State> {
            </div>
          )}
          {this.state.visualisationType === 'logs' && hasData && (
            <div className={styles.logRows} data-testid="logRows" ref={this.onLogsContainerRef}>
            <div
              className={config.featureToggles.logsInfiniteScrolling ? styles.scrollableLogRows : styles.logRows}
              data-testid="logRows"
              ref={this.onLogsContainerRef}
            >
              <InfiniteScroll
                loading={loading}
                loadMoreLogs={loadMoreLogs}
                range={this.props.range}
                timeZone={timeZone}
                rows={logRows}
                scrollElement={this.state.logsContainer}
                sortOrder={logsSortOrder}
              >
                <LogRows
                  logRows={logRows}
                  deduplicatedRows={dedupedRows}

@@ -816,6 +831,7 @@ class UnthemedLogs extends PureComponent<Props, State> {
                  onClickFilterValue={this.props.onClickFilterValue}
                  onClickFilterOutValue={this.props.onClickFilterOutValue}
                />
              </InfiniteScroll>
            </div>
          )}
          {!loading && !hasData && !scanning && (

@@ -861,61 +877,65 @@ export const Logs = withTheme2(UnthemedLogs);

const getStyles = (theme: GrafanaTheme2, wrapLogMessage: boolean, tableHeight: number) => {
  return {
    noData: css`
      > * {
        margin-left: 0.5em;
      }
    `,
    logOptions: css`
      display: flex;
      justify-content: space-between;
      align-items: baseline;
      flex-wrap: wrap;
      background-color: ${theme.colors.background.primary};
      padding: ${theme.spacing(1, 2)};
      border-radius: ${theme.shape.radius.default};
      margin: ${theme.spacing(0, 0, 1)};
      border: 1px solid ${theme.colors.border.medium};
    `,
    headerButton: css`
      margin: ${theme.spacing(0.5, 0, 0, 1)};
    `,
    horizontalInlineLabel: css`
      > label {
        margin-right: 0;
      }
    `,
    horizontalInlineSwitch: css`
      padding: 0 ${theme.spacing(1)} 0 0;
    `,
    radioButtons: css`
      margin: 0;
    `,
    logsSection: css`
      display: flex;
      flex-direction: row;
      justify-content: space-between;
    `,
    noData: css({
      '& > *': {
        marginLeft: '0.5em',
      },
    }),
    logOptions: css({
      display: 'flex',
      justifyContent: 'space-between',
      alignItems: 'baseline',
      flexWrap: 'wrap',
      backgroundColor: theme.colors.background.primary,
      padding: `${theme.spacing(1)} ${theme.spacing(2)}`,
      borderRadius: theme.shape.radius.default,
      margin: `${theme.spacing(0, 0, 1)}`,
      border: `1px solid ${theme.colors.border.medium}`,
    }),
    headerButton: css({
      margin: `${theme.spacing(0.5, 0, 0, 1)}`,
    }),
    horizontalInlineLabel: css({
      '& > label': {
        marginRight: '0',
      },
    }),
    horizontalInlineSwitch: css({
      padding: `0 ${theme.spacing(1)} 0 0`,
    }),
    radioButtons: css({
      margin: '0',
    }),
    logsSection: css({
      display: 'flex',
      flexDirection: 'row',
      justifyContent: 'space-between',
    }),
    logsTable: css({
      maxHeight: `${tableHeight}px`,
    }),
    logRows: css`
      overflow-x: ${scrollableLogsContainer ? 'scroll;' : `${wrapLogMessage ? 'unset' : 'scroll'};`}
      overflow-y: visible;
      width: 100%;
      ${scrollableLogsContainer && 'max-height: calc(100vh - 170px);'}
    `,
    visualisationType: css`
      display: flex;
      flex: 1;
      justify-content: space-between;
    `,
    visualisationTypeRadio: css`
      margin: 0 0 0 ${theme.spacing(1)};
    `,
    stickyNavigation: css`
      ${scrollableLogsContainer && 'margin-bottom: 0px'}
      overflow: visible;
    `,
    scrollableLogRows: css({
      overflowY: 'scroll',
      width: '100%',
      maxHeight: '75vh',
    }),
    logRows: css({
      overflowX: `${wrapLogMessage ? 'unset' : 'scroll'}`,
      overflowY: 'visible',
      width: '100%',
    }),
    visualisationType: css({
      display: 'flex',
      flex: '1',
      justifyContent: 'space-between',
    }),
    visualisationTypeRadio: css({
      margin: `0 0 0 ${theme.spacing(1)}`,
    }),
    stickyNavigation: css({
      overflow: 'visible',
      ...(config.featureToggles.logsInfiniteScrolling && { marginBottom: '0px' }),
    }),
  };
};
@@ -36,7 +36,7 @@ import {
  selectIsWaitingForData,
  setSupplementaryQueryEnabled,
} from '../state/query';
import { updateTimeRange } from '../state/time';
import { updateTimeRange, loadMoreLogs } from '../state/time';
import { LiveTailControls } from '../useLiveTailControls';
import { getFieldLinksForExplore } from '../utils/links';

@@ -140,6 +140,11 @@ class LogsContainer extends PureComponent<LogsContainerProps, LogsContainerState
    updateTimeRange({ exploreId, absoluteRange });
  };

  loadMoreLogs = (absoluteRange: AbsoluteTimeRange) => {
    const { exploreId, loadMoreLogs } = this.props;
    loadMoreLogs({ exploreId, absoluteRange });
  };

  private getQuery(
    logsQueries: DataQuery[] | undefined,
    row: LogRowModel,

@@ -322,6 +327,7 @@ class LogsContainer extends PureComponent<LogsContainerProps, LogsContainerState
          loadingState={loadingState}
          loadLogsVolumeData={() => loadSupplementaryQueryData(exploreId, SupplementaryQueryType.LogsVolume)}
          onChangeTime={this.onChangeTime}
          loadMoreLogs={this.loadMoreLogs}
          onClickFilterLabel={this.logDetailsFilterAvailable() ? onClickFilterLabel : undefined}
          onClickFilterOutLabel={this.logDetailsFilterAvailable() ? onClickFilterOutLabel : undefined}
          onStartScanning={onStartScanning}

@@ -395,6 +401,7 @@ function mapStateToProps(state: StoreState, { exploreId }: { exploreId: string }

const mapDispatchToProps = {
  updateTimeRange,
  loadMoreLogs,
  addResultsToCache,
  clearCache,
  loadSupplementaryQueryData,
@@ -3,7 +3,7 @@ import { isEqual } from 'lodash';
import React, { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';

import { AbsoluteTimeRange, GrafanaTheme2, LogsSortOrder } from '@grafana/data';
import { reportInteraction } from '@grafana/runtime';
import { config, reportInteraction } from '@grafana/runtime';
import { DataQuery, TimeZone } from '@grafana/schema';
import { Button, Icon, Spinner, useTheme2 } from '@grafana/ui';
import { TOP_BAR_LEVEL_HEIGHT } from 'app/core/components/AppChrome/types';

@@ -175,6 +175,8 @@ function LogsNavigation({

  return (
    <div className={styles.navContainer}>
      {!config.featureToggles.logsInfiniteScrolling && (
        <>
          {oldestLogsFirst ? olderLogsButton : newerLogsButton}
          <LogsNavigationPages
            pages={pages}

@@ -185,6 +187,8 @@ function LogsNavigation({
            onClick={onPageClick}
          />
          {oldestLogsFirst ? newerLogsButton : olderLogsButton}
        </>
      )}
      <Button
        data-testid="scrollToTop"
        className={styles.scrollToTopButton}

@@ -207,7 +211,9 @@ const getStyles = (theme: GrafanaTheme2, oldestLogsFirst: boolean) => {
      max-height: ${navContainerHeight};
      display: flex;
      flex-direction: column;
      justify-content: ${oldestLogsFirst ? 'flex-start' : 'space-between'};
      ${config.featureToggles.logsInfiniteScrolling
        ? `justify-content: flex-end;`
        : `justify-content: ${oldestLogsFirst ? 'flex-start' : 'space-between'};`}
      position: sticky;
      top: ${theme.spacing(2)};
      right: 0;
@@ -11,7 +11,7 @@ import {
  SplitOpen,
  SupplementaryQueryType,
} from '@grafana/data';
import { config, reportInteraction } from '@grafana/runtime';
import { reportInteraction } from '@grafana/runtime';
import { DataQuery, TimeZone } from '@grafana/schema';
import { Button, Collapse, Icon, Tooltip, useStyles2 } from '@grafana/ui';
import store from 'app/core/store';

@@ -107,7 +107,6 @@ export function LogsSamplePanel(props: Props) {

  return queryResponse?.state !== LoadingState.NotStarted ? (
    <Collapse
      className={styles.logsSamplePanel}
      label={
        <div>
          Logs sample

@@ -126,20 +125,13 @@ export function LogsSamplePanel(props: Props) {
}

const getStyles = (theme: GrafanaTheme2) => {
  const scrollableLogsContainer = config.featureToggles.exploreScrollableLogsContainer;

  return {
    logsSamplePanel: css`
      ${scrollableLogsContainer && 'max-height: calc(100vh - 115px);'}
    `,
    logSamplesButton: css`
      position: absolute;
      top: ${theme.spacing(1)};
      right: ${theme.spacing(1)};
    `,
    logContainer: css`
      ${scrollableLogsContainer && 'position: relative;'}
      ${scrollableLogsContainer && 'height: 100%;'}
      overflow: scroll;
    `,
    infoTooltip: css`
@@ -162,11 +162,11 @@ describe('runQueries', () => {
    expect(getState().explore.panes.left!.graphResult).toBeDefined();
  });

  it('should modify the request-id for all supplementary queries', () => {
  it('should modify the request-id for all supplementary queries', async () => {
    const { dispatch, getState } = setupTests();
    setupQueryResponse(getState());
    dispatch(saveCorrelationsAction({ exploreId: 'left', correlations: [] }));
    dispatch(runQueries({ exploreId: 'left' }));
    await dispatch(runQueries({ exploreId: 'left' }));

    const state = getState().explore.panes.left!;
    expect(state.queryResponse.request?.requestId).toBe('explore_left');

@@ -899,7 +899,7 @@ describe('reducer', () => {
  });

  it('should cancel any unfinished supplementary queries when a new query is run', async () => {
    dispatch(runQueries({ exploreId: 'left' }));
    await dispatch(runQueries({ exploreId: 'left' }));
    // first query is run automatically
    // loading in progress - subscriptions for both supplementary queries are created, not cleaned up yet
    expect(unsubscribes).toHaveLength(2);

@@ -907,7 +907,7 @@ describe('reducer', () => {
    expect(unsubscribes[1]).not.toBeCalled();

    setupQueryResponse(getState());
    dispatch(runQueries({ exploreId: 'left' }));
    await dispatch(runQueries({ exploreId: 'left' }));
    // a new query is run while supplementary queries are not resolve yet...
    expect(unsubscribes[0]).toBeCalled();
    expect(unsubscribes[1]).toBeCalled();

@@ -917,8 +917,8 @@ describe('reducer', () => {
    expect(unsubscribes[3]).not.toBeCalled();
  });

  it('should cancel all supported supplementary queries when the main query is canceled', () => {
    dispatch(runQueries({ exploreId: 'left' }));
  it('should cancel all supported supplementary queries when the main query is canceled', async () => {
    await dispatch(runQueries({ exploreId: 'left' }));
    expect(unsubscribes).toHaveLength(2);
    expect(unsubscribes[0]).not.toBeCalled();
    expect(unsubscribes[1]).not.toBeCalled();

@@ -934,16 +934,16 @@ describe('reducer', () => {
    }
  });

  it('should load supplementary queries after running the query', () => {
    dispatch(runQueries({ exploreId: 'left' }));
  it('should load supplementary queries after running the query', async () => {
    await dispatch(runQueries({ exploreId: 'left' }));
    expect(unsubscribes).toHaveLength(2);
  });

  it('should clean any incomplete supplementary queries data when main query is canceled', () => {
  it('should clean any incomplete supplementary queries data when main query is canceled', async () => {
    mockDataProvider = () => {
      return of({ state: LoadingState.Loading, error: undefined, data: [] });
    };
    dispatch(runQueries({ exploreId: 'left' }));
    await dispatch(runQueries({ exploreId: 'left' }));

    for (const type of supplementaryQueryTypes) {
      expect(getState().explore.panes.left!.supplementaryQueries[type].data).toBeDefined();

@@ -970,7 +970,7 @@ describe('reducer', () => {
        { state: LoadingState.Done, error: undefined, data: [{}] }
      );
    };
    dispatch(runQueries({ exploreId: 'left' }));
    await dispatch(runQueries({ exploreId: 'left' }));

    for (const types of supplementaryQueryTypes) {
      expect(getState().explore.panes.left!.supplementaryQueries[types].data).toBeDefined();

@@ -987,7 +987,7 @@ describe('reducer', () => {
    }
  });

  it('do not load disabled supplementary query data', () => {
  it('do not load disabled supplementary query data', async () => {
    mockDataProvider = () => {
      return of({ state: LoadingState.Done, error: undefined, data: [{}] });
    };

@@ -999,7 +999,7 @@ describe('reducer', () => {
    expect(getState().explore.panes.left!.supplementaryQueries[SupplementaryQueryType.LogsSample].enabled).toBe(true);

    // verify that if we run a query, it will: 1) not do logs volume, 2) do logs sample 3) provider will still be set for both
    dispatch(runQueries({ exploreId: 'left' }));
    await dispatch(runQueries({ exploreId: 'left' }));

    expect(
      getState().explore.panes.left!.supplementaryQueries[SupplementaryQueryType.LogsVolume].data

@@ -1026,7 +1026,7 @@ describe('reducer', () => {
    dispatch(setSupplementaryQueryEnabled('left', false, SupplementaryQueryType.LogsSample));

    // runQueries sets up providers, but does not run queries
    dispatch(runQueries({ exploreId: 'left' }));
    await dispatch(runQueries({ exploreId: 'left' }));
    expect(
      getState().explore.panes.left!.supplementaryQueries[SupplementaryQueryType.LogsVolume].dataProvider
    ).toBeDefined();
@@ -10,6 +10,7 @@ import {
  DataQueryErrorType,
  DataQueryResponse,
  DataSourceApi,
  dateTimeForTimeZone,
  hasQueryExportSupport,
  hasQueryImportSupport,
  HistoryItem,

@@ -30,13 +31,14 @@ import {
  generateEmptyQuery,
  generateNewKeyAndAddRefIdIfMissing,
  getQueryKeys,
  getTimeRange,
  hasNonEmptyQuery,
  stopQueryState,
  updateHistory,
} from 'app/core/utils/explore';
import { getShiftedTimeRange } from 'app/core/utils/timePicker';
import { getCorrelationsBySourceUIDs } from 'app/features/correlations/utils';
import { getTimeZone } from 'app/features/profile/state/selectors';
import { getFiscalYearStartMonth, getTimeZone } from 'app/features/profile/state/selectors';
import { MIXED_DATASOURCE_NAME } from 'app/plugins/datasource/mixed/MixedDataSource';
import {
  createAsyncThunk,

@@ -53,7 +55,7 @@ import { notifyApp } from '../../../core/actions';
import { createErrorNotification } from '../../../core/copy/appNotification';
import { runRequest } from '../../query/state/runRequest';
import { visualisationTypeKey } from '../Logs/utils/logs';
import { decorateData } from '../utils/decorators';
import { decorateData, mergeDataSeries } from '../utils/decorators';
import {
  getSupplementaryQueryProvider,
  storeSupplementaryQueryEnabled,

@@ -65,7 +67,13 @@ import { saveCorrelationsAction } from './explorePane';
import { addHistoryItem, historyUpdatedAction, loadRichHistory } from './history';
import { changeCorrelationEditorDetails } from './main';
import { updateTime } from './time';
import { createCacheKey, filterLogRowsByIndex, getDatasourceUIDs, getResultsFromCache } from './utils';
import {
  createCacheKey,
  filterLogRowsByIndex,
  getCorrelationsData,
  getDatasourceUIDs,
  getResultsFromCache,
} from './utils';

/**
 * Derives from explore state if a given Explore pane is waiting for more data to be received

@@ -501,17 +509,21 @@ export const runQueries = createAsyncThunk<void, RunQueriesOptions>(
  async ({ exploreId, preserveCache }, { dispatch, getState }) => {
    dispatch(cancelQueries(exploreId));

    dispatch(updateTime({ exploreId }));

    const { defaultCorrelationEditorDatasource, scopedVars, showCorrelationEditorLinks } = await getCorrelationsData(
      getState(),
      exploreId
    );
    const correlations$ = getCorrelations(exploreId);

    dispatch(updateTime({ exploreId }));

    // We always want to clear cache unless we explicitly pass preserveCache parameter
    if (preserveCache !== true) {
      dispatch(clearCache(exploreId));
    }

    const exploreItemState = getState().explore.panes[exploreId]!;

    const exploreState = getState();
    const exploreItemState = exploreState.explore.panes[exploreId]!;
    const {
      datasourceInstance,
      containerWidth,

@@ -524,14 +536,8 @@ export const runQueries = createAsyncThunk<void, RunQueriesOptions>(
      absoluteRange,
      cache,
      supplementaryQueries,
      correlationEditorHelperData,
    } = exploreItemState;
    const isCorrelationEditorMode = getState().explore.correlationEditorDetails?.editorMode || false;
    const isLeftPane = Object.keys(getState().explore.panes)[0] === exploreId;
    const showCorrelationEditorLinks = isCorrelationEditorMode && isLeftPane;
    const defaultCorrelationEditorDatasource = showCorrelationEditorLinks ? await getDataSourceSrv().get() : undefined;
    const interpolateCorrelationHelperVars =
      isCorrelationEditorMode && !isLeftPane && correlationEditorHelperData !== undefined;

    let newQuerySource: Observable<ExplorePanelData>;
    let newQuerySubscription: SubscriptionLike;

@@ -591,13 +597,6 @@ export const runQueries = createAsyncThunk<void, RunQueriesOptions>(
      liveStreaming: live,
    };

    let scopedVars: ScopedVars = {};
    if (interpolateCorrelationHelperVars && correlationEditorHelperData !== undefined) {
      Object.entries(correlationEditorHelperData?.vars).forEach((variable) => {
        scopedVars[variable[0]] = { value: variable[1] };
      });
    }

    const timeZone = getTimeZone(getState().user);
    const transaction = buildQueryTransaction(
      exploreId,

@@ -702,6 +701,87 @@ export const runQueries = createAsyncThunk<void, RunQueriesOptions>(
  }
);

interface RunLoadMoreLogsQueriesOptions {
  exploreId: string;
  absoluteRange: AbsoluteTimeRange;
}
/**
 * Dedicated action to run log queries requesting more results.
 */
export const runLoadMoreLogsQueries = createAsyncThunk<void, RunLoadMoreLogsQueriesOptions>(
  'explore/runLoadMoreQueries',
  async ({ exploreId, absoluteRange }, { dispatch, getState }) => {
    dispatch(cancelQueries(exploreId));

    const { datasourceInstance, containerWidth, queryResponse } = getState().explore.panes[exploreId]!;
    const { defaultCorrelationEditorDatasource, scopedVars, showCorrelationEditorLinks } = await getCorrelationsData(
      getState(),
      exploreId
    );
    const correlations$ = getCorrelations(exploreId);

    let newQuerySource: Observable<ExplorePanelData>;

    const logQueries = queryResponse.logsResult?.queries || [];
    const queries = logQueries.map((query: DataQuery) => ({
      ...query,
      datasource: query.datasource || datasourceInstance?.getRef(),
    }));

    if (!hasNonEmptyQuery(queries) || !datasourceInstance) {
      return;
    }

    const queryOptions: QueryOptions = {
      minInterval: datasourceInstance?.interval,
      maxDataPoints: containerWidth,
    };

    const timeZone = getTimeZone(getState().user);
    const range = getTimeRange(
      timeZone,
      {
        from: dateTimeForTimeZone(timeZone, absoluteRange.from),
        to: dateTimeForTimeZone(timeZone, absoluteRange.to),
      },
      getFiscalYearStartMonth(getState().user)
    );
    const transaction = buildQueryTransaction(exploreId, queries, queryOptions, range, false, timeZone, scopedVars);

    dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Loading }));

    newQuerySource = combineLatest([runRequest(datasourceInstance, transaction.request), correlations$]).pipe(
      mergeMap(([data, correlations]) =>
        decorateData(
          // Query splitting, otherwise duplicates results
          data.state === LoadingState.Done ? mergeDataSeries(queryResponse, data) : data,
          queryResponse,
          absoluteRange,
          undefined,
          queries,
          correlations,
          showCorrelationEditorLinks,
          defaultCorrelationEditorDatasource
        )
      )
    );

    newQuerySource.subscribe({
      next(data) {
        dispatch(queryStreamUpdatedAction({ exploreId, response: data }));
      },
      error(error) {
        dispatch(notifyApp(createErrorNotification('Query processing error', error)));
        dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Error }));
        console.error(error);
      },
      complete() {
        dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Done }));
      },
    });
  }
);

const groupDataQueries = async (datasources: DataQuery[], scopedVars: ScopedVars) => {
  const nonMixedDataSources = datasources.filter((t) => {
    return t.datasource?.uid !== MIXED_DATASOURCE_NAME;
@@ -11,7 +11,7 @@ import { getFiscalYearStartMonth, getTimeZone } from 'app/features/profile/state
import { ExploreItemState, ThunkDispatch, ThunkResult } from 'app/types';

import { syncTimesAction } from './main';
import { runQueries } from './query';
import { runLoadMoreLogsQueries, runQueries } from './query';

//
// Actions and Payloads

@@ -54,6 +54,12 @@ export const updateTimeRange = (options: {
  };
};

export const loadMoreLogs = (options: { exploreId: string; absoluteRange: AbsoluteTimeRange }): ThunkResult<void> => {
  return (dispatch) => {
    dispatch(runLoadMoreLogsQueries({ ...options }));
  };
};

export const updateTime = (config: {
  exploreId: string;
  rawRange?: RawTimeRange;
@@ -13,15 +13,17 @@ import {
  LogRowModel,
  PanelData,
  RawTimeRange,
  ScopedVars,
  TimeFragment,
  TimeRange,
  toUtc,
  URLRange,
  URLRangeValue,
} from '@grafana/data';
import { getDataSourceSrv } from '@grafana/runtime';
import { DataQuery, DataSourceRef, TimeZone } from '@grafana/schema';
import { MIXED_DATASOURCE_NAME } from 'app/plugins/datasource/mixed/MixedDataSource';
import { ExplorePanelData } from 'app/types';
import { ExplorePanelData, StoreState } from 'app/types';
import { ExploreItemState } from 'app/types/explore';

import store from '../../../core/store';

@@ -235,3 +237,23 @@ export const getDatasourceUIDs = (datasourceUID: string, queries: DataQuery[]):
    return [datasourceUID];
  }
};

export async function getCorrelationsData(state: StoreState, exploreId: string) {
  const correlationEditorHelperData = state.explore.panes[exploreId]!.correlationEditorHelperData;

  const isCorrelationEditorMode = state.explore.correlationEditorDetails?.editorMode || false;
  const isLeftPane = Object.keys(state.explore.panes)[0] === exploreId;
  const showCorrelationEditorLinks = isCorrelationEditorMode && isLeftPane;
  const defaultCorrelationEditorDatasource = showCorrelationEditorLinks ? await getDataSourceSrv().get() : undefined;
  const interpolateCorrelationHelperVars =
    isCorrelationEditorMode && !isLeftPane && correlationEditorHelperData !== undefined;

  let scopedVars: ScopedVars = {};
  if (interpolateCorrelationHelperVars && correlationEditorHelperData !== undefined) {
    Object.entries(correlationEditorHelperData?.vars).forEach((variable) => {
      scopedVars[variable[0]] = { value: variable[1] };
    });
  }

  return { defaultCorrelationEditorDatasource, scopedVars, showCorrelationEditorLinks };
}
@@ -16,6 +16,7 @@ import {
} from '@grafana/data';
import { config } from '@grafana/runtime';
import { DataQuery } from '@grafana/schema';
import { combineResponses } from 'app/features/logs/response';

import { refreshIntervalToSortOrder } from '../../../core/utils/explore';
import { ExplorePanelData } from '../../../types';

@@ -311,6 +312,11 @@ export function decorateData(
  );
}

export function mergeDataSeries(currentData: PanelData, newData: PanelData): PanelData {
  currentData.series = combineResponses({ data: currentData.series }, { data: newData.series }).data;
  return currentData;
}

/**
 * Check if frame contains time series, which for our purpose means 1 time column and 1 or more numeric columns.
 */
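A note on how mergeDataSeries above is used: runLoadMoreLogsQueries merges the new response into the previous one only once the response reaches LoadingState.Done, so with query splitting the partial, still-loading emissions pass through untouched and are not combined twice. A rough sketch, using the names from this diff:

// Sketch: the merge step inside runLoadMoreLogsQueries (see the query.ts hunk above).
// combineResponses concatenates the new log frames into the existing series.
const decorated =
  data.state === LoadingState.Done
    ? mergeDataSeries(queryResponse, data) // append the newly loaded rows
    : data; // partial result while splitting: pass through to avoid duplicates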
public/app/features/logs/components/InfiniteScroll.test.tsx (new file, 263 lines)
@@ -0,0 +1,263 @@
import { act, render, screen } from '@testing-library/react';
import React, { useEffect, useRef, useState } from 'react';

import { LogRowModel, dateTimeForTimeZone } from '@grafana/data';
import { convertRawToRange } from '@grafana/data/src/datetime/rangeutil';
import { LogsSortOrder } from '@grafana/schema';

import { InfiniteScroll, Props, SCROLLING_THRESHOLD } from './InfiniteScroll';
import { createLogRow } from './__mocks__/logRow';

const defaultTz = 'browser';

const absoluteRange = {
  from: 1702578600000,
  to: 1702578900000,
};
const defaultRange = convertRawToRange({
  from: dateTimeForTimeZone(defaultTz, absoluteRange.from),
  to: dateTimeForTimeZone(defaultTz, absoluteRange.to),
});

const defaultProps: Omit<Props, 'children'> = {
  loading: false,
  loadMoreLogs: jest.fn(),
  range: defaultRange,
  rows: [],
  sortOrder: LogsSortOrder.Descending,
  timeZone: 'browser',
};

function ScrollWithWrapper({ children, ...props }: Props) {
  const [initialized, setInitialized] = useState(false);
  const scrollRef = useRef<HTMLDivElement | null>(null);

  useEffect(() => {
    // Required to get the ref
    if (scrollRef.current && !initialized) {
      setInitialized(true);
    }
  }, [initialized]);

  return (
    <div style={{ height: 40, overflowY: 'scroll' }} ref={scrollRef} data-testid="scroll-element">
      {initialized && (
        <InfiniteScroll {...props} scrollElement={scrollRef.current!}>
          {children}
        </InfiniteScroll>
      )}
    </div>
  );
}

describe('InfiniteScroll', () => {
  test('Wraps components without adding DOM elements', async () => {
    const { container } = render(
      <ScrollWithWrapper {...defaultProps}>
        <div data-testid="contents" />
      </ScrollWithWrapper>
    );

    expect(await screen.findByTestId('contents')).toBeInTheDocument();
    expect(container).toMatchInlineSnapshot(`
      <div>
        <div
          data-testid="scroll-element"
          style="height: 40px; overflow-y: scroll;"
        >
          <div
            data-testid="contents"
          />
        </div>
      </div>
    `);
  });

  describe.each([LogsSortOrder.Descending, LogsSortOrder.Ascending])(
    'When the sort order is %s',
    (order: LogsSortOrder) => {
      let rows: LogRowModel[];
      beforeEach(() => {
        rows = createLogRows(absoluteRange.from + 2 * SCROLLING_THRESHOLD, absoluteRange.to - 2 * SCROLLING_THRESHOLD);
      });

      function setup(loadMoreMock: () => void, startPosition: number) {
        const { element, events } = getMockElement(startPosition);
        render(
          <InfiniteScroll
            {...defaultProps}
            sortOrder={order}
            rows={rows}
            scrollElement={element as unknown as HTMLDivElement}
            loadMoreLogs={loadMoreMock}
          >
            <div data-testid="contents" style={{ height: 100 }} />
          </InfiniteScroll>
        );
        return { element, events };
      }

      test.each([
        ['top', 10, 0],
        ['bottom', 90, 100],
      ])('Requests more logs when scrolling %s', async (_: string, startPosition: number, endPosition: number) => {
        const loadMoreMock = jest.fn();
        const { element, events } = setup(loadMoreMock, startPosition);

        expect(await screen.findByTestId('contents')).toBeInTheDocument();
        element.scrollTop = endPosition;

        act(() => {
          events['scroll'](new Event('scroll'));
        });

        expect(loadMoreMock).toHaveBeenCalled();
        expect(await screen.findByTestId('Spinner')).toBeInTheDocument();
      });

      test.each([
        ['up', -5, 0],
        ['down', 5, 100],
      ])(
        'Requests more logs when moving the mousewheel %s',
        async (_: string, deltaY: number, startPosition: number) => {
          const loadMoreMock = jest.fn();
          const { events } = setup(loadMoreMock, startPosition);

          expect(await screen.findByTestId('contents')).toBeInTheDocument();

          act(() => {
            const event = new WheelEvent('wheel', { deltaY });
            events['wheel'](event);
          });

          expect(loadMoreMock).toHaveBeenCalled();
          expect(await screen.findByTestId('Spinner')).toBeInTheDocument();
        }
      );

      test('Does not request more logs when there is no scroll', async () => {
        const loadMoreMock = jest.fn();
        const { element, events } = setup(loadMoreMock, 0);

        expect(await screen.findByTestId('contents')).toBeInTheDocument();
        element.clientHeight = 40;
        element.scrollHeight = element.clientHeight;

        act(() => {
          events['scroll'](new Event('scroll'));
        });

        expect(loadMoreMock).not.toHaveBeenCalled();
        expect(screen.queryByTestId('Spinner')).not.toBeInTheDocument();
      });

      describe('With absolute range', () => {
        function setup(loadMoreMock: () => void, startPosition: number, rows: LogRowModel[]) {
          const { element, events } = getMockElement(startPosition);
          render(
            <InfiniteScroll
              {...defaultProps}
              sortOrder={order}
              rows={rows}
              scrollElement={element as unknown as HTMLDivElement}
              loadMoreLogs={loadMoreMock}
            >
              <div data-testid="contents" style={{ height: 100 }} />
            </InfiniteScroll>
          );
          return { element, events };
        }

        test.each([
          ['top', 10, 0],
          ['bottom', 90, 100],
        ])(
          'It does not request more when scrolling %s',
          async (_: string, startPosition: number, endPosition: number) => {
            const rows = createLogRows(absoluteRange.from, absoluteRange.to);
            const loadMoreMock = jest.fn();
            const { element, events } = setup(loadMoreMock, startPosition, rows);

            expect(await screen.findByTestId('contents')).toBeInTheDocument();
            element.scrollTop = endPosition;

            act(() => {
              events['scroll'](new Event('scroll'));
            });

            expect(loadMoreMock).not.toHaveBeenCalled();
            expect(screen.queryByTestId('Spinner')).not.toBeInTheDocument();
          }
        );
      });

      describe('With relative range', () => {
        function setup(loadMoreMock: () => void, startPosition: number, rows: LogRowModel[]) {
          const { element, events } = getMockElement(startPosition);
          render(
            <InfiniteScroll
              {...defaultProps}
              sortOrder={order}
              rows={rows}
              scrollElement={element as unknown as HTMLDivElement}
              loadMoreLogs={loadMoreMock}
            >
              <div data-testid="contents" style={{ height: 100 }} />
            </InfiniteScroll>
          );
          return { element, events };
        }

        test.each([
          ['top', 10, 0],
          ['bottom', 90, 100],
        ])(
          'It does not request more when scrolling %s',
          async (_: string, startPosition: number, endPosition: number) => {
            const rows = createLogRows(absoluteRange.from, absoluteRange.to);
            const loadMoreMock = jest.fn();
            const { element, events } = setup(loadMoreMock, startPosition, rows);

            expect(await screen.findByTestId('contents')).toBeInTheDocument();
            element.scrollTop = endPosition;

            act(() => {
              events['scroll'](new Event('scroll'));
            });

            expect(loadMoreMock).not.toHaveBeenCalled();
            expect(screen.queryByTestId('Spinner')).not.toBeInTheDocument();
          }
        );
      });
    }
  );
});

function createLogRows(from: number, to: number) {
  const rows = [createLogRow({ entry: 'line1' }), createLogRow({ entry: 'line2' })];
  // Time field
  rows[0].dataFrame.fields[0].values = [from, to];
  rows[0].timeEpochMs = from;
  rows[1].dataFrame.fields[0].values = [from, to];
  rows[1].timeEpochMs = to;
  return rows;
}

// JSDOM doesn't support layout, so we will mock the expected attribute values for the test cases.
function getMockElement(scrollTop: number) {
  const events: Record<string, (e: Event | WheelEvent) => void> = {};
  const element = {
    addEventListener: (event: string, callback: (e: Event | WheelEvent) => void) => {
      events[event] = callback;
    },
    removeEventListener: jest.fn(),
    stopImmediatePropagation: jest.fn(),
    scrollHeight: 100,
    clientHeight: 40,
    scrollTop,
  };

  return { element, events };
}
public/app/features/logs/components/InfiniteScroll.tsx (new file, 224 lines)
@@ -0,0 +1,224 @@
import { css } from '@emotion/css';
import React, { ReactNode, useEffect, useState } from 'react';

import { AbsoluteTimeRange, LogRowModel, TimeRange } from '@grafana/data';
import { convertRawToRange, isRelativeTime, isRelativeTimeRange } from '@grafana/data/src/datetime/rangeutil';
import { reportInteraction } from '@grafana/runtime';
import { LogsSortOrder, TimeZone } from '@grafana/schema';
import { Spinner } from '@grafana/ui';

export type Props = {
  children: ReactNode;
  loading: boolean;
  loadMoreLogs?: (range: AbsoluteTimeRange) => void;
  range: TimeRange;
  rows: LogRowModel[];
  scrollElement?: HTMLDivElement;
  sortOrder: LogsSortOrder;
  timeZone: TimeZone;
};

export const InfiniteScroll = ({
  children,
  loading,
  loadMoreLogs,
  range,
  rows,
  scrollElement,
  sortOrder,
  timeZone,
}: Props) => {
  const [upperOutOfRange, setUpperOutOfRange] = useState(false);
  const [lowerOutOfRange, setLowerOutOfRange] = useState(false);
  const [upperLoading, setUpperLoading] = useState(false);
  const [lowerLoading, setLowerLoading] = useState(false);
  const [lastScroll, setLastScroll] = useState(scrollElement?.scrollTop || 0);

  useEffect(() => {
    setUpperOutOfRange(false);
    setLowerOutOfRange(false);
  }, [range, rows, sortOrder]);

  useEffect(() => {
    if (!loading) {
      setUpperLoading(false);
      setLowerLoading(false);
    }
  }, [loading]);

  useEffect(() => {
    if (!scrollElement || !loadMoreLogs) {
      return;
    }

    function handleScroll(event: Event | WheelEvent) {
      if (!scrollElement || !loadMoreLogs || !rows.length || loading) {
        return;
      }
      event.stopImmediatePropagation();
      setLastScroll(scrollElement.scrollTop);
      const scrollDirection = shouldLoadMore(event, scrollElement, lastScroll);
      if (scrollDirection === ScrollDirection.NoScroll) {
        return;
      } else if (scrollDirection === ScrollDirection.Top) {
        scrollTop();
      } else {
        scrollBottom();
      }
    }

    function scrollTop() {
      if (!canScrollTop(getVisibleRange(rows), range, timeZone, sortOrder)) {
        setUpperOutOfRange(true);
        return;
      }
      setUpperOutOfRange(false);
      const newRange =
        sortOrder === LogsSortOrder.Descending
          ? getNextRange(getVisibleRange(rows), range, timeZone)
          : getPrevRange(getVisibleRange(rows), range);
      loadMoreLogs?.(newRange);
      setUpperLoading(true);
      reportInteraction('grafana_logs_infinite_scrolling', {
        direction: 'top',
        sort_order: sortOrder,
      });
    }

    function scrollBottom() {
      if (!canScrollBottom(getVisibleRange(rows), range, timeZone, sortOrder)) {
        setLowerOutOfRange(true);
        return;
      }
      setLowerOutOfRange(false);
      const newRange =
        sortOrder === LogsSortOrder.Descending
          ? getPrevRange(getVisibleRange(rows), range)
          : getNextRange(getVisibleRange(rows), range, timeZone);
      loadMoreLogs?.(newRange);
      setLowerLoading(true);
      reportInteraction('grafana_logs_infinite_scrolling', {
        direction: 'bottom',
        sort_order: sortOrder,
      });
    }

    scrollElement.addEventListener('scroll', handleScroll);
    scrollElement.addEventListener('wheel', handleScroll);

    return () => {
      scrollElement.removeEventListener('scroll', handleScroll);
      scrollElement.removeEventListener('wheel', handleScroll);
    };
  }, [lastScroll, loadMoreLogs, loading, range, rows, scrollElement, sortOrder, timeZone]);

  // We allow "now" to move when using relative time, so we hide the message so it doesn't flash.
  const hideTopMessage = sortOrder === LogsSortOrder.Descending && isRelativeTime(range.raw.to);
  const hideBottomMessage = sortOrder === LogsSortOrder.Ascending && isRelativeTime(range.raw.to);

  return (
    <>
      {upperLoading && loadingMessage}
      {!hideTopMessage && upperOutOfRange && outOfRangeMessage}
      {children}
      {!hideBottomMessage && lowerOutOfRange && outOfRangeMessage}
      {lowerLoading && loadingMessage}
    </>
  );
};

const styles = {
  limitReached: css({
    textAlign: 'center',
    padding: 0.25,
  }),
};

const outOfRangeMessage = <div className={styles.limitReached}>End of the selected time range.</div>;
const loadingMessage = (
  <div className={styles.limitReached}>
    <Spinner />
  </div>
);

enum ScrollDirection {
  Top = -1,
  Bottom = 1,
  NoScroll = 0,
}
function shouldLoadMore(event: Event | WheelEvent, element: HTMLDivElement, lastScroll: number): ScrollDirection {
  // Disable behavior if there is no scroll
  if (element.scrollHeight <= element.clientHeight) {
    return ScrollDirection.NoScroll;
  }
  const delta = event instanceof WheelEvent ? event.deltaY : element.scrollTop - lastScroll;
  if (delta === 0) {
    return ScrollDirection.NoScroll;
  }
  const scrollDirection = delta < 0 ? ScrollDirection.Top : ScrollDirection.Bottom;
  const diff =
    scrollDirection === ScrollDirection.Top
      ? element.scrollTop
      : element.scrollHeight - element.scrollTop - element.clientHeight;
  const coef = 1;

  return diff <= coef ? scrollDirection : ScrollDirection.NoScroll;
}

function getVisibleRange(rows: LogRowModel[]) {
  const firstTimeStamp = rows[0].timeEpochMs;
  const lastTimeStamp = rows[rows.length - 1].timeEpochMs;

  const visibleRange =
    lastTimeStamp < firstTimeStamp
      ? { from: lastTimeStamp, to: firstTimeStamp }
      : { from: firstTimeStamp, to: lastTimeStamp };

  return visibleRange;
}

function getPrevRange(visibleRange: AbsoluteTimeRange, currentRange: TimeRange) {
  return { from: currentRange.from.valueOf(), to: visibleRange.from };
}

function getNextRange(visibleRange: AbsoluteTimeRange, currentRange: TimeRange, timeZone: TimeZone) {
  // When requesting new logs, update the current range if using relative time ranges.
  currentRange = updateCurrentRange(currentRange, timeZone);
  return { from: visibleRange.to, to: currentRange.to.valueOf() };
}

export const SCROLLING_THRESHOLD = 1e3;

// To get more logs, the difference between the visible range and the current range should be 1 second or more.
function canScrollTop(
  visibleRange: AbsoluteTimeRange,
  currentRange: TimeRange,
  timeZone: TimeZone,
  sortOrder: LogsSortOrder
) {
  if (sortOrder === LogsSortOrder.Descending) {
    // When requesting new logs, update the current range if using relative time ranges.
    currentRange = updateCurrentRange(currentRange, timeZone);
    return currentRange.to.valueOf() - visibleRange.to > SCROLLING_THRESHOLD;
  }
  return Math.abs(currentRange.from.valueOf() - visibleRange.from) > SCROLLING_THRESHOLD;
}

function canScrollBottom(
  visibleRange: AbsoluteTimeRange,
  currentRange: TimeRange,
  timeZone: TimeZone,
  sortOrder: LogsSortOrder
) {
  if (sortOrder === LogsSortOrder.Descending) {
    return Math.abs(currentRange.from.valueOf() - visibleRange.from) > SCROLLING_THRESHOLD;
  }
  // When requesting new logs, update the current range if using relative time ranges.
  currentRange = updateCurrentRange(currentRange, timeZone);
  return currentRange.to.valueOf() - visibleRange.to > SCROLLING_THRESHOLD;
}

// Given a TimeRange, returns a new instance if using relative time, or else the same.
function updateCurrentRange(timeRange: TimeRange, timeZone: TimeZone) {
  return isRelativeTimeRange(timeRange.raw) ? convertRawToRange(timeRange.raw, timeZone) : timeRange;
}
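To make the SCROLLING_THRESHOLD checks above concrete, a small worked example (epoch milliseconds; the numbers are invented for illustration):

// Descending sort order, SCROLLING_THRESHOLD = 1000 (1 second):
// currentRange = { from: 1702578600000, to: 1702578900000 }
// visibleRange = getVisibleRange(rows) = { from: 1702578650000, to: 1702578880000 }
//
// canScrollTop:    1702578900000 - 1702578880000 = 20000 > 1000  -> newer logs may exist above
// canScrollBottom: |1702578600000 - 1702578650000| = 50000 > 1000 -> older logs may exist below
//
// If either difference were <= 1000, that edge would show
// "End of the selected time range." instead of requesting more logs.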
@@ -3,7 +3,6 @@ import memoizeOne from 'memoize-one';
import tinycolor from 'tinycolor2';

import { colorManipulator, GrafanaTheme2, LogLevel } from '@grafana/data';
import { config } from '@grafana/runtime';
import { styleMixins } from '@grafana/ui';

export const getLogLevelStyles = (theme: GrafanaTheme2, logLevel?: LogLevel) => {

@@ -44,7 +43,6 @@ export const getLogLevelStyles = (theme: GrafanaTheme2, logLevel?: LogLevel) =>
export const getLogRowStyles = memoizeOne((theme: GrafanaTheme2) => {
  const hoverBgColor = styleMixins.hoverColor(theme.colors.background.secondary, theme);
  const contextOutlineColor = tinycolor(theme.components.dashboard.background).setAlpha(0.7).toRgbString();
  const scrollableLogsContainer = config.featureToggles.exploreScrollableLogsContainer;
  return {
    logsRowLevel: css`
      label: logs-row__level;

@@ -75,7 +73,6 @@ export const getLogRowStyles = memoizeOne((theme: GrafanaTheme2) => {
      font-family: ${theme.typography.fontFamilyMonospace};
      font-size: ${theme.typography.bodySmall.fontSize};
      width: 100%;
      ${!scrollableLogsContainer && `margin-bottom: ${theme.spacing(2.25)};`}
      position: relative;
    `,
    logsRowsTableContain: css`
@ -2,6 +2,7 @@ import { Observable } from 'rxjs';
|
||||
|
||||
import {
|
||||
arrayToDataFrame,
|
||||
createDataFrame,
|
||||
DataFrame,
|
||||
DataQuery,
|
||||
DataQueryRequest,
|
||||
@ -16,10 +17,10 @@ import {
|
||||
LogsMetaKind,
|
||||
LogsVolumeCustomMetaData,
|
||||
LogsVolumeType,
|
||||
MutableDataFrame,
|
||||
sortDataFrame,
|
||||
toDataFrame,
|
||||
} from '@grafana/data';
|
||||
import { config } from '@grafana/runtime';
|
||||
|
||||
import { MockObservableDataSourceApi } from '../../../test/mocks/datasource_srv';
|
||||
|
||||
@ -244,7 +245,7 @@ describe('dataFrameToLogsModel', () => {
|
||||
|
||||
it('given series without a time field should return empty logs model', () => {
|
||||
const series: DataFrame[] = [
|
||||
new MutableDataFrame({
|
||||
createDataFrame({
|
||||
fields: [
|
||||
{
|
||||
name: 'message',
|
||||
@ -259,7 +260,7 @@ describe('dataFrameToLogsModel', () => {
|
||||
|
||||
it('given series without a string field should return empty logs model', () => {
|
||||
const series: DataFrame[] = [
|
||||
new MutableDataFrame({
|
||||
createDataFrame({
|
||||
fields: [
|
||||
{
|
||||
name: 'time',
|
||||
@ -274,7 +275,7 @@ describe('dataFrameToLogsModel', () => {
|
||||
|
||||
it('given one series should return expected logs model', () => {
|
||||
const series: DataFrame[] = [
|
||||
new MutableDataFrame({
|
||||
createDataFrame({
|
||||
fields: [
|
||||
{
|
||||
name: 'time',
|
||||
@ -358,9 +359,48 @@ describe('dataFrameToLogsModel', () => {
|
||||
});
|
||||
});
|
||||
|
||||
  it('with infinite scrolling enabled it should return expected logs model', () => {
    config.featureToggles.logsInfiniteScrolling = true;

    const series: DataFrame[] = [
      createDataFrame({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['2019-04-26T09:28:11.352440161Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: ['t=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server'],
            labels: {},
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['foo'],
          },
        ],
        meta: {
          limit: 1000,
        },
        refId: 'A',
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.meta![0]).toMatchObject({
      label: LIMIT_LABEL,
      value: `1000 (1 displayed)`,
      kind: LogsMetaKind.String,
    });

    config.featureToggles.logsInfiniteScrolling = false;
  });

  it('given one series with limit as custom meta property should return correct limit', () => {
    const series: DataFrame[] = [
-      new MutableDataFrame({
+      createDataFrame({
        fields: [
          {
            name: 'time',
@@ -402,7 +442,7 @@ describe('dataFrameToLogsModel', () => {

  it('given one series with labels-field should return expected logs model', () => {
    const series: DataFrame[] = [
-      new MutableDataFrame({
+      createDataFrame({
        fields: [
          {
            name: 'labels',
@@ -516,15 +556,15 @@ describe('dataFrameToLogsModel', () => {
      type: FieldType.string,
      values: ['line1'],
    };
-    const frame1 = new MutableDataFrame({
+    const frame1 = createDataFrame({
      fields: [labels, time, line],
    });

-    const frame2 = new MutableDataFrame({
+    const frame2 = createDataFrame({
      fields: [time, labels, line],
    });

-    const frame3 = new MutableDataFrame({
+    const frame3 = createDataFrame({
      fields: [time, line, labels],
    });

@@ -543,7 +583,7 @@ describe('dataFrameToLogsModel', () => {

  it('given one series with error should return expected logs model', () => {
    const series: DataFrame[] = [
-      new MutableDataFrame({
+      createDataFrame({
        fields: [
          {
            name: 'time',
@@ -619,7 +659,7 @@ describe('dataFrameToLogsModel', () => {

  it('given one series without labels should return expected logs model', () => {
    const series: DataFrame[] = [
-      new MutableDataFrame({
+      createDataFrame({
        fields: [
          {
            name: 'time',
@@ -910,7 +950,7 @@ describe('dataFrameToLogsModel', () => {

  it('should return expected line limit meta info when returned number of series equal the log limit', () => {
    const series: DataFrame[] = [
-      new MutableDataFrame({
+      createDataFrame({
        fields: [
          {
            name: 'time',

@@ -40,6 +40,7 @@ import {
  toUtc,
} from '@grafana/data';
import { SIPrefix } from '@grafana/data/src/valueFormats/symbolFormatters';
+import { config } from '@grafana/runtime';
import { BarAlignment, GraphDrawStyle, StackingMode } from '@grafana/schema';
import { ansicolor, colors } from '@grafana/ui';
import { getThemeColor } from 'app/core/utils/colors';
@@ -532,7 +533,10 @@ export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[]

// Used to add additional information to Line limit meta info
function adjustMetaInfo(logsModel: LogsModel, visibleRangeMs?: number, requestedRangeMs?: number): LogsMetaItem[] {
-  let logsModelMeta = [...logsModel.meta!];
+  if (!logsModel.meta) {
+    return [];
+  }
+  let logsModelMeta = [...logsModel.meta];

  const limitIndex = logsModelMeta.findIndex((meta) => meta.label === LIMIT_LABEL);
  const limit = limitIndex >= 0 && logsModelMeta[limitIndex]?.value;
@@ -547,7 +551,8 @@ function adjustMetaInfo(logsModel: LogsModel, visibleRangeMs?: number, requested
        visibleRangeMs
      )}) of your selected time range (${rangeUtil.msRangeToTimeString(requestedRangeMs)})`;
  } else {
-    metaLimitValue = `${limit} (${logsModel.rows.length} returned)`;
+    const description = config.featureToggles.logsInfiniteScrolling ? 'displayed' : 'returned';
+    metaLimitValue = `${limit} (${logsModel.rows.length} ${description})`;
  }

  logsModelMeta[limitIndex] = {
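
Illustration of the effect (matching the unit test added earlier in this diff; `LIMIT_LABEL` is the 'Line limit' constant from logs_model.ts): with a line limit of 1000 and one row rendered, the adjusted meta entry comes out as sketched below.

// With config.featureToggles.logsInfiniteScrolling enabled:
//   { label: LIMIT_LABEL, value: '1000 (1 displayed)', kind: LogsMetaKind.String }
// With the toggle disabled, the previous wording is kept:
//   { label: LIMIT_LABEL, value: '1000 (1 returned)', kind: LogsMetaKind.String }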

public/app/features/logs/response.test.ts (new file, 411 lines)
@@ -0,0 +1,411 @@
import { DataQueryResponse, QueryResultMetaStat } from '@grafana/data';
import { getMockFrames } from 'app/plugins/datasource/loki/__mocks__/frames';

import { cloneQueryResponse, combineResponses } from './response';

describe('cloneQueryResponse', () => {
  const { logFrameA } = getMockFrames();
  const responseA: DataQueryResponse = {
    data: [logFrameA],
  };
  it('clones query responses', () => {
    const clonedA = cloneQueryResponse(responseA);
    expect(clonedA).not.toBe(responseA);
    // compare the clone against the source response, not against itself
    expect(clonedA).toEqual(responseA);
  });
});

describe('combineResponses', () => {
  it('combines logs frames', () => {
    const { logFrameA, logFrameB } = getMockFrames();
    const responseA: DataQueryResponse = {
      data: [logFrameA],
    };
    const responseB: DataQueryResponse = {
      data: [logFrameB],
    };
    expect(combineResponses(responseA, responseB)).toEqual({
      data: [
        {
          fields: [
            {
              config: {},
              name: 'Time',
              type: 'time',
              values: [1, 2, 3, 4],
            },
            {
              config: {},
              name: 'Line',
              type: 'string',
              values: ['line3', 'line4', 'line1', 'line2'],
            },
            {
              config: {},
              name: 'labels',
              type: 'other',
              values: [
                {
                  otherLabel: 'other value',
                },
                {
                  label: 'value',
                },
                {
                  otherLabel: 'other value',
                },
              ],
            },
            {
              config: {},
              name: 'tsNs',
              type: 'string',
              values: ['1000000', '2000000', '3000000', '4000000'],
            },
            {
              config: {},
              name: 'id',
              type: 'string',
              values: ['id3', 'id4', 'id1', 'id2'],
            },
          ],
          length: 4,
          meta: {
            custom: {
              frameType: 'LabeledTimeValues',
            },
            stats: [
              {
                displayName: 'Summary: total bytes processed',
                unit: 'decbytes',
                value: 33,
              },
            ],
          },
          refId: 'A',
        },
      ],
    });
  });

  it('combines metric frames', () => {
    const { metricFrameA, metricFrameB } = getMockFrames();
    const responseA: DataQueryResponse = {
      data: [metricFrameA],
    };
    const responseB: DataQueryResponse = {
      data: [metricFrameB],
    };
    expect(combineResponses(responseA, responseB)).toEqual({
      data: [
        {
          fields: [
            {
              config: {},
              name: 'Time',
              type: 'time',
              values: [1000000, 2000000, 3000000, 4000000],
            },
            {
              config: {},
              name: 'Value',
              type: 'number',
              values: [6, 7, 5, 4],
              labels: {
                level: 'debug',
              },
            },
          ],
          length: 4,
          meta: {
            type: 'timeseries-multi',
            stats: [
              {
                displayName: 'Summary: total bytes processed',
                unit: 'decbytes',
                value: 33,
              },
            ],
          },
          refId: 'A',
        },
      ],
    });
  });

  it('combines and identifies new frames in the response', () => {
    const { metricFrameA, metricFrameB, metricFrameC } = getMockFrames();
    const responseA: DataQueryResponse = {
      data: [metricFrameA],
    };
    const responseB: DataQueryResponse = {
      data: [metricFrameB, metricFrameC],
    };
    expect(combineResponses(responseA, responseB)).toEqual({
      data: [
        {
          fields: [
            {
              config: {},
              name: 'Time',
              type: 'time',
              values: [1000000, 2000000, 3000000, 4000000],
            },
            {
              config: {},
              name: 'Value',
              type: 'number',
              values: [6, 7, 5, 4],
              labels: {
                level: 'debug',
              },
            },
          ],
          length: 4,
          meta: {
            type: 'timeseries-multi',
            stats: [
              {
                displayName: 'Summary: total bytes processed',
                unit: 'decbytes',
                value: 33,
              },
            ],
          },
          refId: 'A',
        },
        metricFrameC,
      ],
    });
  });

  it('combines frames prioritizing refIds over names', () => {
    const { metricFrameA, metricFrameB } = getMockFrames();
    const dataFrameA = {
      ...metricFrameA,
      refId: 'A',
      name: 'A',
    };
    const dataFrameB = {
      ...metricFrameB,
      refId: 'B',
      name: 'A',
    };
    const responseA: DataQueryResponse = {
      data: [dataFrameA],
    };
    const responseB: DataQueryResponse = {
      data: [dataFrameB],
    };
    expect(combineResponses(responseA, responseB)).toEqual({
      data: [dataFrameA, dataFrameB],
    });
  });

  it('combines frames in a new response instance', () => {
    const { metricFrameA, metricFrameB } = getMockFrames();
    const responseA: DataQueryResponse = {
      data: [metricFrameA],
    };
    const responseB: DataQueryResponse = {
      data: [metricFrameB],
    };
    expect(combineResponses(null, responseA)).not.toBe(responseA);
    expect(combineResponses(null, responseB)).not.toBe(responseB);
  });

  it('combine when first param has errors', () => {
    const { metricFrameA, metricFrameB } = getMockFrames();
    const errorA = {
      message: 'errorA',
    };
    const responseA: DataQueryResponse = {
      data: [metricFrameA],
      error: errorA,
      errors: [errorA],
    };
    const responseB: DataQueryResponse = {
      data: [metricFrameB],
    };

    const combined = combineResponses(responseA, responseB);
    expect(combined.data[0].length).toBe(4);
    expect(combined.error?.message).toBe('errorA');
    expect(combined.errors).toHaveLength(1);
    expect(combined.errors?.[0]?.message).toBe('errorA');
  });

  it('combine when second param has errors', () => {
    const { metricFrameA, metricFrameB } = getMockFrames();
    const responseA: DataQueryResponse = {
      data: [metricFrameA],
    };
    const errorB = {
      message: 'errorB',
    };
    const responseB: DataQueryResponse = {
      data: [metricFrameB],
      error: errorB,
      errors: [errorB],
    };

    const combined = combineResponses(responseA, responseB);
    expect(combined.data[0].length).toBe(4);
    expect(combined.error?.message).toBe('errorB');
    expect(combined.errors).toHaveLength(1);
    expect(combined.errors?.[0]?.message).toBe('errorB');
  });

  it('combine when both params have errors', () => {
    const { metricFrameA, metricFrameB } = getMockFrames();
    const errorA = {
      message: 'errorA',
    };
    const errorB = {
      message: 'errorB',
    };
    const responseA: DataQueryResponse = {
      data: [metricFrameA],
      error: errorA,
      errors: [errorA],
    };
    const responseB: DataQueryResponse = {
      data: [metricFrameB],
      error: errorB,
      errors: [errorB],
    };

    const combined = combineResponses(responseA, responseB);
    expect(combined.data[0].length).toBe(4);
    expect(combined.error?.message).toBe('errorA');
    expect(combined.errors).toHaveLength(2);
    expect(combined.errors?.[0]?.message).toBe('errorA');
    expect(combined.errors?.[1]?.message).toBe('errorB');
  });

  it('combines frames with nanoseconds', () => {
    const { logFrameA, logFrameB } = getMockFrames();
    logFrameA.fields[0].nanos = [333333, 444444];
    logFrameB.fields[0].nanos = [111111, 222222];
    const responseA: DataQueryResponse = {
      data: [logFrameA],
    };
    const responseB: DataQueryResponse = {
      data: [logFrameB],
    };
    expect(combineResponses(responseA, responseB)).toEqual({
      data: [
        {
          fields: [
            {
              config: {},
              name: 'Time',
              type: 'time',
              values: [1, 2, 3, 4],
              nanos: [111111, 222222, 333333, 444444],
            },
            {
              config: {},
              name: 'Line',
              type: 'string',
              values: ['line3', 'line4', 'line1', 'line2'],
            },
            {
              config: {},
              name: 'labels',
              type: 'other',
              values: [
                {
                  otherLabel: 'other value',
                },
                {
                  label: 'value',
                },
                {
                  otherLabel: 'other value',
                },
              ],
            },
            {
              config: {},
              name: 'tsNs',
              type: 'string',
              values: ['1000000', '2000000', '3000000', '4000000'],
            },
            {
              config: {},
              name: 'id',
              type: 'string',
              values: ['id3', 'id4', 'id1', 'id2'],
            },
          ],
          length: 4,
          meta: {
            custom: {
              frameType: 'LabeledTimeValues',
            },
            stats: [
              {
                displayName: 'Summary: total bytes processed',
                unit: 'decbytes',
                value: 33,
              },
            ],
          },
          refId: 'A',
        },
      ],
    });
  });

  describe('combine stats', () => {
    const { metricFrameA } = getMockFrames();
    const makeResponse = (stats?: QueryResultMetaStat[]): DataQueryResponse => ({
      data: [
        {
          ...metricFrameA,
          meta: {
            ...metricFrameA.meta,
            stats,
          },
        },
      ],
    });
    it('two values', () => {
      const responseA = makeResponse([
        { displayName: 'Ingester: total reached', value: 1 },
        { displayName: 'Summary: total bytes processed', unit: 'decbytes', value: 11 },
      ]);
      const responseB = makeResponse([
        { displayName: 'Ingester: total reached', value: 2 },
        { displayName: 'Summary: total bytes processed', unit: 'decbytes', value: 22 },
      ]);

      expect(combineResponses(responseA, responseB).data[0].meta.stats).toStrictEqual([
        { displayName: 'Summary: total bytes processed', unit: 'decbytes', value: 33 },
      ]);
    });

    it('one value', () => {
      const responseA = makeResponse([
        { displayName: 'Ingester: total reached', value: 1 },
        { displayName: 'Summary: total bytes processed', unit: 'decbytes', value: 11 },
      ]);
      const responseB = makeResponse();

      expect(combineResponses(responseA, responseB).data[0].meta.stats).toStrictEqual([
        { displayName: 'Summary: total bytes processed', unit: 'decbytes', value: 11 },
      ]);

      expect(combineResponses(responseB, responseA).data[0].meta.stats).toStrictEqual([
        { displayName: 'Summary: total bytes processed', unit: 'decbytes', value: 11 },
      ]);
    });

    it('no value', () => {
      const responseA = makeResponse();
      const responseB = makeResponse();
      expect(combineResponses(responseA, responseB).data[0].meta.stats).toHaveLength(0);
    });
  });
});

public/app/features/logs/response.ts (new file, 150 lines)
@@ -0,0 +1,150 @@
import {
  DataFrame,
  DataFrameType,
  DataQueryResponse,
  DataQueryResponseData,
  Field,
  FieldType,
  QueryResultMetaStat,
  shallowCompare,
} from '@grafana/data';

export function combineResponses(currentResult: DataQueryResponse | null, newResult: DataQueryResponse) {
  if (!currentResult) {
    return cloneQueryResponse(newResult);
  }

  newResult.data.forEach((newFrame) => {
    const currentFrame = currentResult.data.find((frame) => shouldCombine(frame, newFrame));
    if (!currentFrame) {
      currentResult.data.push(cloneDataFrame(newFrame));
      return;
    }
    combineFrames(currentFrame, newFrame);
  });

  const mergedErrors = [...(currentResult.errors ?? []), ...(newResult.errors ?? [])];

  // make sure `.errors` stays undefined, rather than an empty array, when there are no errors
  if (mergedErrors.length > 0) {
    currentResult.errors = mergedErrors;
  }

  // the `.error` attribute is obsolete now, but we have to maintain it, otherwise
  // some parts of Grafana do not behave well. We keep the old error if it exists,
  // otherwise the new one, if any.
  const mergedError = currentResult.error ?? newResult.error;
  if (mergedError != null) {
    currentResult.error = mergedError;
  }

  const mergedTraceIds = [...(currentResult.traceIds ?? []), ...(newResult.traceIds ?? [])];
  if (mergedTraceIds.length > 0) {
    currentResult.traceIds = mergedTraceIds;
  }

  return currentResult;
}
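
A minimal usage sketch, assuming a caller that receives partial responses one at a time (for example from query splitting or the infinite-scroll loader); the `partials` array and the `accumulate` helper are hypothetical:

import { DataQueryResponse } from '@grafana/data';

// Fold a stream of partial responses into one combined response.
// The first call clones; later calls merge in place and return the accumulator.
function accumulate(partials: DataQueryResponse[]): DataQueryResponse | null {
  let combined: DataQueryResponse | null = null;
  for (const partial of partials) {
    combined = combineResponses(combined, partial);
  }
  return combined;
}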

function combineFrames(dest: DataFrame, source: DataFrame) {
  const totalFields = dest.fields.length;
  for (let i = 0; i < totalFields; i++) {
    dest.fields[i].values = [].concat.apply(source.fields[i].values, dest.fields[i].values);
    if (source.fields[i].nanos) {
      const nanos: number[] = dest.fields[i].nanos?.slice() || [];
      dest.fields[i].nanos = source.fields[i].nanos?.concat(nanos);
    }
  }
  dest.length += source.length;
  dest.meta = {
    ...dest.meta,
    stats: getCombinedMetadataStats(dest.meta?.stats ?? [], source.meta?.stats ?? []),
  };
}
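
Note the ordering: `source` values are prepended ahead of `dest` values, and `nanos` follows the same rule, which is exactly what the 'combines frames with nanoseconds' test above pins down. A worked example of that merge:

// dest:   Time values [3, 4], nanos [333333, 444444]
// source: Time values [1, 2], nanos [111111, 222222]
// after combineFrames(dest, source):
//   dest Time values -> [1, 2, 3, 4]
//   dest Time nanos  -> [111111, 222222, 333333, 444444]
//   dest.length      -> 4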

const TOTAL_BYTES_STAT = 'Summary: total bytes processed';

function getCombinedMetadataStats(
  destStats: QueryResultMetaStat[],
  sourceStats: QueryResultMetaStat[]
): QueryResultMetaStat[] {
  // in the current approach, we only handle a single stat
  const destStat = destStats.find((s) => s.displayName === TOTAL_BYTES_STAT);
  const sourceStat = sourceStats.find((s) => s.displayName === TOTAL_BYTES_STAT);

  if (sourceStat != null && destStat != null) {
    return [{ value: sourceStat.value + destStat.value, displayName: TOTAL_BYTES_STAT, unit: destStat.unit }];
  }

  // maybe one of them exists
  const eitherStat = sourceStat ?? destStat;
  if (eitherStat != null) {
    return [eitherStat];
  }

  return [];
}

/**
 * Deep clones a DataQueryResponse
 */
export function cloneQueryResponse(response: DataQueryResponse): DataQueryResponse {
  const newResponse = {
    ...response,
    data: response.data.map(cloneDataFrame),
  };
  return newResponse;
}

function cloneDataFrame(frame: DataQueryResponseData): DataQueryResponseData {
  return {
    ...frame,
    fields: frame.fields.map((field: Field) => ({
      ...field,
      values: field.values,
    })),
  };
}

function shouldCombine(frame1: DataFrame, frame2: DataFrame): boolean {
  if (frame1.refId !== frame2.refId) {
    return false;
  }

  const frameType1 = frame1.meta?.type;
  const frameType2 = frame2.meta?.type;

  if (frameType1 !== frameType2) {
    // we do not join things that have a different type
    return false;
  }

  // metric range query data
  if (frameType1 === DataFrameType.TimeSeriesMulti) {
    const field1 = frame1.fields.find((f) => f.type === FieldType.number);
    const field2 = frame2.fields.find((f) => f.type === FieldType.number);
    if (field1 === undefined || field2 === undefined) {
      // should never happen
      return false;
    }

    return shallowCompare(field1.labels ?? {}, field2.labels ?? {});
  }

  // logs query data
  // logs use a special attribute in the dataframe's "custom" section
  // because we do not have a good "frametype" value for them yet.
  const customType1 = frame1.meta?.custom?.frameType;
  const customType2 = frame2.meta?.custom?.frameType;

  if (customType1 === 'LabeledTimeValues' && customType2 === 'LabeledTimeValues') {
    return true;
  }

  // should never reach here
  return false;
}
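
In short, frames merge only when they demonstrably belong to the same series. A hedged sketch of two frames that would pass the check (`toDataFrame` is used here for brevity; the real call sites pass Loki response frames):

import { DataFrameType, FieldType, toDataFrame } from '@grafana/data';

// Same refId, same meta.type, identical (empty) label sets -> eligible for combining.
const first = toDataFrame({ refId: 'A', fields: [{ name: 'Value', type: FieldType.number, values: [1] }] });
first.meta = { type: DataFrameType.TimeSeriesMulti };
const second = toDataFrame({ refId: 'A', fields: [{ name: 'Value', type: FieldType.number, values: [2] }] });
second.meta = { type: DataFrameType.TimeSeriesMulti };
// shouldCombine(first, second) === true (module-private; shown for intuition only)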

@@ -14,12 +14,12 @@ import {
  TimeRange,
  LoadingState,
} from '@grafana/data';
+import { combineResponses } from 'app/features/logs/response';

import { LokiDatasource } from './datasource';
import { splitTimeRange as splitLogsTimeRange } from './logsTimeSplitting';
import { splitTimeRange as splitMetricTimeRange } from './metricTimeSplitting';
import { isLogsQuery, isQueryWithRangeVariable } from './queryUtils';
-import { combineResponses } from './responseUtils';
import { trackGroupedQueries } from './tracking';
import { LokiGroupedRequest, LokiQuery, LokiQueryType } from './types';

@@ -1,8 +1,7 @@
import { cloneDeep } from 'lodash';

-import { DataQueryResponse, QueryResultMetaStat, DataFrame, FieldType } from '@grafana/data';
+import { DataFrame, FieldType } from '@grafana/data';

import { getMockFrames } from './__mocks__/frames';
import {
  dataFrameHasLevelLabel,
  dataFrameHasLokiError,
@@ -10,8 +9,6 @@ import {
  extractLogParserFromDataFrame,
  extractLabelKeysFromDataFrame,
  extractUnwrapLabelKeysFromDataFrame,
-  cloneQueryResponse,
-  combineResponses,
} from './responseUtils';
import { LabelType } from './types';

@@ -169,410 +166,3 @@ describe('extractUnwrapLabelKeysFromDataFrame', () => {
    expect(extractUnwrapLabelKeysFromDataFrame(input)).toEqual(['number']);
  });
});

[407 lines removed here: the 'cloneQueryResponse' and 'combineResponses' test suites deleted from responseUtils.test.ts are verbatim duplicates of the suites added in public/app/features/logs/response.test.ts above.]

@@ -1,15 +1,4 @@
-import {
-  DataFrame,
-  DataFrameType,
-  DataQueryResponse,
-  DataQueryResponseData,
-  Field,
-  FieldType,
-  isValidGoDuration,
-  Labels,
-  QueryResultMetaStat,
-  shallowCompare,
-} from '@grafana/data';
+import { DataFrame, FieldType, isValidGoDuration, Labels } from '@grafana/data';

import { isBytesString } from './languageUtils';
import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser';
@@ -133,143 +122,3 @@ export function extractLevelLikeLabelFromDataFrame(frame: DataFrame): string | null {
  }
  return levelLikeLabel;
}

[140 lines removed here: shouldCombine, combineResponses, combineFrames, getCombinedMetadataStats, cloneQueryResponse, and cloneDataFrame were deleted from responseUtils.ts; they now live, unchanged, in public/app/features/logs/response.ts above.]