Mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00
Explore: Add caching for queries run from logs navigation (#34297)
* WIP: Implement simple caching
* If results are cached, don't run new query and use those results
* Add duplicate key check
* Clean up
* Clean up
* Add tests for caching
* Remove unused variables
* Update public/app/features/explore/state/query.test.ts
  Co-authored-by: Piotr Jamróz <pm.jamroz@gmail.com>
* Update public/app/features/explore/state/query.test.ts
  Co-authored-by: Piotr Jamróz <pm.jamroz@gmail.com>
* Use decorateData to apply all decorators
* Remove unused variables
* Change loading stte to Done
* Clear cache when running query from navigation

Co-authored-by: Piotr Jamróz <pm.jamroz@gmail.com>
This commit is contained in:
parent b5de6e7a1d
commit 247bdc2f9b
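Before the diff, a rough sketch of the caching idea may help: each query response is keyed by the absolute time range it was run for, at most five responses are kept per Explore pane, responses that are still loading are never cached, and runQueries consults the cache before issuing a new request. The sketch below is illustrative only and assumes the '@grafana/data' types; the key format, the five-entry limit, and the Done-only rule follow the diff, while the addToCache and getFromCache helpers are hypothetical stand-ins for the reducer logic and getResultsFromCache shown further down.

import { LoadingState, PanelData } from '@grafana/data';

// Hypothetical helpers sketching the cache behaviour introduced by this commit.
interface CacheEntry {
  key: string;
  value: PanelData;
}

const CACHE_LIMIT = 5; // only the five most recent responses are kept

// Key a response by the absolute range it was queried for (simplified; the
// real createCacheKey URL-encodes the values).
function createCacheKey(range: { from: number; to: number }): string {
  return `from=${range.from}&to=${range.to}`;
}

// Add a response, skipping duplicates and responses that are still loading.
function addToCache(cache: CacheEntry[], key: string, value: PanelData): CacheEntry[] {
  if (value.state !== LoadingState.Done || cache.some((entry) => entry.key === key)) {
    return cache;
  }
  return [{ key, value }, ...cache].slice(0, CACHE_LIMIT);
}

// Look a response up before running queries; a cache hit means the query can be skipped.
function getFromCache(cache: CacheEntry[], range: { from: number; to: number }): PanelData | undefined {
  return cache.find((entry) => entry.key === createCacheKey(range))?.value;
}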
@@ -19,7 +19,7 @@ import { ExploreTimeControls } from './ExploreTimeControls';
 import { LiveTailButton } from './LiveTailButton';
 import { RunButton } from './RunButton';
 import { LiveTailControls } from './useLiveTailControls';
-import { cancelQueries, clearQueries, runQueries } from './state/query';
+import { cancelQueries, clearQueries, runQueries, clearCache } from './state/query';
 import ReturnToDashboardButton from './ReturnToDashboardButton';
 import { isSplit } from './state/selectors';

@@ -54,6 +54,7 @@ interface DispatchProps {
   syncTimes: typeof syncTimes;
   changeRefreshInterval: typeof changeRefreshInterval;
   onChangeTimeZone: typeof updateTimeZoneForSession;
+  clearCache: typeof clearCache;
 }

 type Props = StateProps & DispatchProps & OwnProps;

@@ -68,10 +69,13 @@ export class UnConnectedExploreToolbar extends PureComponent<Props> {
   };

   onRunQuery = (loading = false) => {
+    const { clearCache, runQueries, cancelQueries, exploreId } = this.props;
     if (loading) {
-      return this.props.cancelQueries(this.props.exploreId);
+      return cancelQueries(exploreId);
     } else {
-      return this.props.runQueries(this.props.exploreId);
+      // We want to give user a chance tu re-run the query even if it is saved in cache
+      clearCache(exploreId);
+      return runQueries(exploreId);
     }
   };

@@ -274,6 +278,7 @@ const mapDispatchToProps: DispatchProps = {
   split: splitOpen,
   syncTimes,
   onChangeTimeZone: updateTimeZoneForSession,
+  clearCache,
 };

 export const ExploreToolbar = hot(module)(connect(mapStateToProps, mapDispatchToProps)(UnConnectedExploreToolbar));

@@ -65,6 +65,8 @@ interface Props {
   onStopScanning?: () => void;
   getRowContext?: (row: LogRowModel, options?: RowContextOptions) => Promise<any>;
   getFieldLinks: (field: Field, rowIndex: number) => Array<LinkModel<Field>>;
+  addResultsToCache: () => void;
+  clearCache: () => void;
 }

 interface State {

@@ -244,6 +246,8 @@ export class UnthemedLogs extends PureComponent<Props, State> {
       getFieldLinks,
       theme,
       logsQueries,
+      clearCache,
+      addResultsToCache,
     } = this.props;

     const {

@@ -361,6 +365,8 @@ export class UnthemedLogs extends PureComponent<Props, State> {
             loading={loading}
             queries={logsQueries ?? []}
             scrollToTopLogs={this.scrollToTopLogs}
+            addResultsToCache={addResultsToCache}
+            clearCache={clearCache}
           />
         </div>
         {!loading && !hasData && !scanning && (

@@ -7,6 +7,7 @@ import { AbsoluteTimeRange, Field, LogRowModel, RawTimeRange } from '@grafana/da
 import { ExploreId, ExploreItemState } from 'app/types/explore';
 import { StoreState } from 'app/types';
 import { splitOpen } from './state/main';
+import { addResultsToCache, clearCache } from './state/query';
 import { updateTimeRange } from './state/time';
 import { getTimeZone } from '../profile/state/selectors';
 import { LiveLogsWithTheme } from './LiveLogs';

@@ -15,7 +16,7 @@ import { LogsCrossFadeTransition } from './utils/LogsCrossFadeTransition';
 import { LiveTailControls } from './useLiveTailControls';
 import { getFieldLinksForExplore } from './utils/links';

-interface LogsContainerProps {
+interface LogsContainerProps extends PropsFromRedux {
   exploreId: ExploreId;
   scanRange?: RawTimeRange;
   width: number;

@@ -26,7 +27,7 @@ interface LogsContainerProps {
   onStopScanning: () => void;
 }

-export class LogsContainer extends PureComponent<PropsFromRedux & LogsContainerProps> {
+export class LogsContainer extends PureComponent<LogsContainerProps> {
   onChangeTime = (absoluteRange: AbsoluteTimeRange) => {
     const { exploreId, updateTimeRange } = this.props;
     updateTimeRange({ exploreId, absoluteRange });

@@ -77,6 +78,8 @@ export class LogsContainer extends PureComponent<PropsFromRedux & LogsContainerP
       width,
       isLive,
       exploreId,
+      addResultsToCache,
+      clearCache,
     } = this.props;

     if (!logRows) {

@@ -134,6 +137,8 @@ export class LogsContainer extends PureComponent<PropsFromRedux & LogsContainerP
           width={width}
           getRowContext={this.getLogRowContext}
           getFieldLinks={this.getFieldLinks}
+          addResultsToCache={() => addResultsToCache(exploreId)}
+          clearCache={() => clearCache(exploreId)}
         />
       </Collapse>
     </LogsCrossFadeTransition>

@@ -180,6 +185,8 @@ function mapStateToProps(state: StoreState, { exploreId }: { exploreId: string }
 const mapDispatchToProps = {
   updateTimeRange,
   splitOpen,
+  addResultsToCache,
+  clearCache,
 };

 const connector = connect(mapStateToProps, mapDispatchToProps);

@@ -15,6 +15,8 @@ const setup = (propOverrides?: object) => {
     visibleRange: { from: 1619081941000, to: 1619081945930 },
     onChangeTime: jest.fn(),
     scrollToTopLogs: jest.fn(),
+    addResultsToCache: jest.fn(),
+    clearCache: jest.fn(),
     ...propOverrides,
   };

@@ -14,6 +14,8 @@ type Props = {
   logsSortOrder?: LogsSortOrder | null;
   onChangeTime: (range: AbsoluteTimeRange) => void;
   scrollToTopLogs: () => void;
+  addResultsToCache: () => void;
+  clearCache: () => void;
 };

 export type LogsPage = {

@@ -30,6 +32,8 @@ function LogsNavigation({
   scrollToTopLogs,
   visibleRange,
   queries,
+  clearCache,
+  addResultsToCache,
 }: Props) {
   const [pages, setPages] = useState<LogsPage[]>([]);
   const [currentPageIndex, setCurrentPageIndex] = useState(0);

@@ -53,6 +57,7 @@ function LogsNavigation({
     let newPages: LogsPage[] = [];
     // We want to start new pagination if queries change or if absolute range is different than expected
     if (!isEqual(expectedRangeRef.current, absoluteRange) || !isEqual(expectedQueriesRef.current, queries)) {
+      clearCache();
       setPages([newPage]);
       setCurrentPageIndex(0);
       expectedQueriesRef.current = queries;

@@ -72,7 +77,14 @@ function LogsNavigation({
       const index = newPages.findIndex((page) => page.queryRange.to === absoluteRange.to);
       setCurrentPageIndex(index);
     }
-  }, [visibleRange, absoluteRange, logsSortOrder, queries]);
+    addResultsToCache();
+  }, [visibleRange, absoluteRange, logsSortOrder, queries, clearCache, addResultsToCache]);
+
+  useEffect(() => {
+    return () => clearCache();
+    // We can't enforce the eslint rule here because we only want to run when component unmounts.
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, []);

   const changeTime = ({ from, to }: AbsoluteTimeRange) => {
     expectedRangeRef.current = { from, to };

@@ -37,6 +37,7 @@ const defaultInitialState = {
         label: 'Off',
         value: 0,
       },
+      cache: [],
     },
   },
 };

@@ -238,6 +238,7 @@ export const paneReducer = (state: ExploreItemState = makeExplorePaneState(), ac
       datasourceMissing: !datasourceInstance,
       queryResponse: createEmptyQueryResponse(),
       logsHighlighterExpressions: undefined,
+      cache: [],
     };
   }

@@ -1,5 +1,7 @@
 import {
   addQueryRowAction,
+  addResultsToCache,
+  clearCache,
   cancelQueries,
   cancelQueriesAction,
   queryReducer,

@@ -10,7 +12,17 @@
 } from './query';
 import { ExploreId, ExploreItemState } from 'app/types';
 import { interval, of } from 'rxjs';
-import { ArrayVector, DataQueryResponse, DefaultTimeZone, MutableDataFrame, RawTimeRange, toUtc } from '@grafana/data';
+import {
+  ArrayVector,
+  DataQueryResponse,
+  DefaultTimeZone,
+  MutableDataFrame,
+  RawTimeRange,
+  toUtc,
+  PanelData,
+  DataFrame,
+  LoadingState,
+} from '@grafana/data';
 import { thunkTester } from 'test/core/thunk/thunkTester';
 import { makeExplorePaneState } from './utils';
 import { reducerTester } from '../../../../test/core/redux/reducerTester';

@@ -50,6 +62,7 @@ const defaultInitialState = {
         label: 'Off',
         value: 0,
       },
+      cache: [],
     },
   },
 };

@@ -213,4 +226,95 @@ describe('reducer', () => {
       });
     });
   });
+
+  describe('caching', () => {
+    it('should add response to cache', async () => {
+      const store = configureStore({
+        ...(defaultInitialState as any),
+        explore: {
+          [ExploreId.left]: {
+            ...defaultInitialState.explore[ExploreId.left],
+            queryResponse: {
+              series: [{ name: 'test name' }] as DataFrame[],
+              state: LoadingState.Done,
+            } as PanelData,
+            absoluteRange: { from: 1621348027000, to: 1621348050000 },
+          },
+        },
+      });
+
+      await store.dispatch(addResultsToCache(ExploreId.left));
+
+      expect(store.getState().explore[ExploreId.left].cache).toEqual([
+        { key: 'from=1621348027000&to=1621348050000', value: { series: [{ name: 'test name' }], state: 'Done' } },
+      ]);
+    });
+
+    it('should not add response to cache if response is still loading', async () => {
+      const store = configureStore({
+        ...(defaultInitialState as any),
+        explore: {
+          [ExploreId.left]: {
+            ...defaultInitialState.explore[ExploreId.left],
+            queryResponse: { series: [{ name: 'test name' }] as DataFrame[], state: LoadingState.Loading } as PanelData,
+            absoluteRange: { from: 1621348027000, to: 1621348050000 },
+          },
+        },
+      });
+
+      await store.dispatch(addResultsToCache(ExploreId.left));
+
+      expect(store.getState().explore[ExploreId.left].cache).toEqual([]);
+    });
+
+    it('should not add duplicate response to cache', async () => {
+      const store = configureStore({
+        ...(defaultInitialState as any),
+        explore: {
+          [ExploreId.left]: {
+            ...defaultInitialState.explore[ExploreId.left],
+            queryResponse: {
+              series: [{ name: 'test name' }] as DataFrame[],
+              state: LoadingState.Done,
+            } as PanelData,
+            absoluteRange: { from: 1621348027000, to: 1621348050000 },
+            cache: [
+              {
+                key: 'from=1621348027000&to=1621348050000',
+                value: { series: [{ name: 'old test name' }], state: LoadingState.Done },
+              },
+            ],
+          },
+        },
+      });
+
+      await store.dispatch(addResultsToCache(ExploreId.left));
+
+      expect(store.getState().explore[ExploreId.left].cache).toHaveLength(1);
+      expect(store.getState().explore[ExploreId.left].cache).toEqual([
+        { key: 'from=1621348027000&to=1621348050000', value: { series: [{ name: 'old test name' }], state: 'Done' } },
+      ]);
+    });
+
+    it('should clear cache', async () => {
+      const store = configureStore({
+        ...(defaultInitialState as any),
+        explore: {
+          [ExploreId.left]: {
+            ...defaultInitialState.explore[ExploreId.left],
+            cache: [
+              {
+                key: 'from=1621348027000&to=1621348050000',
+                value: { series: [{ name: 'old test name' }], state: 'Done' },
+              },
+            ],
+          },
+        },
+      });
+
+      await store.dispatch(clearCache(ExploreId.left));
+
+      expect(store.getState().explore[ExploreId.left].cache).toEqual([]);
+    });
+  });
 });

@@ -1,5 +1,5 @@
-import { map, mergeMap, throttleTime } from 'rxjs/operators';
-import { identity, Unsubscribable } from 'rxjs';
+import { mergeMap, throttleTime } from 'rxjs/operators';
+import { identity, Unsubscribable, of } from 'rxjs';
 import {
   DataQuery,
   DataQueryErrorType,

@@ -27,19 +27,14 @@ import { ExploreId, QueryOptions } from 'app/types/explore';
 import { getTimeZone } from 'app/features/profile/state/selectors';
 import { getShiftedTimeRange } from 'app/core/utils/timePicker';
 import { notifyApp } from '../../../core/actions';
-import { preProcessPanelData, runRequest } from '../../query/state/runRequest';
-import {
-  decorateWithFrameTypeMetadata,
-  decorateWithGraphResult,
-  decorateWithLogsResult,
-  decorateWithTableResult,
-} from '../utils/decorators';
+import { runRequest } from '../../query/state/runRequest';
+import { decorateData } from '../utils/decorators';
 import { createErrorNotification } from '../../../core/copy/appNotification';
 import { richHistoryUpdatedAction, stateSave } from './main';
 import { AnyAction, createAction, PayloadAction } from '@reduxjs/toolkit';
 import { updateTime } from './time';
 import { historyUpdatedAction } from './history';
-import { createEmptyQueryResponse } from './utils';
+import { createEmptyQueryResponse, createCacheKey, getResultsFromCache } from './utils';

 //
 // Actions and Payloads

@@ -164,6 +159,24 @@ export interface ScanStopPayload {
 }
 export const scanStopAction = createAction<ScanStopPayload>('explore/scanStop');

+/**
+ * Adds query results to cache.
+ * This is currently used to cache last 5 query results for log queries run from logs navigation (pagination).
+ */
+export interface AddResultsToCachePayload {
+  exploreId: ExploreId;
+  cacheKey: string;
+  queryResponse: PanelData;
+}
+export const addResultsToCacheAction = createAction<AddResultsToCachePayload>('explore/addResultsToCache');
+
+/**
+ * Clears cache.
+ */
+export interface ClearCachePayload {
+  exploreId: ExploreId;
+}
+export const clearCacheAction = createAction<ClearCachePayload>('explore/clearCache');
 //
 // Action creators
 //

@@ -309,100 +322,115 @@ export const runQueries = (exploreId: ExploreId, options?: { replaceUrl?: boolea
       history,
       refreshInterval,
       absoluteRange,
+      cache,
     } = exploreItemState;
+    let newQuerySub;

-    if (!hasNonEmptyQuery(queries)) {
-      dispatch(clearQueriesAction({ exploreId }));
-      dispatch(stateSave({ replace: options?.replaceUrl })); // Remember to save to state and update location
-      return;
-    }
+    const cachedValue = getResultsFromCache(cache, absoluteRange);

-    if (!datasourceInstance) {
-      return;
-    }
-
-    // Some datasource's query builders allow per-query interval limits,
-    // but we're using the datasource interval limit for now
-    const minInterval = datasourceInstance?.interval;
-
-    stopQueryState(querySubscription);
-
-    const datasourceId = datasourceInstance?.meta.id;
-
-    const queryOptions: QueryOptions = {
-      minInterval,
-      // maxDataPoints is used in:
-      // Loki - used for logs streaming for buffer size, with undefined it falls back to datasource config if it supports that.
-      // Elastic - limits the number of datapoints for the counts query and for logs it has hardcoded limit.
-      // Influx - used to correctly display logs in graph
-      // TODO:unification
-      // maxDataPoints: mode === ExploreMode.Logs && datasourceId === 'loki' ? undefined : containerWidth,
-      maxDataPoints: containerWidth,
-      liveStreaming: live,
-    };
-
-    const datasourceName = datasourceInstance.name;
-    const timeZone = getTimeZone(getState().user);
-    const transaction = buildQueryTransaction(queries, queryOptions, range, scanning, timeZone);
-
-    let firstResponse = true;
-    dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Loading }));
-
-    const newQuerySub = runRequest(datasourceInstance, transaction.request)
-      .pipe(
-        // Simple throttle for live tailing, in case of > 1000 rows per interval we spend about 200ms on processing and
-        // rendering. In case this is optimized this can be tweaked, but also it should be only as fast as user
-        // actually can see what is happening.
-        live ? throttleTime(500) : identity,
-        map((data: PanelData) => preProcessPanelData(data, queryResponse)),
-        map(decorateWithFrameTypeMetadata),
-        map(decorateWithGraphResult),
-        map(decorateWithLogsResult({ absoluteRange, refreshInterval, queries })),
-        mergeMap(decorateWithTableResult)
-      )
-      .subscribe(
-        (data) => {
-          if (!data.error && firstResponse) {
-            // Side-effect: Saving history in localstorage
-            const nextHistory = updateHistory(history, datasourceId, queries);
-            const nextRichHistory = addToRichHistory(
-              richHistory || [],
-              datasourceId,
-              datasourceName,
-              queries,
-              false,
-              '',
-              ''
-            );
-            dispatch(historyUpdatedAction({ exploreId, history: nextHistory }));
-            dispatch(richHistoryUpdatedAction({ richHistory: nextRichHistory }));
-
-            // We save queries to the URL here so that only successfully run queries change the URL.
-            dispatch(stateSave({ replace: options?.replaceUrl }));
+    // If we have results saved in cache, we are going to use those results instead of running queries
+    if (cachedValue) {
+      newQuerySub = of(cachedValue)
+        .pipe(mergeMap((data: PanelData) => decorateData(data, queryResponse, absoluteRange, refreshInterval, queries)))
+        .subscribe((data) => {
+          if (!data.error) {
+            dispatch(stateSave());
           }

-          firstResponse = false;
-
           dispatch(queryStreamUpdatedAction({ exploreId, response: data }));
+        });

-          // Keep scanning for results if this was the last scanning transaction
-          if (getState().explore[exploreId]!.scanning) {
-            if (data.state === LoadingState.Done && data.series.length === 0) {
-              const range = getShiftedTimeRange(-1, getState().explore[exploreId]!.range);
-              dispatch(updateTime({ exploreId, absoluteRange: range }));
-              dispatch(runQueries(exploreId));
-            } else {
-              // We can stop scanning if we have a result
-              dispatch(scanStopAction({ exploreId }));
+      // If we don't have resuls saved in cache, run new queries
+    } else {
+      if (!hasNonEmptyQuery(queries)) {
+        dispatch(clearQueriesAction({ exploreId }));
+        dispatch(stateSave({ replace: options?.replaceUrl })); // Remember to save to state and update location
+        return;
+      }
+
+      if (!datasourceInstance) {
+        return;
+      }
+
+      // Some datasource's query builders allow per-query interval limits,
+      // but we're using the datasource interval limit for now
+      const minInterval = datasourceInstance?.interval;
+
+      stopQueryState(querySubscription);
+
+      const datasourceId = datasourceInstance?.meta.id;
+
+      const queryOptions: QueryOptions = {
+        minInterval,
+        // maxDataPoints is used in:
+        // Loki - used for logs streaming for buffer size, with undefined it falls back to datasource config if it supports that.
+        // Elastic - limits the number of datapoints for the counts query and for logs it has hardcoded limit.
+        // Influx - used to correctly display logs in graph
+        // TODO:unification
+        // maxDataPoints: mode === ExploreMode.Logs && datasourceId === 'loki' ? undefined : containerWidth,
+        maxDataPoints: containerWidth,
+        liveStreaming: live,
+      };
+
+      const datasourceName = datasourceInstance.name;
+      const timeZone = getTimeZone(getState().user);
+      const transaction = buildQueryTransaction(queries, queryOptions, range, scanning, timeZone);
+
+      let firstResponse = true;
+      dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Loading }));
+
+      newQuerySub = runRequest(datasourceInstance, transaction.request)
+        .pipe(
+          // Simple throttle for live tailing, in case of > 1000 rows per interval we spend about 200ms on processing and
+          // rendering. In case this is optimized this can be tweaked, but also it should be only as fast as user
+          // actually can see what is happening.
+          live ? throttleTime(500) : identity,
+          mergeMap((data: PanelData) => decorateData(data, queryResponse, absoluteRange, refreshInterval, queries))
+        )
+        .subscribe(
+          (data) => {
+            if (!data.error && firstResponse) {
+              // Side-effect: Saving history in localstorage
+              const nextHistory = updateHistory(history, datasourceId, queries);
+              const nextRichHistory = addToRichHistory(
+                richHistory || [],
+                datasourceId,
+                datasourceName,
+                queries,
+                false,
+                '',
+                ''
+              );
+              dispatch(historyUpdatedAction({ exploreId, history: nextHistory }));
+              dispatch(richHistoryUpdatedAction({ richHistory: nextRichHistory }));
+
+              // We save queries to the URL here so that only successfully run queries change the URL.
+              dispatch(stateSave({ replace: options?.replaceUrl }));
+            }
+
+            firstResponse = false;
+
+            dispatch(queryStreamUpdatedAction({ exploreId, response: data }));
+
+            // Keep scanning for results if this was the last scanning transaction
+            if (getState().explore[exploreId]!.scanning) {
+              if (data.state === LoadingState.Done && data.series.length === 0) {
+                const range = getShiftedTimeRange(-1, getState().explore[exploreId]!.range);
+                dispatch(updateTime({ exploreId, absoluteRange: range }));
+                dispatch(runQueries(exploreId));
+              } else {
+                // We can stop scanning if we have a result
+                dispatch(scanStopAction({ exploreId }));
+              }
             }
-          }
-        },
-        (error) => {
-          dispatch(notifyApp(createErrorNotification('Query processing error', error)));
-          dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Error }));
-          console.error(error);
-        }
-      );
+          },
+          (error) => {
+            dispatch(notifyApp(createErrorNotification('Query processing error', error)));
+            dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Error }));
+            console.error(error);
+          }
+        );
+    }

     dispatch(queryStoreSubscriptionAction({ exploreId, querySubscription: newQuerySub }));
   };

@@ -439,6 +467,25 @@ export function scanStart(exploreId: ExploreId): ThunkResult<void> {
   };
 }

+export function addResultsToCache(exploreId: ExploreId): ThunkResult<void> {
+  return (dispatch, getState) => {
+    const queryResponse = getState().explore[exploreId]!.queryResponse;
+    const absoluteRange = getState().explore[exploreId]!.absoluteRange;
+    const cacheKey = createCacheKey(absoluteRange);
+
+    // Save results to cache only when all results recived and loading is done
+    if (queryResponse.state === LoadingState.Done) {
+      dispatch(addResultsToCacheAction({ exploreId, cacheKey, queryResponse }));
+    }
+  };
+}
+
+export function clearCache(exploreId: ExploreId): ThunkResult<void> {
+  return (dispatch, getState) => {
+    dispatch(clearCacheAction({ exploreId }));
+  };
+}
+
 //
 // Reducer
 //

@@ -629,6 +676,32 @@ export const queryReducer = (state: ExploreItemState, action: AnyAction): Explor
     };
   }

+  if (addResultsToCacheAction.match(action)) {
+    const CACHE_LIMIT = 5;
+    const { cache } = state;
+    const { queryResponse, cacheKey } = action.payload;
+
+    let newCache = [...cache];
+    const isDuplicateKey = newCache.some((c) => c.key === cacheKey);
+
+    if (!isDuplicateKey) {
+      const newCacheItem = { key: cacheKey, value: queryResponse };
+      newCache = [newCacheItem, ...newCache].slice(0, CACHE_LIMIT);
+    }
+
+    return {
+      ...state,
+      cache: newCache,
+    };
+  }
+
+  if (clearCacheAction.match(action)) {
+    return {
+      ...state,
+      cache: [],
+    };
+  }
+
   return state;
 };

@@ -6,6 +6,7 @@ import {
   HistoryItem,
   LoadingState,
   PanelData,
+  AbsoluteTimeRange,
 } from '@grafana/data';

 import { ExploreItemState } from 'app/types/explore';

@@ -49,6 +50,7 @@ export const makeExplorePaneState = (): ExploreItemState => ({
   graphResult: null,
   logsResult: null,
   eventBridge: (null as unknown) as EventBusExtended,
+  cache: [],
 });

 export const createEmptyQueryResponse = (): PanelData => ({

@@ -96,3 +98,25 @@ export function getUrlStateFromPaneState(pane: ExploreItemState): ExploreUrlStat
     range: toRawTimeRange(pane.range),
   };
 }
+
+export function createCacheKey(absRange: AbsoluteTimeRange) {
+  const params = {
+    from: absRange.from,
+    to: absRange.to,
+  };
+
+  const cacheKey = Object.entries(params)
+    .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v.toString())}`)
+    .join('&');
+  return cacheKey;
+}
+
+export function getResultsFromCache(
+  cache: Array<{ key: string; value: PanelData }>,
+  absoluteRange: AbsoluteTimeRange
+): PanelData | undefined {
+  const cacheKey = createCacheKey(absoluteRange);
+  const cacheIdx = cache.findIndex((c) => c.key === cacheKey);
+  const cacheValue = cacheIdx >= 0 ? cache[cacheIdx].value : undefined;
+  return cacheValue;
+}

@@ -11,10 +11,11 @@ import {
 import { config } from '@grafana/runtime';
 import { groupBy } from 'lodash';
 import { Observable, of } from 'rxjs';
-import { map } from 'rxjs/operators';
+import { map, mergeMap } from 'rxjs/operators';
 import { dataFrameToLogsModel } from '../../../core/logs_model';
 import { refreshIntervalToSortOrder } from '../../../core/utils/explore';
 import { ExplorePanelData } from '../../../types';
+import { preProcessPanelData } from '../../query/state/runRequest';

 /**
  * When processing response first we try to determine what kind of dataframes we got as one query can return multiple

@@ -154,6 +155,23 @@ export const decorateWithLogsResult = (
   return { ...data, logsResult };
 };

+// decorateData applies all decorators
+export function decorateData(
+  data: PanelData,
+  queryResponse: PanelData,
+  absoluteRange: AbsoluteTimeRange,
+  refreshInterval: string | undefined,
+  queries: DataQuery[] | undefined
+): Observable<ExplorePanelData> {
+  return of(data).pipe(
+    map((data: PanelData) => preProcessPanelData(data, queryResponse)),
+    map(decorateWithFrameTypeMetadata),
+    map(decorateWithGraphResult),
+    map(decorateWithLogsResult({ absoluteRange, refreshInterval, queries })),
+    mergeMap(decorateWithTableResult)
+  );
+}
+
 /**
  * Check if frame contains time series, which for our purpose means 1 time column and 1 or more numeric columns.
  */

@@ -149,6 +149,13 @@ export interface ExploreItemState {
   showTable?: boolean;
   showTrace?: boolean;
   showNodeGraph?: boolean;
+
+  /**
+   * We are using caching to store query responses of queries run from logs navigation.
+   * In logs navigation, we do pagination and we don't want our users to unnecessarily run the same queries that they've run just moments before.
+   * We are currently caching last 5 query responses.
+   */
+  cache: Array<{ key: string; value: PanelData }>;
 }

 export interface ExploreUpdateState {