Explore: Create DataSourceWithSupplementaryQueriesSupport interface to support log volume and samples (#61298)
* Create DataSourceWithLogsSampleSupport check and move to 1 place
* Add new SupplementaryQueryType
* Add and change utility functions for loading, storing and checking supp queries
* Add logic to redux for processing of new type of supp query
* Implement queryLogsSample used to run samples queries
* Fix tests to also include log samples
* Add tests
* Temporarily, default to false
* Change comment
* Fix lint error
* Refactor handling of supplementary queries in query.ts
* Fix looping over array
* Remove changes for any => unknown as in utils.ts
* Fix logic
* Fix incorrect imports after function was moved to different file
* Migrate old log volume key
* Update public/app/features/explore/utils/supplementaryQueries.ts

Co-authored-by: Piotr Jamróz <pm.jamroz@gmail.com>

* Refactor to use DataSourceWithSupplementaryQueriesSupport
* Refactor, improve tests, change internal API
* Update packages/grafana-data/src/types/logs.ts

Co-authored-by: Piotr Jamróz <pm.jamroz@gmail.com>

* Add deprecation for DataSourceWithLogsVolumeSupport, but still support it
* Update comment with correct new issue

Co-authored-by: Piotr Jamróz <pm.jamroz@gmail.com>
This commit is contained in https://github.com/grafana/grafana.git (commit c106c7700b, parent e517cc0cea).
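Before the diff, a minimal orientation sketch of the new API (hypothetical ExampleDatasource/ExampleQuery names, not part of this commit; the real interface, type guard, and the Loki and Elasticsearch implementations follow in the changes below):

import { Observable, of } from 'rxjs';
import {
  DataQuery,
  DataQueryRequest,
  DataQueryResponse,
  DataSourceWithSupplementaryQueriesSupport,
  SupplementaryQueryType,
} from '@grafana/data';

// Hypothetical query type, used only for this sketch.
interface ExampleQuery extends DataQuery {
  expr: string;
}

// A data source opts in by implementing the two methods of the new interface.
class ExampleDatasource implements DataSourceWithSupplementaryQueriesSupport<ExampleQuery> {
  // Declares which supplementary query types this data source can produce.
  getSupportedSupplementaryQueryTypes(): SupplementaryQueryType[] {
    return [SupplementaryQueryType.LogsVolume, SupplementaryQueryType.LogsSample];
  }

  // Returns an observable with supplementary data, or undefined when the request is not suitable.
  getDataProvider(
    type: SupplementaryQueryType,
    request: DataQueryRequest<ExampleQuery>
  ): Observable<DataQueryResponse> | undefined {
    if (!this.getSupportedSupplementaryQueryTypes().includes(type)) {
      return undefined;
    }
    // A real data source would build the provider with queryLogsVolume()/queryLogsSample()
    // from app/core/logsModel (both shown below); this stub just emits an empty response.
    return of({ data: [] });
  }
}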
@ -1,5 +1,5 @@
|
||||
// BETTERER RESULTS V2.
|
||||
//
|
||||
//
|
||||
// If this file contains merge conflicts, use `betterer merge` to automatically resolve them:
|
||||
// https://phenomnomnominal.github.io/betterer/docs/results-file/#merge
|
||||
//
|
||||
@ -531,7 +531,8 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "Do not use any type assertions.", "8"]
|
||||
],
|
||||
"packages/grafana-data/src/types/logs.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "1"]
|
||||
],
|
||||
"packages/grafana-data/src/types/logsVolume.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
@ -3924,16 +3925,6 @@ exports[`better eslint`] = {
|
||||
"public/app/features/explore/state/main.test.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
"public/app/features/explore/state/query.test.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "3"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "4"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "5"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "6"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "7"]
|
||||
],
|
||||
"public/app/features/explore/state/time.test.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
|
@ -1,6 +1,8 @@
|
||||
import { Observable } from 'rxjs';
|
||||
|
||||
import { Labels } from './data';
|
||||
import { DataFrame } from './dataFrame';
|
||||
import { DataQueryResponse } from './datasource';
|
||||
import { DataQueryRequest, DataQueryResponse } from './datasource';
|
||||
import { DataQuery } from './query';
|
||||
import { AbsoluteTimeRange } from './time';
|
||||
|
||||
@ -176,3 +178,42 @@ export const hasLogsContextSupport = (datasource: unknown): datasource is DataSo
|
||||
|
||||
return withLogsSupport.getLogRowContext !== undefined && withLogsSupport.showContextToggle !== undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Types of supplementary queries that can be run in Explore.
|
||||
* @internal
|
||||
*/
|
||||
export enum SupplementaryQueryType {
|
||||
LogsVolume = 'LogsVolume',
|
||||
LogsSample = 'LogsSample',
|
||||
}
|
||||
|
||||
/**
|
||||
* Data sources that support supplementary queries in Explore.
|
||||
* This will enable users to see additional data when running original queries.
|
||||
* Supported supplementary queries are defined in SupplementaryQueryType enum.
|
||||
* @internal
|
||||
*/
|
||||
export interface DataSourceWithSupplementaryQueriesSupport<TQuery extends DataQuery> {
|
||||
getDataProvider(
|
||||
type: SupplementaryQueryType,
|
||||
request: DataQueryRequest<TQuery>
|
||||
): Observable<DataQueryResponse> | undefined;
|
||||
getSupportedSupplementaryQueryTypes(): SupplementaryQueryType[];
|
||||
}
|
||||
|
||||
export const hasSupplementaryQuerySupport = <TQuery extends DataQuery>(
|
||||
datasource: unknown,
|
||||
type: SupplementaryQueryType
|
||||
): datasource is DataSourceWithSupplementaryQueriesSupport<TQuery> => {
|
||||
if (!datasource) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const withSupplementaryQueriesSupport = datasource as DataSourceWithSupplementaryQueriesSupport<TQuery>;
|
||||
|
||||
return (
|
||||
withSupplementaryQueriesSupport.getDataProvider !== undefined &&
|
||||
withSupplementaryQueriesSupport.getSupportedSupplementaryQueryTypes().includes(type)
|
||||
);
|
||||
};
|
||||
|
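A hedged consumer-side sketch of the guard defined above (the helper name is hypothetical; the pattern matches how Explore's runQueries uses the guard later in this diff):

import { Observable } from 'rxjs';
import {
  DataQuery,
  DataQueryRequest,
  DataQueryResponse,
  SupplementaryQueryType,
  hasSupplementaryQuerySupport,
} from '@grafana/data';

// The data source is typed as unknown on purpose: the type guard narrows it to
// DataSourceWithSupplementaryQueriesSupport<TQuery> only when support is detected.
function getSupplementaryProvider<TQuery extends DataQuery>(
  datasource: unknown,
  type: SupplementaryQueryType,
  request: DataQueryRequest<TQuery>
): Observable<DataQueryResponse> | undefined {
  if (hasSupplementaryQuerySupport<TQuery>(datasource, type)) {
    return datasource.getDataProvider(type, request);
  }
  return undefined;
}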
@ -4,17 +4,20 @@ import { DataQueryRequest, DataQueryResponse } from './datasource';
|
||||
import { DataQuery } from './query';
|
||||
|
||||
/**
|
||||
* TODO: This should be added to ./logs.ts but because of cross reference between ./datasource.ts and ./logs.ts it can
|
||||
* be done only after decoupling "logs" from "datasource" (https://github.com/grafana/grafana/pull/39536)
|
||||
* Support for DataSourceWithLogsVolumeSupport is deprecated and will be removed in the next major version.
|
||||
* Use DataSourceWithSupplementaryQueriesSupport instead.
|
||||
*
|
||||
* @internal
|
||||
* @deprecated
|
||||
*/
|
||||
export interface DataSourceWithLogsVolumeSupport<TQuery extends DataQuery> {
|
||||
getLogsVolumeDataProvider(request: DataQueryRequest<TQuery>): Observable<DataQueryResponse> | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* Support for hasLogsVolumeSupport is deprecated and will be removed in the next major version.
|
||||
* Use DataSourceWithSupplementaryQueriesSupport and hasSupplementaryQuerySupport instead.
|
||||
*
|
||||
* @deprecated
|
||||
*/
|
||||
export const hasLogsVolumeSupport = <TQuery extends DataQuery>(
|
||||
datasource: unknown
|
||||
|
@ -28,6 +28,7 @@ import {
|
||||
LIMIT_LABEL,
|
||||
logSeriesToLogsModel,
|
||||
queryLogsVolume,
|
||||
queryLogsSample,
|
||||
} from './logsModel';
|
||||
|
||||
describe('dedupLogRows()', () => {
|
||||
@ -1223,3 +1224,95 @@ describe('logs volume', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('logs sample', () => {
|
||||
class TestDataQuery implements DataQuery {
|
||||
refId = 'A';
|
||||
target = '';
|
||||
}
|
||||
|
||||
let logsSampleProvider: Observable<DataQueryResponse>,
|
||||
datasource: MockObservableDataSourceApi,
|
||||
request: DataQueryRequest<TestDataQuery>;
|
||||
|
||||
function createFrame(labels: object[], timestamps: number[], values: string[]) {
|
||||
return toDataFrame({
|
||||
fields: [
|
||||
{ name: 'Time', type: FieldType.time, values: timestamps },
|
||||
{
|
||||
name: 'Line',
|
||||
type: FieldType.string,
|
||||
values,
|
||||
},
|
||||
{ name: 'labels', type: FieldType.other, values: labels },
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
function setup(datasourceSetup: () => void) {
|
||||
datasourceSetup();
|
||||
request = {
|
||||
targets: [{ target: 'logs sample query 1' }, { target: 'logs sample query 2' }],
|
||||
scopedVars: {},
|
||||
} as unknown as DataQueryRequest<TestDataQuery>;
|
||||
logsSampleProvider = queryLogsSample(datasource, request);
|
||||
}
|
||||
const resultAFrame1 = createFrame([{ app: 'app01' }], [100, 200, 300], ['line 1', 'line 2', 'line 3']);
|
||||
const resultAFrame2 = createFrame(
|
||||
[{ app: 'app01', level: 'error' }],
|
||||
[100, 200, 300],
|
||||
['line 4', 'line 5', 'line 6']
|
||||
);
|
||||
|
||||
const resultBFrame1 = createFrame([{ app: 'app02' }], [100, 200, 300], ['line A', 'line B', 'line C']);
|
||||
const resultBFrame2 = createFrame(
|
||||
[{ app: 'app02', level: 'error' }],
|
||||
[100, 200, 300],
|
||||
['line D', 'line E', 'line F']
|
||||
);
|
||||
|
||||
function setupMultipleResults() {
|
||||
datasource = new MockObservableDataSourceApi('loki', [
|
||||
{
|
||||
data: [resultAFrame1, resultAFrame2],
|
||||
},
|
||||
{
|
||||
data: [resultBFrame1, resultBFrame2],
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
function setupErrorResponse() {
|
||||
datasource = new MockObservableDataSourceApi('loki', [], undefined, 'Error message');
|
||||
}
|
||||
|
||||
it('returns data', async () => {
|
||||
setup(setupMultipleResults);
|
||||
await expect(logsSampleProvider).toEmitValuesWith((received) => {
|
||||
expect(received).toMatchObject([
|
||||
{ state: LoadingState.Loading, error: undefined, data: [] },
|
||||
{
|
||||
state: LoadingState.Done,
|
||||
error: undefined,
|
||||
data: [resultAFrame1, resultAFrame2, resultBFrame1, resultBFrame2],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
it('returns error', async () => {
|
||||
setup(setupErrorResponse);
|
||||
|
||||
await expect(logsSampleProvider).toEmitValuesWith((received) => {
|
||||
expect(received).toMatchObject([
|
||||
{ state: LoadingState.Loading, error: undefined, data: [] },
|
||||
{
|
||||
state: LoadingState.Error,
|
||||
error: 'Error message',
|
||||
data: [],
|
||||
},
|
||||
'Error message',
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -747,6 +747,63 @@ export function queryLogsVolume<TQuery extends DataQuery, TOptions extends DataS
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an observable, which makes requests to get logs samples.
|
||||
*/
|
||||
export function queryLogsSample<TQuery extends DataQuery, TOptions extends DataSourceJsonData>(
|
||||
datasource: DataSourceApi<TQuery, TOptions>,
|
||||
logsSampleRequest: DataQueryRequest<TQuery>
|
||||
): Observable<DataQueryResponse> {
|
||||
logsSampleRequest.hideFromInspector = true;
|
||||
|
||||
return new Observable((observer) => {
|
||||
let rawLogsSample: DataFrame[] = [];
|
||||
observer.next({
|
||||
state: LoadingState.Loading,
|
||||
error: undefined,
|
||||
data: [],
|
||||
});
|
||||
|
||||
const queryResponse = datasource.query(logsSampleRequest);
|
||||
const queryObservable = isObservable(queryResponse) ? queryResponse : from(queryResponse);
|
||||
|
||||
const subscription = queryObservable.subscribe({
|
||||
complete: () => {
|
||||
observer.next({
|
||||
state: LoadingState.Done,
|
||||
error: undefined,
|
||||
data: rawLogsSample,
|
||||
});
|
||||
observer.complete();
|
||||
},
|
||||
next: (dataQueryResponse: DataQueryResponse) => {
|
||||
const { error } = dataQueryResponse;
|
||||
if (error !== undefined) {
|
||||
observer.next({
|
||||
state: LoadingState.Error,
|
||||
error,
|
||||
data: [],
|
||||
});
|
||||
observer.error(error);
|
||||
} else {
|
||||
rawLogsSample = rawLogsSample.concat(dataQueryResponse.data.map(toDataFrame));
|
||||
}
|
||||
},
|
||||
error: (error) => {
|
||||
observer.next({
|
||||
state: LoadingState.Error,
|
||||
error: error,
|
||||
data: [],
|
||||
});
|
||||
observer.error(error);
|
||||
},
|
||||
});
|
||||
return () => {
|
||||
subscription?.unsubscribe();
|
||||
};
|
||||
});
|
||||
}
|
||||
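A hedged usage sketch of the new helper (the wrapper function is illustrative only; LokiDatasource.getLogsSampleDataProvider() later in this diff calls queryLogsSample the same way):

import { DataQuery, DataQueryRequest, DataSourceApi, LoadingState } from '@grafana/data';
import { queryLogsSample } from 'app/core/logsModel';

// queryLogsSample() wraps datasource.query(), hides the request from the inspector and
// re-emits Loading -> Done (or Error), concatenating all received frames into one response.
function subscribeToLogsSample<TQuery extends DataQuery>(
  datasource: DataSourceApi<TQuery>,
  logsSampleRequest: DataQueryRequest<TQuery>
) {
  return queryLogsSample(datasource, logsSampleRequest).subscribe((response) => {
    if (response.state === LoadingState.Done) {
      console.log(`received ${response.data.length} logs sample frames`);
    }
  });
}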
|
||||
function getIntervalInfo(scopedVars: ScopedVars, timespanMs: number): { interval: string; intervalMs?: number } {
|
||||
if (scopedVars.__interval) {
|
||||
let intervalMs: number = scopedVars.__interval_ms.value;
|
||||
|
@ -11,10 +11,11 @@ import {
|
||||
EventBus,
|
||||
SplitOpen,
|
||||
DataFrame,
|
||||
SupplementaryQueryType,
|
||||
} from '@grafana/data';
|
||||
import { Collapse } from '@grafana/ui';
|
||||
import { StoreState } from 'app/types';
|
||||
import { ExploreId, ExploreItemState, SupplementaryQueryType } from 'app/types/explore';
|
||||
import { ExploreId, ExploreItemState } from 'app/types/explore';
|
||||
|
||||
import { getTimeZone } from '../profile/state/selectors';
|
||||
|
||||
|
@ -8,9 +8,11 @@ import { stopQueryState } from 'app/core/utils/explore';
|
||||
import { ExploreItemState, ThunkResult } from 'app/types';
|
||||
import { ExploreId } from 'app/types/explore';
|
||||
|
||||
import { loadSupplementaryQueries } from '../utils/supplementaryQueries';
|
||||
|
||||
import { importQueries, runQueries } from './query';
|
||||
import { changeRefreshInterval } from './time';
|
||||
import { createEmptyQueryResponse, loadAndInitDatasource, loadSupplementaryQueries } from './utils';
|
||||
import { createEmptyQueryResponse, loadAndInitDatasource } from './utils';
|
||||
|
||||
//
|
||||
// Actions and Payloads
|
||||
|
@ -1,10 +1,10 @@
|
||||
import { DefaultTimeZone, toUtc } from '@grafana/data';
|
||||
import { DefaultTimeZone, TimeRange, toUtc, SupplementaryQueryType } from '@grafana/data';
|
||||
|
||||
import { ExploreId, SupplementaryQueryType } from '../../../types';
|
||||
import { ExploreId } from '../../../types';
|
||||
|
||||
export const createDefaultInitialState = () => {
|
||||
const t = toUtc();
|
||||
const testRange = {
|
||||
const testRange: TimeRange = {
|
||||
from: t,
|
||||
to: t,
|
||||
raw: {
|
||||
@ -23,7 +23,10 @@ export const createDefaultInitialState = () => {
|
||||
datasourceInstance: {
|
||||
query: jest.fn(),
|
||||
getRef: jest.fn(),
|
||||
getLogsVolumeDataProvider: jest.fn(),
|
||||
getDataProvider: jest.fn(),
|
||||
getSupportedSupplementaryQueryTypes: jest
|
||||
.fn()
|
||||
.mockImplementation(() => [SupplementaryQueryType.LogsVolume, SupplementaryQueryType.LogsSample]),
|
||||
meta: {
|
||||
id: 'something',
|
||||
},
|
||||
@ -44,6 +47,9 @@ export const createDefaultInitialState = () => {
|
||||
[SupplementaryQueryType.LogsVolume]: {
|
||||
enabled: true,
|
||||
},
|
||||
[SupplementaryQueryType.LogsSample]: {
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
@ -1,3 +1,4 @@
|
||||
import { snakeCase } from 'lodash';
|
||||
import { EMPTY, interval, Observable, of } from 'rxjs';
|
||||
import { thunkTester } from 'test/core/thunk/thunkTester';
|
||||
import { assertIsDefined } from 'test/helpers/asserts';
|
||||
@ -8,16 +9,18 @@ import {
|
||||
DataQueryResponse,
|
||||
DataSourceApi,
|
||||
DataSourceJsonData,
|
||||
DataSourceWithLogsVolumeSupport,
|
||||
DataSourceWithSupplementaryQueriesSupport,
|
||||
LoadingState,
|
||||
MutableDataFrame,
|
||||
RawTimeRange,
|
||||
SupplementaryQueryType,
|
||||
} from '@grafana/data';
|
||||
import { ExploreId, ExploreItemState, StoreState, SupplementaryQueryType, ThunkDispatch } from 'app/types';
|
||||
import { ExploreId, ExploreItemState, StoreState, ThunkDispatch } from 'app/types';
|
||||
|
||||
import { reducerTester } from '../../../../test/core/redux/reducerTester';
|
||||
import { configureStore } from '../../../store/configureStore';
|
||||
import { setTimeSrv } from '../../dashboard/services/TimeSrv';
|
||||
import { setTimeSrv, TimeSrv } from '../../dashboard/services/TimeSrv';
|
||||
import { supplementaryQueryTypes } from '../utils/supplementaryQueries';
|
||||
|
||||
import { createDefaultInitialState } from './helpers';
|
||||
import { saveCorrelationsAction } from './main';
|
||||
@ -33,8 +36,8 @@ import {
|
||||
runQueries,
|
||||
scanStartAction,
|
||||
scanStopAction,
|
||||
storeSupplementaryQueryDataProviderAction,
|
||||
setSupplementaryQueryEnabled,
|
||||
cleanSupplementaryQueryDataProviderAction,
|
||||
} from './query';
|
||||
import { makeExplorePaneState } from './utils';
|
||||
|
||||
@ -99,10 +102,10 @@ function setupQueryResponse(state: StoreState) {
|
||||
|
||||
describe('runQueries', () => {
|
||||
const setupTests = () => {
|
||||
setTimeSrv({ init() {} } as any);
|
||||
setTimeSrv({ init() {} } as unknown as TimeSrv);
|
||||
return configureStore({
|
||||
...(defaultInitialState as any),
|
||||
});
|
||||
...defaultInitialState,
|
||||
} as unknown as Partial<StoreState>);
|
||||
};
|
||||
|
||||
it('should pass dataFrames to state even if there is error in response', async () => {
|
||||
@ -114,20 +117,23 @@ describe('runQueries', () => {
|
||||
expect(getState().explore[ExploreId.left].graphResult).toBeDefined();
|
||||
});
|
||||
|
||||
it('should modify the request-id for log-volume queries', async () => {
|
||||
it('should modify the request-id for all supplementary queries', () => {
|
||||
const { dispatch, getState } = setupTests();
|
||||
setupQueryResponse(getState());
|
||||
await dispatch(saveCorrelationsAction([]));
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
dispatch(saveCorrelationsAction([]));
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
|
||||
const state = getState().explore[ExploreId.left];
|
||||
expect(state.queryResponse.request?.requestId).toBe('explore_left');
|
||||
const datasource = state.datasourceInstance as unknown as DataSourceWithLogsVolumeSupport<DataQuery>;
|
||||
expect(datasource.getLogsVolumeDataProvider).toBeCalledWith(
|
||||
expect.objectContaining({
|
||||
requestId: 'explore_left_log_volume',
|
||||
})
|
||||
);
|
||||
const datasource = state.datasourceInstance as unknown as DataSourceWithSupplementaryQueriesSupport<DataQuery>;
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
expect(datasource.getDataProvider).toHaveBeenCalledWith(
|
||||
type,
|
||||
expect.objectContaining({
|
||||
requestId: `explore_left_${snakeCase(type)}`,
|
||||
})
|
||||
);
|
||||
}
|
||||
});
|
||||
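For reference, a small illustration of the request-id convention checked above (lodash snakeCase over the enum values; the 'explore_left' prefix assumes the left Explore pane):

import { snakeCase } from 'lodash';

snakeCase('LogsVolume'); // 'logs_volume' -> requestId 'explore_left_logs_volume'
snakeCase('LogsSample'); // 'logs_sample' -> requestId 'explore_left_logs_sample'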
|
||||
it('should set state to done if query completes without emitting', async () => {
|
||||
@ -164,7 +170,10 @@ describe('running queries', () => {
|
||||
querySubscription: unsubscribable,
|
||||
queries: ['A'],
|
||||
range: testRange,
|
||||
supplementaryQueries: { [SupplementaryQueryType.LogsVolume]: { enabled: true } },
|
||||
supplementaryQueries: {
|
||||
[SupplementaryQueryType.LogsVolume]: { enabled: true },
|
||||
[SupplementaryQueryType.LogsSample]: { enabled: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@ -180,8 +189,10 @@ describe('running queries', () => {
|
||||
expect(dispatchedActions).toEqual([
|
||||
scanStopAction({ exploreId }),
|
||||
cancelQueriesAction({ exploreId }),
|
||||
storeSupplementaryQueryDataProviderAction({ exploreId, type: SupplementaryQueryType.LogsVolume }),
|
||||
cleanSupplementaryQueryDataProviderAction({ exploreId, type: SupplementaryQueryType.LogsVolume }),
|
||||
cleanSupplementaryQueryAction({ exploreId, type: SupplementaryQueryType.LogsVolume }),
|
||||
cleanSupplementaryQueryDataProviderAction({ exploreId, type: SupplementaryQueryType.LogsSample }),
|
||||
cleanSupplementaryQueryAction({ exploreId, type: SupplementaryQueryType.LogsSample }),
|
||||
]);
|
||||
});
|
||||
});
|
||||
@ -190,14 +201,14 @@ describe('importing queries', () => {
|
||||
describe('when importing queries between the same type of data source', () => {
|
||||
it('remove datasource property from all of the queries', async () => {
|
||||
const { dispatch, getState }: { dispatch: ThunkDispatch; getState: () => StoreState } = configureStore({
|
||||
...(defaultInitialState as any),
|
||||
...defaultInitialState,
|
||||
explore: {
|
||||
[ExploreId.left]: {
|
||||
...defaultInitialState.explore[ExploreId.left],
|
||||
datasourceInstance: datasources[0],
|
||||
},
|
||||
},
|
||||
});
|
||||
} as unknown as Partial<StoreState>);
|
||||
|
||||
await dispatch(
|
||||
importQueries(
|
||||
@ -276,7 +287,7 @@ describe('reducer', () => {
|
||||
describe('caching', () => {
|
||||
it('should add response to cache', async () => {
|
||||
const { dispatch, getState }: { dispatch: ThunkDispatch; getState: () => StoreState } = configureStore({
|
||||
...(defaultInitialState as any),
|
||||
...defaultInitialState,
|
||||
explore: {
|
||||
[ExploreId.left]: {
|
||||
...defaultInitialState.explore[ExploreId.left],
|
||||
@ -287,7 +298,7 @@ describe('reducer', () => {
|
||||
absoluteRange: { from: 1621348027000, to: 1621348050000 },
|
||||
},
|
||||
},
|
||||
});
|
||||
} as unknown as Partial<StoreState>);
|
||||
|
||||
await dispatch(addResultsToCache(ExploreId.left));
|
||||
|
||||
@ -298,7 +309,7 @@ describe('reducer', () => {
|
||||
|
||||
it('should not add response to cache if response is still loading', async () => {
|
||||
const { dispatch, getState }: { dispatch: ThunkDispatch; getState: () => StoreState } = configureStore({
|
||||
...(defaultInitialState as any),
|
||||
...defaultInitialState,
|
||||
explore: {
|
||||
[ExploreId.left]: {
|
||||
...defaultInitialState.explore[ExploreId.left],
|
||||
@ -306,7 +317,7 @@ describe('reducer', () => {
|
||||
absoluteRange: { from: 1621348027000, to: 1621348050000 },
|
||||
},
|
||||
},
|
||||
});
|
||||
} as unknown as Partial<StoreState>);
|
||||
|
||||
await dispatch(addResultsToCache(ExploreId.left));
|
||||
|
||||
@ -315,7 +326,7 @@ describe('reducer', () => {
|
||||
|
||||
it('should not add duplicate response to cache', async () => {
|
||||
const { dispatch, getState }: { dispatch: ThunkDispatch; getState: () => StoreState } = configureStore({
|
||||
...(defaultInitialState as any),
|
||||
...defaultInitialState,
|
||||
explore: {
|
||||
[ExploreId.left]: {
|
||||
...defaultInitialState.explore[ExploreId.left],
|
||||
@ -332,7 +343,7 @@ describe('reducer', () => {
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
} as unknown as Partial<StoreState>);
|
||||
|
||||
await dispatch(addResultsToCache(ExploreId.left));
|
||||
|
||||
@ -344,7 +355,7 @@ describe('reducer', () => {
|
||||
|
||||
it('should clear cache', async () => {
|
||||
const { dispatch, getState }: { dispatch: ThunkDispatch; getState: () => StoreState } = configureStore({
|
||||
...(defaultInitialState as any),
|
||||
...defaultInitialState,
|
||||
explore: {
|
||||
[ExploreId.left]: {
|
||||
...defaultInitialState.explore[ExploreId.left],
|
||||
@ -356,7 +367,7 @@ describe('reducer', () => {
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
} as unknown as Partial<StoreState>);
|
||||
|
||||
await dispatch(clearCache(ExploreId.left));
|
||||
|
||||
@ -364,15 +375,15 @@ describe('reducer', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('log volume', () => {
|
||||
describe('supplementary queries', () => {
|
||||
let dispatch: ThunkDispatch,
|
||||
getState: () => StoreState,
|
||||
unsubscribes: Function[],
|
||||
mockLogsVolumeDataProvider: () => Observable<DataQueryResponse>;
|
||||
mockDataProvider: () => Observable<DataQueryResponse>;
|
||||
|
||||
beforeEach(() => {
|
||||
unsubscribes = [];
|
||||
mockLogsVolumeDataProvider = () => {
|
||||
mockDataProvider = () => {
|
||||
return {
|
||||
subscribe: () => {
|
||||
const unsubscribe = jest.fn();
|
||||
@ -385,7 +396,7 @@ describe('reducer', () => {
|
||||
};
|
||||
|
||||
const store: { dispatch: ThunkDispatch; getState: () => StoreState } = configureStore({
|
||||
...(defaultInitialState as any),
|
||||
...defaultInitialState,
|
||||
explore: {
|
||||
[ExploreId.left]: {
|
||||
...defaultInitialState.explore[ExploreId.left],
|
||||
@ -395,13 +406,17 @@ describe('reducer', () => {
|
||||
meta: {
|
||||
id: 'something',
|
||||
},
|
||||
getLogsVolumeDataProvider: () => {
|
||||
return mockLogsVolumeDataProvider();
|
||||
getDataProvider: () => {
|
||||
return mockDataProvider();
|
||||
},
|
||||
getSupportedSupplementaryQueryTypes: () => [
|
||||
SupplementaryQueryType.LogsVolume,
|
||||
SupplementaryQueryType.LogsSample,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
} as unknown as Partial<StoreState>);
|
||||
|
||||
dispatch = store.dispatch;
|
||||
getState = store.getState;
|
||||
@ -409,109 +424,111 @@ describe('reducer', () => {
|
||||
setupQueryResponse(getState());
|
||||
});
|
||||
|
||||
it('should cancel any unfinished logs volume queries when a new query is run', async () => {
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
it('should cancel any unfinished supplementary queries when a new query is run', async () => {
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
// first query is run automatically
|
||||
// loading in progress - one subscription created, not cleaned up yet
|
||||
expect(unsubscribes).toHaveLength(1);
|
||||
// loading in progress - subscriptions for both supplementary queries are created, not cleaned up yet
|
||||
expect(unsubscribes).toHaveLength(2);
|
||||
expect(unsubscribes[0]).not.toBeCalled();
|
||||
expect(unsubscribes[1]).not.toBeCalled();
|
||||
|
||||
setupQueryResponse(getState());
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
// a new query is run while log volume query is not resolve yet...
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
// a new query is run while supplementary queries are not resolve yet...
|
||||
expect(unsubscribes[0]).toBeCalled();
|
||||
// first subscription is cleaned up, a new subscription is created automatically
|
||||
expect(unsubscribes[1]).toBeCalled();
|
||||
// first subscriptions are cleaned up, a new subscriptions are created automatically
|
||||
expect(unsubscribes).toHaveLength(4);
|
||||
expect(unsubscribes[2]).not.toBeCalled();
|
||||
expect(unsubscribes[3]).not.toBeCalled();
|
||||
});
|
||||
|
||||
it('should cancel all supported supplementary queries when the main query is canceled', () => {
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
expect(unsubscribes).toHaveLength(2);
|
||||
expect(unsubscribes[1]).not.toBeCalled();
|
||||
});
|
||||
|
||||
it('should cancel log volume query when the main query is canceled', async () => {
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
expect(unsubscribes).toHaveLength(1);
|
||||
expect(unsubscribes[0]).not.toBeCalled();
|
||||
expect(unsubscribes[1]).not.toBeCalled();
|
||||
|
||||
await dispatch(cancelQueries(ExploreId.left));
|
||||
expect(unsubscribes).toHaveLength(1);
|
||||
dispatch(cancelQueries(ExploreId.left));
|
||||
expect(unsubscribes).toHaveLength(2);
|
||||
expect(unsubscribes[0]).toBeCalled();
|
||||
expect(unsubscribes[1]).toBeCalled();
|
||||
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data
|
||||
).toBeUndefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].dataProvider
|
||||
).toBeUndefined();
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].data).toBeUndefined();
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].dataProvider).toBeUndefined();
|
||||
}
|
||||
});
|
||||
|
||||
it('should load logs volume after running the query', async () => {
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
expect(unsubscribes).toHaveLength(1);
|
||||
it('should load supplementary queries after running the query', () => {
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
expect(unsubscribes).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should clean any incomplete log volume data when main query is canceled', async () => {
|
||||
mockLogsVolumeDataProvider = () => {
|
||||
it('should clean any incomplete supplementary queries data when main query is canceled', () => {
|
||||
mockDataProvider = () => {
|
||||
return of({ state: LoadingState.Loading, error: undefined, data: [] });
|
||||
};
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data
|
||||
).toBeDefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data!.state
|
||||
).toBe(LoadingState.Loading);
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].dataProvider
|
||||
).toBeDefined();
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].data).toBeDefined();
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].data!.state).toBe(LoadingState.Loading);
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].dataProvider).toBeDefined();
|
||||
}
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].data).toBeDefined();
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].data!.state).toBe(LoadingState.Loading);
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].dataProvider).toBeDefined();
|
||||
}
|
||||
|
||||
await dispatch(cancelQueries(ExploreId.left));
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data
|
||||
).toBeUndefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data
|
||||
).toBeUndefined();
|
||||
dispatch(cancelQueries(ExploreId.left));
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].data).toBeUndefined();
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[type].data).toBeUndefined();
|
||||
}
|
||||
});
|
||||
|
||||
it('keeps complete log volume data when main query is canceled', async () => {
|
||||
mockLogsVolumeDataProvider = () => {
|
||||
it('keeps complete supplementary data when main query is canceled', async () => {
|
||||
mockDataProvider = () => {
|
||||
return of(
|
||||
{ state: LoadingState.Loading, error: undefined, data: [] },
|
||||
{ state: LoadingState.Done, error: undefined, data: [{}] }
|
||||
);
|
||||
};
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data
|
||||
).toBeDefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data!.state
|
||||
).toBe(LoadingState.Done);
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].dataProvider
|
||||
).toBeDefined();
|
||||
for (const types of supplementaryQueryTypes) {
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[types].data).toBeDefined();
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[types].data!.state).toBe(LoadingState.Done);
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[types].dataProvider).toBeDefined();
|
||||
}
|
||||
|
||||
await dispatch(cancelQueries(ExploreId.left));
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data
|
||||
).toBeDefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data!.state
|
||||
).toBe(LoadingState.Done);
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].dataProvider
|
||||
).toBeUndefined();
|
||||
dispatch(cancelQueries(ExploreId.left));
|
||||
|
||||
for (const types of supplementaryQueryTypes) {
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[types].data).toBeDefined();
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[types].data!.state).toBe(LoadingState.Done);
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[types].dataProvider).toBeUndefined();
|
||||
}
|
||||
});
|
||||
|
||||
it('do not load logsVolume data when disabled', async () => {
|
||||
// turn logsvolume off
|
||||
it('do not load disabled supplementary query data', () => {
|
||||
mockDataProvider = () => {
|
||||
return of({ state: LoadingState.Done, error: undefined, data: [{}] });
|
||||
};
|
||||
// turn logs volume off (but keep log sample on)
|
||||
dispatch(setSupplementaryQueryEnabled(ExploreId.left, false, SupplementaryQueryType.LogsVolume));
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].enabled).toBe(
|
||||
false
|
||||
);
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsSample].enabled).toBe(
|
||||
true
|
||||
);
|
||||
|
||||
// verify that if we run a query, it will: 1) not do logs volume, 2) do logs sample 3) provider will still be set for both
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
|
||||
// verify that if we run a query, it will not do logsvolume, but the Provider will still be set
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].data
|
||||
).toBeUndefined();
|
||||
@ -521,29 +538,51 @@ describe('reducer', () => {
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].dataProvider
|
||||
).toBeDefined();
|
||||
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsSample].data
|
||||
).toBeDefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsSample].dataSubscription
|
||||
).toBeDefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsSample].dataProvider
|
||||
).toBeDefined();
|
||||
});
|
||||
|
||||
it('load logsVolume data when it gets enabled', async () => {
|
||||
// first it is disabled
|
||||
it('load data of supplementary query that gets enabled', async () => {
|
||||
// first we start with both supplementary queries disabled
|
||||
dispatch(setSupplementaryQueryEnabled(ExploreId.left, false, SupplementaryQueryType.LogsVolume));
|
||||
dispatch(setSupplementaryQueryEnabled(ExploreId.left, false, SupplementaryQueryType.LogsSample));
|
||||
|
||||
// runQueries sets up the logsVolume query, but does not run it
|
||||
await dispatch(runQueries(ExploreId.left));
|
||||
// runQueries sets up providers, but does not run queries
|
||||
dispatch(runQueries(ExploreId.left));
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].dataProvider
|
||||
).toBeDefined();
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsSample].dataProvider
|
||||
).toBeDefined();
|
||||
|
||||
// we turn logsvolume on
|
||||
await dispatch(setSupplementaryQueryEnabled(ExploreId.left, true, SupplementaryQueryType.LogsVolume));
|
||||
// we turn 1 supplementary query (logs volume) on
|
||||
dispatch(setSupplementaryQueryEnabled(ExploreId.left, true, SupplementaryQueryType.LogsVolume));
|
||||
|
||||
// verify it was turned on
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].enabled).toBe(
|
||||
true
|
||||
);
|
||||
// verify that other stay off
|
||||
expect(getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsSample].enabled).toBe(
|
||||
false
|
||||
);
|
||||
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsVolume].dataSubscription
|
||||
).toBeDefined();
|
||||
|
||||
expect(
|
||||
getState().explore[ExploreId.left].supplementaryQueries[SupplementaryQueryType.LogsSample].dataSubscription
|
||||
).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,6 +1,6 @@
|
||||
import { AnyAction, createAction, PayloadAction } from '@reduxjs/toolkit';
|
||||
import deepEqual from 'fast-deep-equal';
|
||||
import { flatten, groupBy } from 'lodash';
|
||||
import { flatten, groupBy, snakeCase } from 'lodash';
|
||||
import { identity, Observable, of, SubscriptionLike, Unsubscribable, combineLatest } from 'rxjs';
|
||||
import { mergeMap, throttleTime } from 'rxjs/operators';
|
||||
|
||||
@ -10,7 +10,8 @@ import {
|
||||
DataQueryErrorType,
|
||||
DataQueryResponse,
|
||||
DataSourceApi,
|
||||
hasLogsVolumeSupport,
|
||||
hasSupplementaryQuerySupport,
|
||||
SupplementaryQueryType,
|
||||
hasQueryExportSupport,
|
||||
hasQueryImportSupport,
|
||||
HistoryItem,
|
||||
@ -18,6 +19,7 @@ import {
|
||||
PanelEvents,
|
||||
QueryFixAction,
|
||||
toLegacyResponseData,
|
||||
hasLogsVolumeSupport,
|
||||
} from '@grafana/data';
|
||||
import { config, getDataSourceSrv, reportInteraction } from '@grafana/runtime';
|
||||
import {
|
||||
@ -36,22 +38,18 @@ import { getTimeZone } from 'app/features/profile/state/selectors';
|
||||
import { MIXED_DATASOURCE_NAME } from 'app/plugins/datasource/mixed/MixedDataSource';
|
||||
import { store } from 'app/store/store';
|
||||
import { ExploreItemState, ExplorePanelData, ThunkDispatch, ThunkResult } from 'app/types';
|
||||
import { ExploreId, ExploreState, QueryOptions, SupplementaryQueryType, SupplementaryQueries } from 'app/types/explore';
|
||||
import { ExploreId, ExploreState, QueryOptions, SupplementaryQueries } from 'app/types/explore';
|
||||
|
||||
import { notifyApp } from '../../../core/actions';
|
||||
import { createErrorNotification } from '../../../core/copy/appNotification';
|
||||
import { runRequest } from '../../query/state/runRequest';
|
||||
import { decorateData } from '../utils/decorators';
|
||||
import { storeSupplementaryQueryEnabled, supplementaryQueryTypes } from '../utils/supplementaryQueries';
|
||||
|
||||
import { addHistoryItem, historyUpdatedAction, loadRichHistory } from './history';
|
||||
import { stateSave } from './main';
|
||||
import { updateTime } from './time';
|
||||
import {
|
||||
createCacheKey,
|
||||
getResultsFromCache,
|
||||
storeSupplementaryQueryEnabled,
|
||||
SUPPLEMENTARY_QUERY_TYPES,
|
||||
} from './utils';
|
||||
import { createCacheKey, getResultsFromCache } from './utils';
|
||||
|
||||
//
|
||||
// Actions and Payloads
|
||||
@ -112,13 +110,22 @@ export interface StoreSupplementaryQueryDataProvider {
|
||||
type: SupplementaryQueryType;
|
||||
}
|
||||
|
||||
export interface CleanSupplementaryQueryDataProvider {
|
||||
exploreId: ExploreId;
|
||||
type: SupplementaryQueryType;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores available logs volume provider after running the query. Used internally by runQueries().
|
||||
* Stores available supplementary query data provider after running the query. Used internally by runQueries().
|
||||
*/
|
||||
export const storeSupplementaryQueryDataProviderAction = createAction<StoreSupplementaryQueryDataProvider>(
|
||||
'explore/storeSupplementaryQueryDataProviderAction'
|
||||
);
|
||||
|
||||
export const cleanSupplementaryQueryDataProviderAction = createAction<CleanSupplementaryQueryDataProvider>(
|
||||
'explore/cleanSupplementaryQueryDataProviderAction'
|
||||
);
|
||||
|
||||
export const cleanSupplementaryQueryAction = createAction<{ exploreId: ExploreId; type: SupplementaryQueryType }>(
|
||||
'explore/cleanSupplementaryQueryAction'
|
||||
);
|
||||
@ -246,8 +253,8 @@ export function cancelQueries(exploreId: ExploreId): ThunkResult<void> {
|
||||
|
||||
const supplementaryQueries = getState().explore[exploreId]!.supplementaryQueries;
|
||||
// Cancel all data providers
|
||||
for (const type of SUPPLEMENTARY_QUERY_TYPES) {
|
||||
dispatch(storeSupplementaryQueryDataProviderAction({ exploreId, dataProvider: undefined, type }));
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
dispatch(cleanSupplementaryQueryDataProviderAction({ exploreId, type }));
|
||||
|
||||
// And clear any incomplete data
|
||||
if (supplementaryQueries[type]?.data?.state !== LoadingState.Done) {
|
||||
@ -438,6 +445,7 @@ export const runQueries = (
|
||||
refreshInterval,
|
||||
absoluteRange,
|
||||
cache,
|
||||
supplementaryQueries,
|
||||
} = exploreItemState;
|
||||
let newQuerySub;
|
||||
|
||||
@ -466,7 +474,9 @@ export const runQueries = (
|
||||
refreshInterval,
|
||||
queries,
|
||||
correlations,
|
||||
datasourceInstance != null && hasLogsVolumeSupport(datasourceInstance)
|
||||
datasourceInstance != null &&
|
||||
(hasSupplementaryQuerySupport(datasourceInstance, SupplementaryQueryType.LogsVolume) ||
|
||||
hasLogsVolumeSupport(datasourceInstance))
|
||||
)
|
||||
)
|
||||
)
|
||||
@ -529,7 +539,9 @@ export const runQueries = (
|
||||
refreshInterval,
|
||||
queries,
|
||||
correlations,
|
||||
datasourceInstance != null && hasLogsVolumeSupport(datasourceInstance)
|
||||
datasourceInstance != null &&
|
||||
(hasSupplementaryQuerySupport(datasourceInstance, SupplementaryQueryType.LogsVolume) ||
|
||||
hasLogsVolumeSupport(datasourceInstance))
|
||||
)
|
||||
)
|
||||
)
|
||||
@ -570,61 +582,69 @@ export const runQueries = (
|
||||
});
|
||||
|
||||
if (live) {
|
||||
for (const type of SUPPLEMENTARY_QUERY_TYPES) {
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
dispatch(
|
||||
storeSupplementaryQueryDataProviderAction({
|
||||
cleanSupplementaryQueryDataProviderAction({
|
||||
exploreId,
|
||||
dataProvider: undefined,
|
||||
type,
|
||||
})
|
||||
);
|
||||
dispatch(cleanSupplementaryQueryAction({ exploreId, type }));
|
||||
}
|
||||
} else {
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
// We always prepare provider, even is supplementary query is disabled because when the user
|
||||
// enables the query, we need to load the data, so we need the provider
|
||||
if (hasSupplementaryQuerySupport(datasourceInstance, type)) {
|
||||
const dataProvider = datasourceInstance.getDataProvider(type, {
|
||||
...transaction.request,
|
||||
requestId: `${transaction.request.requestId}_${snakeCase(type)}`,
|
||||
});
|
||||
dispatch(
|
||||
storeSupplementaryQueryDataProviderAction({
|
||||
exploreId,
|
||||
type,
|
||||
dataProvider,
|
||||
})
|
||||
);
|
||||
|
||||
// In this whole part., we need to figure out
|
||||
// checking the type of enabled supp queries
|
||||
// then for which enabled supp queries has data source support
|
||||
// and then we need to run the supp queries
|
||||
// but we need to make sure that supp queries that dont work
|
||||
// return undefined provider
|
||||
// we should also make sure we store the type of provider that
|
||||
// was last stored
|
||||
} else if (hasLogsVolumeSupport(datasourceInstance)) {
|
||||
// we always prepare the logsVolumeProvider,
|
||||
// but we only load it, if the logs-volume-histogram is enabled.
|
||||
// (we need to have the logsVolumeProvider always actual,
|
||||
// even when the visuals are disabled, because when the user
|
||||
// enables the visuals again, we need to load the histogram,
|
||||
// so we need the provider)
|
||||
const sourceRequest = {
|
||||
...transaction.request,
|
||||
requestId: transaction.request.requestId + '_log_volume',
|
||||
};
|
||||
const type = SupplementaryQueryType.LogsVolume;
|
||||
const dataProvider = datasourceInstance.getLogsVolumeDataProvider(sourceRequest);
|
||||
dispatch(
|
||||
storeSupplementaryQueryDataProviderAction({
|
||||
exploreId,
|
||||
type,
|
||||
dataProvider,
|
||||
})
|
||||
);
|
||||
if (!canReuseSupplementaryQueryData(supplementaryQueries[type].data, queries, absoluteRange)) {
|
||||
dispatch(cleanSupplementaryQueryAction({ exploreId, type }));
|
||||
if (supplementaryQueries[type].enabled) {
|
||||
dispatch(loadSupplementaryQueryData(exploreId, type));
|
||||
}
|
||||
}
|
||||
// Code below (else if scenario) is for backward compatibility with data sources that don't support supplementary queries
|
||||
// TODO: Remove in next major version - v10 (https://github.com/grafana/grafana/issues/61845)
|
||||
} else if (hasLogsVolumeSupport(datasourceInstance) && type === SupplementaryQueryType.LogsVolume) {
|
||||
const dataProvider = datasourceInstance.getLogsVolumeDataProvider({
|
||||
...transaction.request,
|
||||
requestId: `${transaction.request.requestId}_${snakeCase(type)}`,
|
||||
});
|
||||
dispatch(
|
||||
storeSupplementaryQueryDataProviderAction({
|
||||
exploreId,
|
||||
type,
|
||||
dataProvider,
|
||||
})
|
||||
);
|
||||
|
||||
const { supplementaryQueries, absoluteRange } = getState().explore[exploreId]!;
|
||||
if (!canReuseSupplementaryQueryData(supplementaryQueries[type].data, queries, absoluteRange)) {
|
||||
dispatch(cleanSupplementaryQueryAction({ exploreId, type }));
|
||||
if (supplementaryQueries[type].enabled) {
|
||||
dispatch(loadSupplementaryQueryData(exploreId, type));
|
||||
if (!canReuseSupplementaryQueryData(supplementaryQueries[type].data, queries, absoluteRange)) {
|
||||
dispatch(cleanSupplementaryQueryAction({ exploreId, type }));
|
||||
if (supplementaryQueries[type].enabled) {
|
||||
dispatch(loadSupplementaryQueryData(exploreId, type));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If data source instance doesn't support this supplementary query, we clean the data provider
|
||||
dispatch(
|
||||
cleanSupplementaryQueryDataProviderAction({
|
||||
exploreId,
|
||||
type,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
dispatch(
|
||||
storeSupplementaryQueryDataProviderAction({
|
||||
exploreId,
|
||||
dataProvider: undefined,
|
||||
type: SupplementaryQueryType.LogsVolume,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -844,6 +864,26 @@ export const queryReducer = (state: ExploreItemState, action: AnyAction): Explor
|
||||
};
|
||||
}
|
||||
|
||||
if (cleanSupplementaryQueryDataProviderAction.match(action)) {
|
||||
const { type } = action.payload;
|
||||
const { supplementaryQueries } = state;
|
||||
const supplementaryQuery = supplementaryQueries[type];
|
||||
|
||||
if (supplementaryQuery?.dataSubscription) {
|
||||
supplementaryQuery.dataSubscription.unsubscribe();
|
||||
}
|
||||
|
||||
const nextSupplementaryQueries = {
|
||||
...supplementaryQueries,
|
||||
[type]: { ...supplementaryQuery, dataProvider: undefined, dataSubscription: undefined },
|
||||
};
|
||||
|
||||
return {
|
||||
...state,
|
||||
supplementaryQueries: nextSupplementaryQueries,
|
||||
};
|
||||
}
|
||||
|
||||
if (cleanSupplementaryQueryAction.match(action)) {
|
||||
const { type } = action.payload;
|
||||
const { supplementaryQueries } = state;
|
||||
|
@ -12,22 +12,14 @@ import {
|
||||
PanelData,
|
||||
} from '@grafana/data';
|
||||
import { ExplorePanelData } from 'app/types';
|
||||
import { ExploreItemState, SupplementaryQueries, SupplementaryQueryType } from 'app/types/explore';
|
||||
import { ExploreItemState } from 'app/types/explore';
|
||||
|
||||
import store from '../../../core/store';
|
||||
import { clearQueryKeys, lastUsedDatasourceKeyForOrgId } from '../../../core/utils/explore';
|
||||
import { getDatasourceSrv } from '../../plugins/datasource_srv';
|
||||
import { SETTINGS_KEYS } from '../utils/logs';
|
||||
import { loadSupplementaryQueries } from '../utils/supplementaryQueries';
|
||||
import { toRawTimeRange } from '../utils/time';
|
||||
|
||||
export const SUPPLEMENTARY_QUERY_TYPES: SupplementaryQueryType[] = [SupplementaryQueryType.LogsVolume];
|
||||
|
||||
// Used to match supplementaryQueryType to corresponding local storage key
|
||||
// TODO: Remove this and unify enum values with SETTINGS_KEYS.enableVolumeHistogram
|
||||
const supplementaryQuerySettings: { [key in SupplementaryQueryType]: string } = {
|
||||
[SupplementaryQueryType.LogsVolume]: SETTINGS_KEYS.enableVolumeHistogram,
|
||||
};
|
||||
|
||||
export const DEFAULT_RANGE = {
|
||||
from: 'now-6h',
|
||||
to: 'now',
|
||||
@ -38,27 +30,6 @@ export const storeGraphStyle = (graphStyle: string): void => {
|
||||
store.set(GRAPH_STYLE_KEY, graphStyle);
|
||||
};
|
||||
|
||||
export const storeSupplementaryQueryEnabled = (enabled: boolean, type: SupplementaryQueryType): void => {
|
||||
if (supplementaryQuerySettings[type]) {
|
||||
store.set(supplementaryQuerySettings[type], enabled ? 'true' : 'false');
|
||||
}
|
||||
};
|
||||
|
||||
export const loadSupplementaryQueries = (): SupplementaryQueries => {
|
||||
// We default to true for all supp queries
|
||||
let supplementaryQueries: SupplementaryQueries = {
|
||||
[SupplementaryQueryType.LogsVolume]: { enabled: true },
|
||||
};
|
||||
|
||||
for (const type of SUPPLEMENTARY_QUERY_TYPES) {
|
||||
// Only if "false" value in local storage, we disable it
|
||||
if (store.get(supplementaryQuerySettings[type]) === 'false') {
|
||||
supplementaryQueries[type] = { enabled: false };
|
||||
}
|
||||
}
|
||||
return supplementaryQueries;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns a fresh Explore area state
|
||||
*/
|
||||
|
@ -4,5 +4,4 @@ export const SETTINGS_KEYS = {
|
||||
wrapLogMessage: 'grafana.explore.logs.wrapLogMessage',
|
||||
prettifyLogMessage: 'grafana.explore.logs.prettifyLogMessage',
|
||||
logsSortOrder: 'grafana.explore.logs.sortOrder',
|
||||
enableVolumeHistogram: 'grafana.explore.logs.enableVolumeHistogram',
|
||||
};
|
||||
|
public/app/features/explore/utils/supplementaryQueries.ts (new file, 47 lines)
@ -0,0 +1,47 @@
|
||||
import { SupplementaryQueryType } from '@grafana/data';
|
||||
import store from 'app/core/store';
|
||||
import { SupplementaryQueries } from 'app/types';
|
||||
|
||||
export const supplementaryQueryTypes: SupplementaryQueryType[] = [
|
||||
SupplementaryQueryType.LogsVolume,
|
||||
SupplementaryQueryType.LogsSample,
|
||||
];
|
||||
|
||||
const getSupplementaryQuerySettingKey = (type: SupplementaryQueryType) => `grafana.explore.logs.enable${type}`;
|
||||
|
||||
export const storeSupplementaryQueryEnabled = (enabled: boolean, type: SupplementaryQueryType): void => {
|
||||
store.set(getSupplementaryQuerySettingKey(type), enabled ? 'true' : 'false');
|
||||
};
|
||||
|
||||
export const loadSupplementaryQueries = (): SupplementaryQueries => {
|
||||
// We default to true for all supp queries
|
||||
let supplementaryQueries: SupplementaryQueries = {
|
||||
[SupplementaryQueryType.LogsVolume]: { enabled: true },
|
||||
// This is set to false temporarily, until we have UI to display logs sample and a way how to enable/disable it
|
||||
[SupplementaryQueryType.LogsSample]: { enabled: false },
|
||||
};
|
||||
|
||||
for (const type of supplementaryQueryTypes) {
|
||||
if (type === SupplementaryQueryType.LogsVolume) {
|
||||
// TODO: Remove this in 10.0 (#61626)
|
||||
// For LogsVolume we need to migrate old key to new key. So check for old key:
|
||||
// If we have old key: 1) use it 2) migrate to new key 3) delete old key
|
||||
// If not, continue with new key
|
||||
const oldLogsVolumeEnabledKey = 'grafana.explore.logs.enableVolumeHistogram';
|
||||
const shouldBeEnabled = store.get(oldLogsVolumeEnabledKey);
|
||||
if (shouldBeEnabled) {
|
||||
supplementaryQueries[type] = { enabled: shouldBeEnabled === 'true' ? true : false };
|
||||
storeSupplementaryQueryEnabled(shouldBeEnabled === 'true', SupplementaryQueryType.LogsVolume);
|
||||
localStorage.removeItem(oldLogsVolumeEnabledKey);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Only if "false" value in local storage, we disable it
|
||||
const shouldBeEnabled = store.get(getSupplementaryQuerySettingKey(type));
|
||||
if (shouldBeEnabled === 'false') {
|
||||
supplementaryQueries[type] = { enabled: false };
|
||||
}
|
||||
}
|
||||
return supplementaryQueries;
|
||||
};
|
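A hedged usage sketch of the helpers above (the absolute app/ import path is an assumption for the sketch):

import { SupplementaryQueryType } from '@grafana/data';
import {
  loadSupplementaryQueries,
  storeSupplementaryQueryEnabled,
} from 'app/features/explore/utils/supplementaryQueries';

// Persists the toggle under the key 'grafana.explore.logs.enableLogsVolume'.
storeSupplementaryQueryEnabled(false, SupplementaryQueryType.LogsVolume);

// Reads the toggles back on the next Explore init; for LogsVolume this also migrates the
// old 'grafana.explore.logs.enableVolumeHistogram' key if it is still present in local storage.
const supplementaryQueries = loadSupplementaryQueries();
// Assuming the old key is absent: supplementaryQueries[SupplementaryQueryType.LogsVolume].enabled === false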
@ -10,7 +10,7 @@ import {
|
||||
DataSourceInstanceSettings,
|
||||
DataSourceWithLogsContextSupport,
|
||||
DataSourceWithQueryImportSupport,
|
||||
DataSourceWithLogsVolumeSupport,
|
||||
DataSourceWithSupplementaryQueriesSupport,
|
||||
DateTime,
|
||||
dateTime,
|
||||
Field,
|
||||
@ -24,6 +24,7 @@ import {
|
||||
toUtc,
|
||||
QueryFixAction,
|
||||
CoreApp,
|
||||
SupplementaryQueryType,
|
||||
} from '@grafana/data';
|
||||
import { BackendSrvRequest, DataSourceWithBackend, getBackendSrv, getDataSourceSrv, config } from '@grafana/runtime';
|
||||
import { queryLogsVolume } from 'app/core/logsModel';
|
||||
@ -74,7 +75,7 @@ export class ElasticDatasource
|
||||
implements
|
||||
DataSourceWithLogsContextSupport,
|
||||
DataSourceWithQueryImportSupport<ElasticsearchQuery>,
|
||||
DataSourceWithLogsVolumeSupport<ElasticsearchQuery>
|
||||
DataSourceWithSupplementaryQueriesSupport<ElasticsearchQuery>
|
||||
{
|
||||
basicAuth?: string;
|
||||
withCredentials?: boolean;
|
||||
@ -580,6 +581,25 @@ export class ElasticDatasource
|
||||
return logResponse;
|
||||
};
|
||||
|
||||
getDataProvider(
|
||||
type: SupplementaryQueryType,
|
||||
request: DataQueryRequest<ElasticsearchQuery>
|
||||
): Observable<DataQueryResponse> | undefined {
|
||||
if (!this.getSupportedSupplementaryQueryTypes().includes(type)) {
|
||||
return undefined;
|
||||
}
|
||||
switch (type) {
|
||||
case SupplementaryQueryType.LogsVolume:
|
||||
return this.getLogsVolumeDataProvider(request);
|
||||
default:
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
getSupportedSupplementaryQueryTypes(): SupplementaryQueryType[] {
|
||||
return [SupplementaryQueryType.LogsVolume];
|
||||
}
|
||||
|
||||
getLogsVolumeDataProvider(request: DataQueryRequest<ElasticsearchQuery>): Observable<DataQueryResponse> | undefined {
|
||||
const isLogsVolumeAvailable = request.targets.some((target) => {
|
||||
return target.metrics?.length === 1 && target.metrics[0].type === 'logs';
|
||||
|
@ -15,6 +15,7 @@ import {
|
||||
FieldType,
|
||||
LogRowModel,
|
||||
MutableDataFrame,
|
||||
SupplementaryQueryType,
|
||||
} from '@grafana/data';
|
||||
import {
|
||||
BackendSrv,
|
||||
@ -896,7 +897,7 @@ describe('LokiDatasource', () => {
|
||||
targets: [{ expr: '{label=value}', refId: 'A' }],
|
||||
});
|
||||
|
||||
expect(ds.getLogsVolumeDataProvider(options)).toBeDefined();
|
||||
expect(ds.getDataProvider(SupplementaryQueryType.LogsVolume, options)).toBeDefined();
|
||||
});
|
||||
|
||||
it('does not create provider for metrics query', () => {
|
||||
@ -904,7 +905,7 @@ describe('LokiDatasource', () => {
|
||||
targets: [{ expr: 'rate({label=value}[1m])', refId: 'A' }],
|
||||
});
|
||||
|
||||
expect(ds.getLogsVolumeDataProvider(options)).not.toBeDefined();
|
||||
expect(ds.getDataProvider(SupplementaryQueryType.LogsVolume, options)).not.toBeDefined();
|
||||
});
|
||||
|
||||
it('creates provider if at least one query is a logs query', () => {
|
||||
@ -915,7 +916,7 @@ describe('LokiDatasource', () => {
|
||||
],
|
||||
});
|
||||
|
||||
expect(ds.getLogsVolumeDataProvider(options)).toBeDefined();
|
||||
expect(ds.getDataProvider(SupplementaryQueryType.LogsVolume, options)).toBeDefined();
|
||||
});
|
||||
|
||||
it('does not create provider if there is only an instant logs query', () => {
|
||||
@ -923,7 +924,41 @@ describe('LokiDatasource', () => {
|
||||
targets: [{ expr: '{label=value', refId: 'A', queryType: LokiQueryType.Instant }],
|
||||
});
|
||||
|
||||
expect(ds.getLogsVolumeDataProvider(options)).not.toBeDefined();
|
||||
expect(ds.getDataProvider(SupplementaryQueryType.LogsVolume, options)).not.toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('logs sample data provider', () => {
|
||||
let ds: LokiDatasource;
|
||||
beforeEach(() => {
|
||||
ds = createLokiDatasource(templateSrvStub);
|
||||
});
|
||||
|
||||
it('creates provider for metrics query', () => {
|
||||
const options = getQueryOptions<LokiQuery>({
|
||||
targets: [{ expr: 'rate({label=value}[5m])', refId: 'A' }],
|
||||
});
|
||||
|
||||
expect(ds.getDataProvider(SupplementaryQueryType.LogsSample, options)).toBeDefined();
|
||||
});
|
||||
|
||||
it('does not create provider for log query', () => {
|
||||
const options = getQueryOptions<LokiQuery>({
|
||||
targets: [{ expr: '{label=value}', refId: 'A' }],
|
||||
});
|
||||
|
||||
expect(ds.getDataProvider(SupplementaryQueryType.LogsSample, options)).not.toBeDefined();
|
||||
});
|
||||
|
||||
it('creates provider if at least one query is a metric query', () => {
|
||||
const options = getQueryOptions<LokiQuery>({
|
||||
targets: [
|
||||
{ expr: 'rate({label=value}[1m])', refId: 'A' },
|
||||
{ expr: '{label=value}', refId: 'B' },
|
||||
],
|
||||
});
|
||||
|
||||
expect(ds.getDataProvider(SupplementaryQueryType.LogsSample, options)).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -14,7 +14,8 @@ import {
|
||||
DataQueryResponse,
|
||||
DataSourceInstanceSettings,
|
||||
DataSourceWithLogsContextSupport,
|
||||
DataSourceWithLogsVolumeSupport,
|
||||
DataSourceWithSupplementaryQueriesSupport,
|
||||
SupplementaryQueryType,
|
||||
DataSourceWithQueryExportSupport,
|
||||
DataSourceWithQueryImportSupport,
|
||||
dateMath,
|
||||
@ -34,7 +35,7 @@ import {
|
||||
toUtc,
|
||||
} from '@grafana/data';
|
||||
import { config, DataSourceWithBackend, FetchError } from '@grafana/runtime';
|
||||
import { queryLogsVolume } from 'app/core/logsModel';
|
||||
import { queryLogsSample, queryLogsVolume } from 'app/core/logsModel';
|
||||
import { convertToWebSocketUrl } from 'app/core/utils/explore';
|
||||
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
@ -65,7 +66,7 @@ import {
|
||||
getLabelFilterPositions,
|
||||
} from './modifyQuery';
|
||||
import { getQueryHints } from './queryHints';
|
||||
import { getNormalizedLokiQuery, isLogsQuery, isValidQuery } from './queryUtils';
|
||||
import { getLogQueryFromMetricsQuery, getNormalizedLokiQuery, isLogsQuery, isValidQuery } from './queryUtils';
|
||||
import { sortDataFrameByTime } from './sortDataFrame';
|
||||
import { doLokiChannelStream } from './streaming';
|
||||
import { trackQuery } from './tracking';
|
||||
@ -86,6 +87,7 @@ export const REF_ID_DATA_SAMPLES = 'loki-data-samples';
|
||||
export const REF_ID_STARTER_ANNOTATION = 'annotation-';
|
||||
export const REF_ID_STARTER_LOG_ROW_CONTEXT = 'log-row-context-query-';
|
||||
export const REF_ID_STARTER_LOG_VOLUME = 'log-volume-';
|
||||
export const REF_ID_STARTER_LOG_SAMPLE = 'log-sample-';
|
||||
const NS_IN_MS = 1000000;
|
||||
|
||||
function makeRequest(
|
||||
@ -114,7 +116,7 @@ export class LokiDatasource
|
||||
extends DataSourceWithBackend<LokiQuery, LokiOptions>
|
||||
implements
|
||||
DataSourceWithLogsContextSupport,
|
||||
DataSourceWithLogsVolumeSupport<LokiQuery>,
|
||||
DataSourceWithSupplementaryQueriesSupport<LokiQuery>,
|
||||
DataSourceWithQueryImportSupport<LokiQuery>,
|
||||
DataSourceWithQueryExportSupport<LokiQuery>
|
||||
{
|
||||
@ -138,6 +140,27 @@ export class LokiDatasource
|
||||
this.variables = new LokiVariableSupport(this);
|
||||
}
|
||||
|
||||
getDataProvider(
|
||||
type: SupplementaryQueryType,
|
||||
request: DataQueryRequest<LokiQuery>
|
||||
): Observable<DataQueryResponse> | undefined {
|
||||
if (!this.getSupportedSupplementaryQueryTypes().includes(type)) {
|
||||
return undefined;
|
||||
}
|
||||
switch (type) {
|
||||
case SupplementaryQueryType.LogsVolume:
|
||||
return this.getLogsVolumeDataProvider(request);
|
||||
case SupplementaryQueryType.LogsSample:
|
||||
return this.getLogsSampleDataProvider(request);
|
||||
default:
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
getSupportedSupplementaryQueryTypes(): SupplementaryQueryType[] {
|
||||
return [SupplementaryQueryType.LogsVolume, SupplementaryQueryType.LogsSample];
|
||||
}
|
||||
|
||||
getLogsVolumeDataProvider(request: DataQueryRequest<LokiQuery>): Observable<DataQueryResponse> | undefined {
|
||||
const isQuerySuitable = (query: LokiQuery) => {
|
||||
const normalized = getNormalizedLokiQuery(query);
|
||||
@ -171,6 +194,31 @@ export class LokiDatasource
|
||||
});
|
||||
}
|
||||
|
||||
getLogsSampleDataProvider(request: DataQueryRequest<LokiQuery>): Observable<DataQueryResponse> | undefined {
|
||||
const isQuerySuitable = (query: LokiQuery) => {
|
||||
return query.expr && !isLogsQuery(query.expr);
|
||||
};
|
||||
|
||||
const isLogsSampleAvailable = request.targets.some(isQuerySuitable);
|
||||
|
||||
if (!isLogsSampleAvailable) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const logsSampleRequest = cloneDeep(request);
|
||||
logsSampleRequest.targets = logsSampleRequest.targets.filter(isQuerySuitable).map((target) => {
|
||||
const query = removeCommentsFromQuery(target.expr);
|
||||
return {
|
||||
...target,
|
||||
refId: `${REF_ID_STARTER_LOG_SAMPLE}${target.refId}`,
|
||||
expr: getLogQueryFromMetricsQuery(query),
|
||||
maxLines: 100,
|
||||
};
|
||||
});
|
||||
|
||||
return queryLogsSample(this, logsSampleRequest);
|
||||
}
|
||||
|
||||
query(request: DataQueryRequest<LokiQuery>): Observable<DataQueryResponse> {
|
||||
const queries = request.targets
|
||||
.map(getNormalizedLokiQuery) // "fix" the `.queryType` prop
|
||||
|
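A hedged usage sketch against the Loki implementation above (mirrors the expectations in the data source tests earlier in this diff; the import paths and declared values are assumptions for the sketch):

import { DataQueryRequest, SupplementaryQueryType } from '@grafana/data';
import { LokiDatasource } from 'app/plugins/datasource/loki/datasource';
import { LokiQuery } from 'app/plugins/datasource/loki/types';

declare const ds: LokiDatasource;
declare const metricsRequest: DataQueryRequest<LokiQuery>; // e.g. targets: [{ expr: 'rate({label=value}[5m])', refId: 'A' }]
declare const logsRequest: DataQueryRequest<LokiQuery>;    // e.g. targets: [{ expr: '{label=value}', refId: 'A' }]

// Metric queries qualify for a logs sample: each expression is rewritten to its underlying
// log query (maxLines: 100) and run through queryLogsSample().
ds.getDataProvider(SupplementaryQueryType.LogsSample, metricsRequest); // Observable<DataQueryResponse>

// Plain log queries do not qualify, so no logs sample provider is created.
ds.getDataProvider(SupplementaryQueryType.LogsSample, logsRequest); // undefined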
@ -15,6 +15,7 @@ import {
|
||||
EventBusExtended,
|
||||
DataQueryResponse,
|
||||
ExplorePanelsState,
|
||||
SupplementaryQueryType,
|
||||
} from '@grafana/data';
|
||||
import { RichHistorySearchFilters, RichHistorySettings } from 'app/core/utils/richHistoryTypes';
|
||||
|
||||
@ -279,7 +280,3 @@ export interface SupplementaryQuery {
|
||||
export type SupplementaryQueries = {
|
||||
[key in SupplementaryQueryType]: SupplementaryQuery;
|
||||
};
|
||||
|
||||
export enum SupplementaryQueryType {
|
||||
LogsVolume = 'LogsVolume',
|
||||
}
|
||||
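For clarity, with SupplementaryQueryType now imported from @grafana/data (the local enum above is removed), the mapped type covers both members; a sketch of the equivalent expanded shape:

import { SupplementaryQueryType } from '@grafana/data';
import { SupplementaryQuery } from 'app/types/explore';

// Equivalent to the SupplementaryQueries mapped type after this change.
type ExpandedSupplementaryQueries = {
  [SupplementaryQueryType.LogsVolume]: SupplementaryQuery;
  [SupplementaryQueryType.LogsSample]: SupplementaryQuery;
};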