Explore: Support wide data frames (#28393)

* Change how isTimeSeries works
* Simplify the decorators and update tests

parent 0bb33839f5
commit 8f4be08b00
@@ -5,6 +5,12 @@ export const initialState: AppNotificationsState = {
appNotifications: [] as AppNotification[],
};

/**
* Reducer and action to show toast notifications of various types (success, warnings, errors etc). Use to show
* transient info to user, like errors that cannot be otherwise handled or success after an action.
*
* Use factory functions in core/copy/appNotifications to create the payload.
*/
const appNotificationsSlice = createSlice({
name: 'appNotifications',
initialState,
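The new comment points at the factory functions in core/copy/appNotifications; elsewhere in this commit they are combined with the notifyApp action to surface query errors as toasts. A minimal sketch of that pattern, not part of the diff (the absolute import paths are assumed aliases for the relative imports used in this commit):

// Minimal sketch: surfacing an error as a toast notification.
import { notifyApp } from 'app/core/actions'; // assumed alias for ../../../core/actions
import { createErrorNotification } from 'app/core/copy/appNotification';

// Inside a thunk, or anywhere with access to dispatch:
dispatch(notifyApp(createErrorNotification('Query processing error', error)));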
@@ -1,5 +1,5 @@
// Libraries
import { map, throttleTime } from 'rxjs/operators';
import { map, mergeMap, throttleTime } from 'rxjs/operators';
import { identity } from 'rxjs';
import { PayloadAction } from '@reduxjs/toolkit';
import { DataSourceSrv } from '@grafana/runtime';
@@ -84,7 +84,7 @@ import {
} from './actionTypes';
import { getTimeZone } from 'app/features/profile/state/selectors';
import { getShiftedTimeRange } from 'app/core/utils/timePicker';
import { updateLocation } from '../../../core/actions';
import { notifyApp, updateLocation } from '../../../core/actions';
import { getTimeSrv, TimeSrv } from '../../dashboard/services/TimeSrv';
import { preProcessPanelData, runRequest } from '../../dashboard/state/runRequest';
import { DashboardModel, PanelModel } from 'app/features/dashboard/state';
@@ -96,6 +96,7 @@ import {
decorateWithLogsResult,
decorateWithTableResult,
} from '../utils/decorators';
import { createErrorNotification } from '../../../core/copy/appNotification';

/**
* Adds a query row after the row with the given index.
@@ -427,6 +428,8 @@ export const runQueries = (exploreId: ExploreId): ThunkResult<void> => {
queryResponse,
querySubscription,
history,
refreshInterval,
absoluteRange,
} = exploreItemState;

if (!hasNonEmptyQuery(queries)) {
@@ -473,47 +476,54 @@ export const runQueries = (exploreId: ExploreId): ThunkResult<void> => {
// actually can see what is happening.
live ? throttleTime(500) : identity,
map((data: PanelData) => preProcessPanelData(data, queryResponse)),
decorateWithGraphLogsTraceAndTable(getState().explore[exploreId].datasourceInstance),
decorateWithGraphResult(),
decorateWithTableResult(),
decorateWithLogsResult(getState().explore[exploreId])
map(decorateWithGraphLogsTraceAndTable),
map(decorateWithGraphResult),
map(decorateWithLogsResult({ absoluteRange, refreshInterval })),
mergeMap(decorateWithTableResult)
)
.subscribe(data => {
if (!data.error && firstResponse) {
// Side-effect: Saving history in localstorage
const nextHistory = updateHistory(history, datasourceId, queries);
const nextRichHistory = addToRichHistory(
richHistory || [],
datasourceId,
datasourceName,
queries,
false,
'',
''
);
dispatch(historyUpdatedAction({ exploreId, history: nextHistory }));
dispatch(richHistoryUpdatedAction({ richHistory: nextRichHistory }));
.subscribe(
data => {
if (!data.error && firstResponse) {
// Side-effect: Saving history in localstorage
const nextHistory = updateHistory(history, datasourceId, queries);
const nextRichHistory = addToRichHistory(
richHistory || [],
datasourceId,
datasourceName,
queries,
false,
'',
''
);
dispatch(historyUpdatedAction({ exploreId, history: nextHistory }));
dispatch(richHistoryUpdatedAction({ richHistory: nextRichHistory }));

// We save queries to the URL here so that only successfully run queries change the URL.
dispatch(stateSave());
}

firstResponse = false;

dispatch(queryStreamUpdatedAction({ exploreId, response: data }));

// Keep scanning for results if this was the last scanning transaction
if (getState().explore[exploreId].scanning) {
if (data.state === LoadingState.Done && data.series.length === 0) {
const range = getShiftedTimeRange(-1, getState().explore[exploreId].range);
dispatch(updateTime({ exploreId, absoluteRange: range }));
dispatch(runQueries(exploreId));
} else {
// We can stop scanning if we have a result
dispatch(scanStopAction({ exploreId }));
// We save queries to the URL here so that only successfully run queries change the URL.
dispatch(stateSave());
}

firstResponse = false;

dispatch(queryStreamUpdatedAction({ exploreId, response: data }));

// Keep scanning for results if this was the last scanning transaction
if (getState().explore[exploreId].scanning) {
if (data.state === LoadingState.Done && data.series.length === 0) {
const range = getShiftedTimeRange(-1, getState().explore[exploreId].range);
dispatch(updateTime({ exploreId, absoluteRange: range }));
dispatch(runQueries(exploreId));
} else {
// We can stop scanning if we have a result
dispatch(scanStopAction({ exploreId }));
}
}
},
error => {
dispatch(notifyApp(createErrorNotification('Query processing error', error)));
dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Error }));
console.error(error);
}
});
);

dispatch(queryStoreSubscriptionAction({ exploreId, querySubscription: newQuerySub }));
};
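The pipeline above is the core of the change: the decorators are no longer custom RxJS operators but plain functions (only decorateWithTableResult returns an Observable), so they compose with the standard map and mergeMap operators. A condensed sketch of that shape, using only identifiers that appear in this diff; `observable` stands in for the query result stream created earlier in runQueries, and the rest of the setup and dispatch wiring is omitted:

// Sketch only: live, queryResponse, absoluteRange, refreshInterval, dispatch and the
// decorators all come from the hunk above; observable is a placeholder for the stream.
observable
  .pipe(
    live ? throttleTime(500) : identity,
    map((data: PanelData) => preProcessPanelData(data, queryResponse)),
    map(decorateWithGraphLogsTraceAndTable), // PanelData -> ExplorePanelData
    map(decorateWithGraphResult), // synchronous, so plain map
    map(decorateWithLogsResult({ absoluteRange, refreshInterval })), // factory returns a function
    mergeMap(decorateWithTableResult) // returns an Observable, so mergeMap flattens it
  )
  .subscribe(
    data => {
      // dispatch results, history and URL updates as in the hunk above
    },
    error => {
      dispatch(notifyApp(createErrorNotification('Query processing error', error)));
      dispatch(changeLoadingStateAction({ exploreId, loadingState: LoadingState.Error }));
    }
  );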
@@ -3,15 +3,12 @@ jest.mock('@grafana/data/src/datetime/formatter', () => ({
|
||||
dateTimeFormatTimeAgo: (ts: any) => 'fromNow() jest mocked',
|
||||
}));
|
||||
|
||||
import { of } from 'rxjs';
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataQueryRequest,
|
||||
DataSourceApi,
|
||||
FieldType,
|
||||
LoadingState,
|
||||
observableTester,
|
||||
PanelData,
|
||||
TimeRange,
|
||||
toDataFrame,
|
||||
@@ -24,7 +21,7 @@ import {
|
||||
decorateWithTableResult,
|
||||
} from './decorators';
|
||||
import { describe } from '../../../../test/lib/common';
|
||||
import { ExploreItemState, ExplorePanelData } from 'app/types';
|
||||
import { ExplorePanelData } from 'app/types';
|
||||
import TableModel from 'app/core/table_model';
|
||||
|
||||
const getTestContext = () => {
|
||||
@@ -37,6 +34,7 @@ const getTestContext = () => {
|
||||
fields: [
|
||||
{ name: 'time', type: FieldType.time, values: [100, 200, 300] },
|
||||
{ name: 'A-series', type: FieldType.number, values: [4, 5, 6] },
|
||||
{ name: 'B-series', type: FieldType.number, values: [7, 8, 9] },
|
||||
],
|
||||
});
|
||||
|
||||
@@ -86,450 +84,337 @@ const createExplorePanelData = (args: Partial<ExplorePanelData>): ExplorePanelDa
|
||||
};
|
||||
|
||||
describe('decorateWithGraphLogsTraceAndTable', () => {
|
||||
describe('when used without error', () => {
|
||||
it('then the result should be correct', done => {
|
||||
const { table, logs, timeSeries, emptyTable } = getTestContext();
|
||||
const datasourceInstance = ({ meta: { id: 'prometheus' } } as unknown) as DataSourceApi;
|
||||
const series = [table, logs, timeSeries, emptyTable];
|
||||
const panelData: PanelData = {
|
||||
series,
|
||||
state: LoadingState.Done,
|
||||
timeRange: ({} as unknown) as TimeRange,
|
||||
};
|
||||
it('should correctly classify the dataFrames', () => {
|
||||
const { table, logs, timeSeries, emptyTable } = getTestContext();
|
||||
const series = [table, logs, timeSeries, emptyTable];
|
||||
const panelData: PanelData = {
|
||||
series,
|
||||
state: LoadingState.Done,
|
||||
timeRange: ({} as unknown) as TimeRange,
|
||||
};
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithGraphLogsTraceAndTable(datasourceInstance)),
|
||||
expect: value => {
|
||||
expect(value).toEqual({
|
||||
series,
|
||||
state: LoadingState.Done,
|
||||
timeRange: {},
|
||||
graphFrames: [timeSeries],
|
||||
tableFrames: [table, emptyTable],
|
||||
logsFrames: [logs],
|
||||
traceFrames: [],
|
||||
graphResult: null,
|
||||
tableResult: null,
|
||||
logsResult: null,
|
||||
});
|
||||
},
|
||||
done,
|
||||
});
|
||||
expect(decorateWithGraphLogsTraceAndTable(panelData)).toEqual({
|
||||
series,
|
||||
state: LoadingState.Done,
|
||||
timeRange: {},
|
||||
graphFrames: [timeSeries],
|
||||
tableFrames: [table, emptyTable],
|
||||
logsFrames: [logs],
|
||||
traceFrames: [],
|
||||
graphResult: null,
|
||||
tableResult: null,
|
||||
logsResult: null,
|
||||
});
|
||||
});
|
||||
|
||||
describe('when used without frames', () => {
|
||||
it('then the result should be correct', done => {
|
||||
const datasourceInstance = ({ meta: { id: 'prometheus' } } as unknown) as DataSourceApi;
|
||||
const series: DataFrame[] = [];
|
||||
const panelData: PanelData = {
|
||||
series,
|
||||
state: LoadingState.Done,
|
||||
timeRange: ({} as unknown) as TimeRange,
|
||||
};
|
||||
it('should handle empty array', () => {
|
||||
const series: DataFrame[] = [];
|
||||
const panelData: PanelData = {
|
||||
series,
|
||||
state: LoadingState.Done,
|
||||
timeRange: ({} as unknown) as TimeRange,
|
||||
};
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithGraphLogsTraceAndTable(datasourceInstance)),
|
||||
expect: value => {
|
||||
expect(value).toEqual({
|
||||
series: [],
|
||||
state: LoadingState.Done,
|
||||
timeRange: {},
|
||||
graphFrames: [],
|
||||
tableFrames: [],
|
||||
logsFrames: [],
|
||||
traceFrames: [],
|
||||
graphResult: null,
|
||||
tableResult: null,
|
||||
logsResult: null,
|
||||
});
|
||||
},
|
||||
done,
|
||||
});
|
||||
expect(decorateWithGraphLogsTraceAndTable(panelData)).toEqual({
|
||||
series: [],
|
||||
state: LoadingState.Done,
|
||||
timeRange: {},
|
||||
graphFrames: [],
|
||||
tableFrames: [],
|
||||
logsFrames: [],
|
||||
traceFrames: [],
|
||||
graphResult: null,
|
||||
tableResult: null,
|
||||
logsResult: null,
|
||||
});
|
||||
});
|
||||
|
||||
describe('when used with an error', () => {
|
||||
it('then the result should be correct', done => {
|
||||
const { timeSeries, logs, table } = getTestContext();
|
||||
const datasourceInstance = ({ meta: { id: 'prometheus' } } as unknown) as DataSourceApi;
|
||||
const series: DataFrame[] = [timeSeries, logs, table];
|
||||
const panelData: PanelData = {
|
||||
series,
|
||||
error: {},
|
||||
state: LoadingState.Error,
|
||||
timeRange: ({} as unknown) as TimeRange,
|
||||
};
|
||||
it('should handle query error', () => {
|
||||
const { timeSeries, logs, table } = getTestContext();
|
||||
const series: DataFrame[] = [timeSeries, logs, table];
|
||||
const panelData: PanelData = {
|
||||
series,
|
||||
error: {},
|
||||
state: LoadingState.Error,
|
||||
timeRange: ({} as unknown) as TimeRange,
|
||||
};
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithGraphLogsTraceAndTable(datasourceInstance)),
|
||||
expect: value => {
|
||||
expect(value).toEqual({
|
||||
series: [timeSeries, logs, table],
|
||||
error: {},
|
||||
state: LoadingState.Error,
|
||||
timeRange: {},
|
||||
graphFrames: [],
|
||||
tableFrames: [],
|
||||
logsFrames: [],
|
||||
traceFrames: [],
|
||||
graphResult: null,
|
||||
tableResult: null,
|
||||
logsResult: null,
|
||||
});
|
||||
},
|
||||
done,
|
||||
});
|
||||
expect(decorateWithGraphLogsTraceAndTable(panelData)).toEqual({
|
||||
series: [timeSeries, logs, table],
|
||||
error: {},
|
||||
state: LoadingState.Error,
|
||||
timeRange: {},
|
||||
graphFrames: [],
|
||||
tableFrames: [],
|
||||
logsFrames: [],
|
||||
traceFrames: [],
|
||||
graphResult: null,
|
||||
tableResult: null,
|
||||
logsResult: null,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('decorateWithGraphResult', () => {
|
||||
describe('when used without error', () => {
|
||||
it('then the graphResult should be correct', done => {
|
||||
const { timeSeries } = getTestContext();
|
||||
const timeField = timeSeries.fields[0];
|
||||
const valueField = timeSeries.fields[1];
|
||||
const panelData = createExplorePanelData({ graphFrames: [timeSeries] });
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithGraphResult()),
|
||||
expect: panelData => {
|
||||
expect(panelData.graphResult![0]).toEqual({
|
||||
label: 'A-series',
|
||||
color: '#7EB26D',
|
||||
data: [
|
||||
[100, 4],
|
||||
[200, 5],
|
||||
[300, 6],
|
||||
],
|
||||
info: [],
|
||||
isVisible: true,
|
||||
yAxis: {
|
||||
index: 1,
|
||||
},
|
||||
seriesIndex: 0,
|
||||
timeField,
|
||||
valueField,
|
||||
timeStep: 100,
|
||||
});
|
||||
it('should process the graph dataFrames', () => {
|
||||
const { timeSeries } = getTestContext();
|
||||
const panelData = createExplorePanelData({ graphFrames: [timeSeries] });
|
||||
console.log(decorateWithGraphResult(panelData).graphResult);
|
||||
expect(decorateWithGraphResult(panelData).graphResult).toMatchObject([
|
||||
{
|
||||
label: 'A-series',
|
||||
data: [
|
||||
[100, 4],
|
||||
[200, 5],
|
||||
[300, 6],
|
||||
],
|
||||
isVisible: true,
|
||||
yAxis: {
|
||||
index: 1,
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
seriesIndex: 0,
|
||||
timeStep: 100,
|
||||
},
|
||||
{
|
||||
label: 'B-series',
|
||||
data: [
|
||||
[100, 7],
|
||||
[200, 8],
|
||||
[300, 9],
|
||||
],
|
||||
isVisible: true,
|
||||
yAxis: {
|
||||
index: 1,
|
||||
},
|
||||
seriesIndex: 1,
|
||||
timeStep: 100,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
describe('when used without error but graph frames are empty', () => {
|
||||
it('then the graphResult should be null', done => {
|
||||
const panelData = createExplorePanelData({ graphFrames: [] });
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithGraphResult()),
|
||||
expect: panelData => {
|
||||
expect(panelData.graphResult).toBeNull();
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
it('returns null if it gets empty array', () => {
|
||||
const panelData = createExplorePanelData({ graphFrames: [] });
|
||||
expect(decorateWithGraphResult(panelData).graphResult).toBeNull();
|
||||
});
|
||||
|
||||
describe('when used with error', () => {
|
||||
it('then the graphResult should be null', done => {
|
||||
const { timeSeries } = getTestContext();
|
||||
const panelData = createExplorePanelData({ error: {}, graphFrames: [timeSeries] });
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithGraphResult()),
|
||||
expect: panelData => {
|
||||
expect(panelData.graphResult).toBeNull();
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
it('returns null if panelData has error', () => {
|
||||
const { timeSeries } = getTestContext();
|
||||
const panelData = createExplorePanelData({ error: {}, graphFrames: [timeSeries] });
|
||||
expect(decorateWithGraphResult(panelData).graphResult).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('decorateWithTableResult', () => {
|
||||
describe('when used without error', () => {
|
||||
it('then the tableResult should be correct', done => {
|
||||
const { table, emptyTable } = getTestContext();
|
||||
const panelData = createExplorePanelData({ tableFrames: [table, emptyTable] });
|
||||
it('should process table type dataFrame', async () => {
|
||||
const { table, emptyTable } = getTestContext();
|
||||
const panelData = createExplorePanelData({ tableFrames: [table, emptyTable] });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithTableResult()),
|
||||
expect: panelData => {
|
||||
let theResult = panelData.tableResult;
|
||||
let theResult = panelResult.tableResult;
|
||||
|
||||
expect(theResult?.fields[0].name).toEqual('value');
|
||||
expect(theResult?.fields[1].name).toEqual('time');
|
||||
expect(theResult?.fields[2].name).toEqual('tsNs');
|
||||
expect(theResult?.fields[3].name).toEqual('message');
|
||||
expect(theResult?.fields[1].display).not.toBeNull();
|
||||
expect(theResult?.length).toBe(3);
|
||||
expect(theResult?.fields[0].name).toEqual('value');
|
||||
expect(theResult?.fields[1].name).toEqual('time');
|
||||
expect(theResult?.fields[2].name).toEqual('tsNs');
|
||||
expect(theResult?.fields[3].name).toEqual('message');
|
||||
expect(theResult?.fields[1].display).not.toBeNull();
|
||||
expect(theResult?.length).toBe(3);
|
||||
|
||||
// I don't understand the purpose of the code below, feels like this belongs in toDataFrame tests?
|
||||
// Same data though a DataFrame
|
||||
theResult = toDataFrame(
|
||||
new TableModel({
|
||||
columns: [
|
||||
{ text: 'value', type: 'number' },
|
||||
{ text: 'time', type: 'time' },
|
||||
{ text: 'tsNs', type: 'time' },
|
||||
{ text: 'message', type: 'string' },
|
||||
],
|
||||
rows: [
|
||||
[4, 100, '100000000', 'this is a message'],
|
||||
[5, 200, '100000000', 'second message'],
|
||||
[6, 300, '100000000', 'third'],
|
||||
],
|
||||
type: 'table',
|
||||
})
|
||||
);
|
||||
expect(theResult.fields[0].name).toEqual('value');
|
||||
expect(theResult.fields[1].name).toEqual('time');
|
||||
expect(theResult.fields[2].name).toEqual('tsNs');
|
||||
expect(theResult.fields[3].name).toEqual('message');
|
||||
expect(theResult.fields[1].display).not.toBeNull();
|
||||
expect(theResult.length).toBe(3);
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
|
||||
it('should do join transform if all series are timeseries', done => {
|
||||
const tableFrames = [
|
||||
toDataFrame({
|
||||
name: 'A-series',
|
||||
refId: 'A',
|
||||
fields: [
|
||||
{ name: 'Time', type: FieldType.time, values: [100, 200, 300] },
|
||||
{ name: 'A-series', type: FieldType.number, values: [4, 5, 6] },
|
||||
],
|
||||
}),
|
||||
toDataFrame({
|
||||
name: 'B-series',
|
||||
refId: 'B',
|
||||
fields: [
|
||||
{ name: 'Time', type: FieldType.time, values: [100, 200, 300] },
|
||||
{ name: 'B-series', type: FieldType.number, values: [4, 5, 6] },
|
||||
],
|
||||
}),
|
||||
];
|
||||
const panelData = createExplorePanelData({ tableFrames });
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithTableResult()),
|
||||
expect: panelData => {
|
||||
const result = panelData.tableResult;
|
||||
|
||||
expect(result?.fields[0].name).toBe('Time');
|
||||
expect(result?.fields[1].name).toBe('A-series');
|
||||
expect(result?.fields[2].name).toBe('B-series');
|
||||
expect(result?.fields[0].values.toArray()).toEqual([100, 200, 300]);
|
||||
expect(result?.fields[1].values.toArray()).toEqual([4, 5, 6]);
|
||||
expect(result?.fields[2].values.toArray()).toEqual([4, 5, 6]);
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
|
||||
it('should not override fields display property when filled', done => {
|
||||
const tableFrames = [
|
||||
toDataFrame({
|
||||
name: 'A-series',
|
||||
refId: 'A',
|
||||
fields: [{ name: 'Text', type: FieldType.string, values: ['someText'] }],
|
||||
}),
|
||||
];
|
||||
const displayFunctionMock = jest.fn();
|
||||
tableFrames[0].fields[0].display = displayFunctionMock;
|
||||
|
||||
const panelData = createExplorePanelData({ tableFrames });
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithTableResult()),
|
||||
expect: panelData => {
|
||||
const data = panelData.tableResult;
|
||||
expect(data?.fields[0].display).toBe(displayFunctionMock);
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
// I don't understand the purpose of the code below, feels like this belongs in toDataFrame tests?
|
||||
// Same data though a DataFrame
|
||||
theResult = toDataFrame(
|
||||
new TableModel({
|
||||
columns: [
|
||||
{ text: 'value', type: 'number' },
|
||||
{ text: 'time', type: 'time' },
|
||||
{ text: 'tsNs', type: 'time' },
|
||||
{ text: 'message', type: 'string' },
|
||||
],
|
||||
rows: [
|
||||
[4, 100, '100000000', 'this is a message'],
|
||||
[5, 200, '100000000', 'second message'],
|
||||
[6, 300, '100000000', 'third'],
|
||||
],
|
||||
type: 'table',
|
||||
})
|
||||
);
|
||||
expect(theResult.fields[0].name).toEqual('value');
|
||||
expect(theResult.fields[1].name).toEqual('time');
|
||||
expect(theResult.fields[2].name).toEqual('tsNs');
|
||||
expect(theResult.fields[3].name).toEqual('message');
|
||||
expect(theResult.fields[1].display).not.toBeNull();
|
||||
expect(theResult.length).toBe(3);
|
||||
});
|
||||
|
||||
describe('when used without error but table frames are empty', () => {
|
||||
it('then the tableResult should be null', done => {
|
||||
const panelData = createExplorePanelData({ tableFrames: [] });
|
||||
it('should do join transform if all series are timeseries', async () => {
|
||||
const tableFrames = [
|
||||
toDataFrame({
|
||||
name: 'A-series',
|
||||
refId: 'A',
|
||||
fields: [
|
||||
{ name: 'Time', type: FieldType.time, values: [100, 200, 300] },
|
||||
{ name: 'A-series', type: FieldType.number, values: [4, 5, 6] },
|
||||
],
|
||||
}),
|
||||
toDataFrame({
|
||||
name: 'B-series',
|
||||
refId: 'B',
|
||||
fields: [
|
||||
{ name: 'Time', type: FieldType.time, values: [100, 200, 300] },
|
||||
{ name: 'B-series', type: FieldType.number, values: [4, 5, 6] },
|
||||
],
|
||||
}),
|
||||
];
|
||||
const panelData = createExplorePanelData({ tableFrames });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
const result = panelResult.tableResult;
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithTableResult()),
|
||||
expect: panelData => {
|
||||
expect(panelData.tableResult).toBeNull();
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
expect(result?.fields[0].name).toBe('Time');
|
||||
expect(result?.fields[1].name).toBe('A-series');
|
||||
expect(result?.fields[2].name).toBe('B-series');
|
||||
expect(result?.fields[0].values.toArray()).toEqual([100, 200, 300]);
|
||||
expect(result?.fields[1].values.toArray()).toEqual([4, 5, 6]);
|
||||
expect(result?.fields[2].values.toArray()).toEqual([4, 5, 6]);
|
||||
});
|
||||
|
||||
describe('when used with error', () => {
|
||||
it('then the tableResult should be null', done => {
|
||||
const { table, emptyTable } = getTestContext();
|
||||
const panelData = createExplorePanelData({ error: {}, tableFrames: [table, emptyTable] });
|
||||
it('should not override fields display property when filled', async () => {
|
||||
const tableFrames = [
|
||||
toDataFrame({
|
||||
name: 'A-series',
|
||||
refId: 'A',
|
||||
fields: [{ name: 'Text', type: FieldType.string, values: ['someText'] }],
|
||||
}),
|
||||
];
|
||||
const displayFunctionMock = jest.fn();
|
||||
tableFrames[0].fields[0].display = displayFunctionMock;
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithTableResult()),
|
||||
expect: panelData => {
|
||||
expect(panelData.tableResult).toBeNull();
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
const panelData = createExplorePanelData({ tableFrames });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
expect(panelResult.tableResult?.fields[0].display).toBe(displayFunctionMock);
|
||||
});
|
||||
|
||||
it('should return null when passed empty array', async () => {
|
||||
const panelData = createExplorePanelData({ tableFrames: [] });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
expect(panelResult.tableResult).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null if panelData has error', async () => {
|
||||
const { table, emptyTable } = getTestContext();
|
||||
const panelData = createExplorePanelData({ error: {}, tableFrames: [table, emptyTable] });
|
||||
const panelResult = await decorateWithTableResult(panelData).toPromise();
|
||||
expect(panelResult.tableResult).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('decorateWithLogsResult', () => {
|
||||
describe('when used without error', () => {
|
||||
it('then the logsResult should be correct', done => {
|
||||
const { logs } = getTestContext();
|
||||
const state = ({
|
||||
queryIntervals: { intervalMs: 10 },
|
||||
} as unknown) as ExploreItemState;
|
||||
const request = ({ timezone: 'utc', intervalMs: 60000 } as unknown) as DataQueryRequest;
|
||||
const panelData = createExplorePanelData({ logsFrames: [logs], request });
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithLogsResult(state)),
|
||||
expect: panelData => {
|
||||
const theResult = panelData.logsResult;
|
||||
|
||||
expect(theResult).toEqual({
|
||||
hasUniqueLabels: false,
|
||||
meta: [],
|
||||
rows: [
|
||||
{
|
||||
rowIndex: 0,
|
||||
dataFrame: logs,
|
||||
entry: 'this is a message',
|
||||
entryFieldIndex: 3,
|
||||
hasAnsi: false,
|
||||
labels: {},
|
||||
logLevel: 'unknown',
|
||||
raw: 'this is a message',
|
||||
searchWords: [] as string[],
|
||||
timeEpochMs: 100,
|
||||
timeEpochNs: '100000002',
|
||||
timeFromNow: 'fromNow() jest mocked',
|
||||
timeLocal: 'format() jest mocked',
|
||||
timeUtc: 'format() jest mocked',
|
||||
uid: '0',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
{
|
||||
rowIndex: 2,
|
||||
dataFrame: logs,
|
||||
entry: 'third',
|
||||
entryFieldIndex: 3,
|
||||
hasAnsi: false,
|
||||
labels: {},
|
||||
logLevel: 'unknown',
|
||||
raw: 'third',
|
||||
searchWords: [] as string[],
|
||||
timeEpochMs: 100,
|
||||
timeEpochNs: '100000001',
|
||||
timeFromNow: 'fromNow() jest mocked',
|
||||
timeLocal: 'format() jest mocked',
|
||||
timeUtc: 'format() jest mocked',
|
||||
uid: '2',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
{
|
||||
rowIndex: 1,
|
||||
dataFrame: logs,
|
||||
entry: 'second message',
|
||||
entryFieldIndex: 3,
|
||||
hasAnsi: false,
|
||||
labels: {},
|
||||
logLevel: 'unknown',
|
||||
raw: 'second message',
|
||||
searchWords: [] as string[],
|
||||
timeEpochMs: 100,
|
||||
timeEpochNs: '100000000',
|
||||
timeFromNow: 'fromNow() jest mocked',
|
||||
timeLocal: 'format() jest mocked',
|
||||
timeUtc: 'format() jest mocked',
|
||||
uid: '1',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
],
|
||||
series: [
|
||||
{
|
||||
label: 'unknown',
|
||||
color: '#8e8e8e',
|
||||
data: [[0, 3]],
|
||||
isVisible: true,
|
||||
yAxis: {
|
||||
index: 1,
|
||||
min: 0,
|
||||
tickDecimals: 0,
|
||||
},
|
||||
seriesIndex: 0,
|
||||
timeField: {
|
||||
name: 'Time',
|
||||
type: 'time',
|
||||
config: {},
|
||||
values: new ArrayVector([0]),
|
||||
index: 0,
|
||||
display: expect.anything(),
|
||||
},
|
||||
valueField: {
|
||||
name: 'unknown',
|
||||
type: 'number',
|
||||
config: { unit: undefined, color: '#8e8e8e' },
|
||||
values: new ArrayVector([3]),
|
||||
labels: undefined,
|
||||
index: 1,
|
||||
display: expect.anything(),
|
||||
state: expect.anything(),
|
||||
},
|
||||
timeStep: 0,
|
||||
},
|
||||
],
|
||||
visibleRange: undefined,
|
||||
});
|
||||
it('should correctly transform logs dataFrames', () => {
|
||||
const { logs } = getTestContext();
|
||||
const request = ({ timezone: 'utc', intervalMs: 60000 } as unknown) as DataQueryRequest;
|
||||
const panelData = createExplorePanelData({ logsFrames: [logs], request });
|
||||
expect(decorateWithLogsResult()(panelData).logsResult).toEqual({
|
||||
hasUniqueLabels: false,
|
||||
meta: [],
|
||||
rows: [
|
||||
{
|
||||
rowIndex: 0,
|
||||
dataFrame: logs,
|
||||
entry: 'this is a message',
|
||||
entryFieldIndex: 3,
|
||||
hasAnsi: false,
|
||||
labels: {},
|
||||
logLevel: 'unknown',
|
||||
raw: 'this is a message',
|
||||
searchWords: [] as string[],
|
||||
timeEpochMs: 100,
|
||||
timeEpochNs: '100000002',
|
||||
timeFromNow: 'fromNow() jest mocked',
|
||||
timeLocal: 'format() jest mocked',
|
||||
timeUtc: 'format() jest mocked',
|
||||
uid: '0',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
done,
|
||||
});
|
||||
{
|
||||
rowIndex: 2,
|
||||
dataFrame: logs,
|
||||
entry: 'third',
|
||||
entryFieldIndex: 3,
|
||||
hasAnsi: false,
|
||||
labels: {},
|
||||
logLevel: 'unknown',
|
||||
raw: 'third',
|
||||
searchWords: [] as string[],
|
||||
timeEpochMs: 100,
|
||||
timeEpochNs: '100000001',
|
||||
timeFromNow: 'fromNow() jest mocked',
|
||||
timeLocal: 'format() jest mocked',
|
||||
timeUtc: 'format() jest mocked',
|
||||
uid: '2',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
{
|
||||
rowIndex: 1,
|
||||
dataFrame: logs,
|
||||
entry: 'second message',
|
||||
entryFieldIndex: 3,
|
||||
hasAnsi: false,
|
||||
labels: {},
|
||||
logLevel: 'unknown',
|
||||
raw: 'second message',
|
||||
searchWords: [] as string[],
|
||||
timeEpochMs: 100,
|
||||
timeEpochNs: '100000000',
|
||||
timeFromNow: 'fromNow() jest mocked',
|
||||
timeLocal: 'format() jest mocked',
|
||||
timeUtc: 'format() jest mocked',
|
||||
uid: '1',
|
||||
uniqueLabels: {},
|
||||
},
|
||||
],
|
||||
series: [
|
||||
{
|
||||
label: 'unknown',
|
||||
color: '#8e8e8e',
|
||||
data: [[0, 3]],
|
||||
isVisible: true,
|
||||
yAxis: {
|
||||
index: 1,
|
||||
min: 0,
|
||||
tickDecimals: 0,
|
||||
},
|
||||
seriesIndex: 0,
|
||||
timeField: {
|
||||
name: 'Time',
|
||||
type: 'time',
|
||||
config: {},
|
||||
values: new ArrayVector([0]),
|
||||
index: 0,
|
||||
display: expect.anything(),
|
||||
},
|
||||
valueField: {
|
||||
name: 'unknown',
|
||||
type: 'number',
|
||||
config: { unit: undefined, color: '#8e8e8e' },
|
||||
values: new ArrayVector([3]),
|
||||
labels: undefined,
|
||||
index: 1,
|
||||
display: expect.anything(),
|
||||
state: expect.anything(),
|
||||
},
|
||||
timeStep: 0,
|
||||
},
|
||||
],
|
||||
visibleRange: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
describe('when used without error but logs frames are empty', () => {
|
||||
it('then the graphResult should be null', done => {
|
||||
const panelData = createExplorePanelData({ logsFrames: [] });
|
||||
const state = ({} as unknown) as ExploreItemState;
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithLogsResult(state)),
|
||||
expect: panelData => {
|
||||
expect(panelData.logsResult).toBeNull();
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
it('returns null if passed empty array', () => {
|
||||
const panelData = createExplorePanelData({ logsFrames: [] });
|
||||
expect(decorateWithLogsResult()(panelData).logsResult).toBeNull();
|
||||
});
|
||||
|
||||
describe('when used with error', () => {
|
||||
it('then the graphResult should be null', done => {
|
||||
const { logs } = getTestContext();
|
||||
const panelData = createExplorePanelData({ error: {}, logsFrames: [logs] });
|
||||
const state = ({} as unknown) as ExploreItemState;
|
||||
|
||||
observableTester().subscribeAndExpectOnNext({
|
||||
observable: of(panelData).pipe(decorateWithLogsResult(state)),
|
||||
expect: panelData => {
|
||||
expect(panelData.logsResult).toBeNull();
|
||||
},
|
||||
done,
|
||||
});
|
||||
});
|
||||
it('returns null if panelData has error', () => {
|
||||
const { logs } = getTestContext();
|
||||
const panelData = createExplorePanelData({ error: {}, logsFrames: [logs] });
|
||||
expect(decorateWithLogsResult()(panelData).logsResult).toBeNull();
|
||||
});
|
||||
});
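With the decorators no longer being RxJS operators, the updated tests above call them directly: the synchronous ones are asserted on their return value, and decorateWithTableResult, which returns an Observable, is awaited through toPromise(). A condensed sketch of that pattern; the assertion value is illustrative, and getTestContext and createExplorePanelData are the helpers defined in this test file:

// Sketch of the updated test pattern for the Observable-returning decorator.
it('unwraps the Observable returned by decorateWithTableResult', async () => {
  const { table, emptyTable } = getTestContext();
  const panelData = createExplorePanelData({ tableFrames: [table, emptyTable] });
  const panelResult = await decorateWithTableResult(panelData).toPromise();
  expect(panelResult.tableResult).not.toBeNull(); // illustrative assertion
});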
|
||||
|
@@ -1,214 +1,187 @@
import { MonoTypeOperatorFunction, of, OperatorFunction } from 'rxjs';
import { map, mergeMap } from 'rxjs/operators';
import { Observable, of } from 'rxjs';
import { map } from 'rxjs/operators';
import {
AbsoluteTimeRange,
DataFrame,
DataSourceApi,
FieldType,
getDisplayProcessor,
PanelData,
PreferredVisualisationType,
sortLogsResult,
standardTransformers,
} from '@grafana/data';
import { config } from '@grafana/runtime';
import { groupBy } from 'lodash';

import { ExploreItemState, ExplorePanelData } from '../../../types';
import { ExplorePanelData } from '../../../types';
import { getGraphSeriesModel } from '../../../plugins/panel/graph2/getGraphSeriesModel';
import { dataFrameToLogsModel } from '../../../core/logs_model';
import { refreshIntervalToSortOrder } from '../../../core/utils/explore';

export const decorateWithGraphLogsTraceAndTable = (
datasourceInstance?: DataSourceApi | null
): OperatorFunction<PanelData, ExplorePanelData> => inputStream =>
inputStream.pipe(
map(data => {
if (data.error) {
return {
...data,
graphFrames: [],
tableFrames: [],
logsFrames: [],
traceFrames: [],
graphResult: null,
tableResult: null,
logsResult: null,
};
}
/**
* When processing response first we try to determine what kind of dataframes we got as one query can return multiple
* dataFrames with different type of data. This is later used for type specific processing. As we use this in
* Observable pipeline, it decorates the existing panelData to pass the results to later processing stages.
*/
export const decorateWithGraphLogsTraceAndTable = (data: PanelData): ExplorePanelData => {
if (data.error) {
return {
...data,
graphFrames: [],
tableFrames: [],
logsFrames: [],
traceFrames: [],
graphResult: null,
tableResult: null,
logsResult: null,
};
}

const graphFrames: DataFrame[] = [];
const tableFrames: DataFrame[] = [];
const logsFrames: DataFrame[] = [];
const traceFrames: DataFrame[] = [];
const graphFrames: DataFrame[] = [];
const tableFrames: DataFrame[] = [];
const logsFrames: DataFrame[] = [];
const traceFrames: DataFrame[] = [];

for (const frame of data.series) {
if (shouldShowInVisualisationTypeStrict(frame, 'logs')) {
logsFrames.push(frame);
} else if (shouldShowInVisualisationTypeStrict(frame, 'graph')) {
for (const frame of data.series) {
switch (frame.meta?.preferredVisualisationType) {
case 'logs':
logsFrames.push(frame);
break;
case 'graph':
graphFrames.push(frame);
break;
case 'trace':
traceFrames.push(frame);
break;
case 'table':
tableFrames.push(frame);
break;
default:
if (isTimeSeries(frame)) {
graphFrames.push(frame);
} else if (shouldShowInVisualisationTypeStrict(frame, 'trace')) {
traceFrames.push(frame);
} else if (shouldShowInVisualisationTypeStrict(frame, 'table')) {
tableFrames.push(frame);
} else if (isTimeSeries(frame, datasourceInstance?.meta.id)) {
if (shouldShowInVisualisationType(frame, 'graph')) {
graphFrames.push(frame);
}
if (shouldShowInVisualisationType(frame, 'table')) {
tableFrames.push(frame);
}
} else {
// We fallback to table if we do not have any better meta info about the dataframe.
tableFrames.push(frame);
}
}

return {
...data,
graphFrames,
tableFrames,
logsFrames,
traceFrames,
graphResult: null,
tableResult: null,
logsResult: null,
};
})
);

export const decorateWithGraphResult = (): MonoTypeOperatorFunction<ExplorePanelData> => inputStream =>
inputStream.pipe(
map(data => {
if (data.error) {
return { ...data, graphResult: null };
}

const graphResult =
data.graphFrames.length === 0
? null
: getGraphSeriesModel(
data.graphFrames,
data.request?.timezone ?? 'browser',
{},
{ showBars: false, showLines: true, showPoints: false },
{ asTable: false, isVisible: true, placement: 'under' }
);

return { ...data, graphResult };
})
);

export const decorateWithTableResult = (): MonoTypeOperatorFunction<ExplorePanelData> => inputStream =>
inputStream.pipe(
mergeMap(data => {
if (data.error) {
return of({ ...data, tableResult: null });
}

if (data.tableFrames.length === 0) {
return of({ ...data, tableResult: null });
}

data.tableFrames.sort((frameA: DataFrame, frameB: DataFrame) => {
const frameARefId = frameA.refId!;
const frameBRefId = frameB.refId!;

if (frameARefId > frameBRefId) {
return 1;
}
if (frameARefId < frameBRefId) {
return -1;
}
return 0;
});

const hasOnlyTimeseries = data.tableFrames.every(df => isTimeSeries(df));

// If we have only timeseries we do join on default time column which makes more sense. If we are showing
// non timeseries or some mix of data we are not trying to join on anything and just try to merge them in
// single table, which may not make sense in most cases, but it's up to the user to query something sensible.
const transformer = hasOnlyTimeseries
? of(data.tableFrames).pipe(standardTransformers.seriesToColumnsTransformer.operator({}))
: of(data.tableFrames).pipe(standardTransformers.mergeTransformer.operator({}));

return transformer.pipe(
map(frames => {
const frame = frames[0];

// set display processor
for (const field of frame.fields) {
field.display =
field.display ??
getDisplayProcessor({
field,
theme: config.theme,
timeZone: data.request?.timezone ?? 'browser',
});
}

return { ...data, tableResult: frame };
})
);
})
);

export const decorateWithLogsResult = (
state: ExploreItemState
): MonoTypeOperatorFunction<ExplorePanelData> => inputStream =>
inputStream.pipe(
map(data => {
if (data.error) {
return { ...data, logsResult: null };
}

const { absoluteRange, refreshInterval } = state;
if (data.logsFrames.length === 0) {
return { ...data, logsResult: null };
}

const timeZone = data.request?.timezone ?? 'browser';
const intervalMs = data.request?.intervalMs;
const newResults = dataFrameToLogsModel(data.logsFrames, intervalMs, timeZone, absoluteRange);
const sortOrder = refreshIntervalToSortOrder(refreshInterval);
const sortedNewResults = sortLogsResult(newResults, sortOrder);
const rows = sortedNewResults.rows;
const series = sortedNewResults.series;
const logsResult = { ...sortedNewResults, rows, series };

return { ...data, logsResult };
})
);

function isTimeSeries(frame: DataFrame, datasource?: string): boolean {
// TEMP: Temporary hack. Remove when logs/metrics unification is done
if (datasource && datasource === 'cloudwatch') {
return isTimeSeriesCloudWatch(frame);
}

if (frame.fields.length === 2) {
if (frame.fields[0].type === FieldType.time) {
return true;
}
}

return false;
}
return {
...data,
graphFrames,
tableFrames,
logsFrames,
traceFrames,
graphResult: null,
tableResult: null,
logsResult: null,
};
};

function shouldShowInVisualisationType(frame: DataFrame, visualisation: PreferredVisualisationType) {
if (frame.meta?.preferredVisualisationType && frame.meta?.preferredVisualisationType !== visualisation) {
return false;
export const decorateWithGraphResult = (data: ExplorePanelData): ExplorePanelData => {
if (data.error) {
return { ...data, graphResult: null };
}

return true;
}
const graphResult =
data.graphFrames.length === 0
? null
: getGraphSeriesModel(
data.graphFrames,
data.request?.timezone ?? 'browser',
{},
{ showBars: false, showLines: true, showPoints: false },
{ asTable: false, isVisible: true, placement: 'under' }
);

function shouldShowInVisualisationTypeStrict(frame: DataFrame, visualisation: PreferredVisualisationType) {
return frame.meta?.preferredVisualisationType === visualisation;
}
return { ...data, graphResult };
};

// TEMP: Temporary hack. Remove when logs/metrics unification is done
function isTimeSeriesCloudWatch(frame: DataFrame): boolean {
return (
frame.fields.some(field => field.type === FieldType.time) &&
frame.fields.some(field => field.type === FieldType.number)
/**
* This processing returns Observable because it uses Transformer internally which result type is also Observable.
* In this case the transformer should return single result but it is possible that in the future it could return
* multiple results and so this should be used with mergeMap or similar to unbox the internal observable.
*/
export const decorateWithTableResult = (data: ExplorePanelData): Observable<ExplorePanelData> => {
if (data.error) {
return of({ ...data, tableResult: null });
}

if (data.tableFrames.length === 0) {
return of({ ...data, tableResult: null });
}

data.tableFrames.sort((frameA: DataFrame, frameB: DataFrame) => {
const frameARefId = frameA.refId!;
const frameBRefId = frameB.refId!;

if (frameARefId > frameBRefId) {
return 1;
}
if (frameARefId < frameBRefId) {
return -1;
}
return 0;
});

const hasOnlyTimeseries = data.tableFrames.every(df => isTimeSeries(df));

// If we have only timeseries we do join on default time column which makes more sense. If we are showing
// non timeseries or some mix of data we are not trying to join on anything and just try to merge them in
// single table, which may not make sense in most cases, but it's up to the user to query something sensible.
const transformer = hasOnlyTimeseries
? of(data.tableFrames).pipe(standardTransformers.seriesToColumnsTransformer.operator({}))
: of(data.tableFrames).pipe(standardTransformers.mergeTransformer.operator({}));

return transformer.pipe(
map(frames => {
const frame = frames[0];

// set display processor
for (const field of frame.fields) {
field.display =
field.display ??
getDisplayProcessor({
field,
theme: config.theme,
timeZone: data.request?.timezone ?? 'browser',
});
}

return { ...data, tableResult: frame };
})
);
};

export const decorateWithLogsResult = (
options: { absoluteRange?: AbsoluteTimeRange; refreshInterval?: string } = {}
) => (data: ExplorePanelData): ExplorePanelData => {
if (data.error) {
return { ...data, logsResult: null };
}

if (data.logsFrames.length === 0) {
return { ...data, logsResult: null };
}

const timeZone = data.request?.timezone ?? 'browser';
const intervalMs = data.request?.intervalMs;
const newResults = dataFrameToLogsModel(data.logsFrames, intervalMs, timeZone, options.absoluteRange);
const sortOrder = refreshIntervalToSortOrder(options.refreshInterval);
const sortedNewResults = sortLogsResult(newResults, sortOrder);
const rows = sortedNewResults.rows;
const series = sortedNewResults.series;
const logsResult = { ...sortedNewResults, rows, series };

return { ...data, logsResult };
};

/**
* Check if frame contains time series, which for our purpose means 1 time column and 1 or more numeric columns.
*/
function isTimeSeries(frame: DataFrame): boolean {
const grouped = groupBy(frame.fields, field => field.type);
return Boolean(
Object.keys(grouped).length === 2 && grouped[FieldType.time]?.length === 1 && grouped[FieldType.number]
);
}
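The behavioral change behind the commit title sits in the new isTimeSeries: a frame now counts as a time series when it has exactly one time field and any number of number fields, so wide frames are routed to the graph instead of falling back to the table. An illustrative sketch of the new classification; the frame shape mirrors the wide frame in the tests, and the import path is an assumption based on the relative imports used in this diff:

// Illustration only: classifying a wide frame with the new decorator.
import { FieldType, LoadingState, PanelData, TimeRange, toDataFrame } from '@grafana/data';
// Path assumed from the '../utils/decorators' import in the Explore actions above.
import { decorateWithGraphLogsTraceAndTable } from 'app/features/explore/utils/decorators';

const wide = toDataFrame({
  name: 'wide',
  fields: [
    { name: 'time', type: FieldType.time, values: [100, 200, 300] },
    { name: 'A-series', type: FieldType.number, values: [4, 5, 6] },
    { name: 'B-series', type: FieldType.number, values: [7, 8, 9] },
  ],
});

const panelData: PanelData = {
  series: [wide],
  state: LoadingState.Done,
  timeRange: ({} as unknown) as TimeRange,
};

const decorated = decorateWithGraphLogsTraceAndTable(panelData);
// With one time field and two number fields the frame is classified as a time series:
// decorated.graphFrames contains the wide frame, while tableFrames, logsFrames and
// traceFrames stay empty.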