Mirror of https://github.com/grafana/grafana.git
Explore: Use DataFrame to derive graph/table/logs (#18859)
* WIP: Use data frames in explore
* Explore: everything seems to be working again
* Reworked ResultProcessor tests
* Fixed unit test
* Add some typings and comments
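The gist of the change: Explore's ResultProcessor now consumes DataFrame objects instead of raw datasource payloads. Below is a minimal sketch of that conversion using the toDataFrame helper from @grafana/data that the tests in this diff rely on; the field names and values are illustrative, not taken from the commit.

```ts
import { DataFrame, FieldType, toDataFrame } from '@grafana/data';

// A legacy time-series payload, as datasources used to return it.
const legacySeries = {
  target: 'A-series',
  datapoints: [[39.9, 1559038518831], [40.3, 1559038519831]], // [value, time in ms]
};

// toDataFrame() accepts legacy TimeSeries/TableData shapes and returns a DataFrame.
const frame: DataFrame = toDataFrame(legacySeries);

// Frames can also be built directly from field definitions, which is how the
// test fixtures below are constructed.
const fixture: DataFrame = toDataFrame({
  name: 'A-series',
  fields: [
    { name: 'time', type: FieldType.time, values: [100, 200, 300] },
    { name: 'A-series', type: FieldType.number, values: [4, 5, 6] },
  ],
});
```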
@@ -16,7 +16,6 @@ import {
LogsMetaKind,
LogsDedupStrategy,
GraphSeriesXY,
LoadingState,
dateTime,
toUtc,
NullValueMode,
@@ -193,7 +192,7 @@ export function dataFrameToLogsModel(dataFrame: DataFrame[], intervalMs: number)
logsModel.series = makeSeriesForLogs(logsModel.rows, intervalMs);
} else {
logsModel.series = getGraphSeriesModel(
{ series: metricSeries, state: LoadingState.Done },
metricSeries,
{},
{ showBars: true, showLines: false, showPoints: false },
{

@@ -274,7 +274,9 @@ export class PanelQueryState

return {
state: done ? LoadingState.Done : LoadingState.Streaming,
series, // Union of series from response and all streams
// This should not be needed but unfortunately Prometheus datasource sends non DataFrame here bypassing the
// typing.
series: this.sendFrames ? getProcessedDataFrames(series) : [],
legacy: this.sendLegacy ? translateToLegacyData(series) : undefined,
request: {
...this.request,

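The sendFrames branch above relies on getProcessedDataFrames to normalize whatever a datasource returned into DataFrames. A rough, self-contained sketch of that kind of normalization follows; the helper name here is illustrative, the real implementation lives in PanelQueryState.

```ts
import { DataFrame, guessFieldTypes, toDataFrame } from '@grafana/data';

// Coerce mixed results (legacy TimeSeries, TableData, or actual DataFrames)
// into DataFrames with field types filled in.
export function normalizeToDataFrames(results: any[]): DataFrame[] {
  if (!results || !results.length) {
    return [];
  }
  return results.map(result => guessFieldTypes(toDataFrame(result)));
}
```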
@@ -611,13 +611,10 @@ export const processQueryResponse = (
}

const latency = request.endTime - request.startTime;

// temporary hack until we switch to PanelData, Loki already converts to DataFrame so using legacy will destroy the format
const isLokiDataSource = state.datasourceInstance.meta.name === 'Loki';
const processor = new ResultProcessor(state, replacePreviousResults, isLokiDataSource ? series : legacy);
const processor = new ResultProcessor(state, replacePreviousResults, series);

// For Angular editors
state.eventBridge.emit('data-received', processor.getRawData());
state.eventBridge.emit('data-received', legacy);

return {
...state,

@@ -16,47 +16,39 @@ jest.mock('@grafana/data/src/utils/moment_wrapper', () => ({
import { ResultProcessor } from './ResultProcessor';
import { ExploreItemState, ExploreMode } from 'app/types/explore';
import TableModel from 'app/core/table_model';
import { TimeSeries, LogRowModel, LogsMetaItem, GraphSeriesXY } from '@grafana/data';
import { TimeSeries, LogRowModel, LogsMetaItem, GraphSeriesXY, toDataFrame, FieldType } from '@grafana/data';

const testContext = (options: any = {}) => {
const response = [
{
target: 'A-series',
alias: 'A-series',
datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]],
refId: 'A',
},
{
columns: [
{
text: 'Time',
},
{
text: 'Message',
},
{
text: 'Description',
},
{
text: 'Value',
},
],
rows: [
[1559038518831, 'This is a message', 'Description', 23.1],
[1559038519831, 'This is a message', 'Description', 23.1],
],
refId: 'B',
},
];
const timeSeries = toDataFrame({
name: 'A-series',
refId: 'A',
fields: [
{ name: 'A-series', type: FieldType.number, values: [4, 5, 6] },
{ name: 'time', type: FieldType.time, values: [100, 200, 300] },
],
});

const table = toDataFrame({
name: 'table-res',
refId: 'A',
fields: [
{ name: 'value', type: FieldType.number, values: [4, 5, 6] },
{ name: 'time', type: FieldType.time, values: [100, 200, 300] },
{ name: 'message', type: FieldType.string, values: ['this is a message', 'second message', 'third'] },
],
});

const defaultOptions = {
mode: ExploreMode.Metrics,
replacePreviousResults: true,
result: { data: response },
dataFrames: [timeSeries, table],
graphResult: [] as TimeSeries[],
tableResult: new TableModel(),
logsResult: { hasUniqueLabels: false, rows: [] as LogRowModel[] },
};

const combinedOptions = { ...defaultOptions, ...options };

const state = ({
mode: combinedOptions.mode,
graphResult: combinedOptions.graphResult,
@@ -64,28 +56,24 @@ const testContext = (options: any = {}) => {
logsResult: combinedOptions.logsResult,
queryIntervals: { intervalMs: 10 },
} as any) as ExploreItemState;
const resultProcessor = new ResultProcessor(state, combinedOptions.replacePreviousResults, combinedOptions.result);

const resultProcessor = new ResultProcessor(
state,
combinedOptions.replacePreviousResults,
combinedOptions.dataFrames
);

return {
result: combinedOptions.result,
dataFrames: combinedOptions.dataFrames,
resultProcessor,
};
};

describe('ResultProcessor', () => {
describe('constructed without result', () => {
describe('when calling getRawData', () => {
it('then it should return an empty array', () => {
const { resultProcessor } = testContext({ result: null });
const theResult = resultProcessor.getRawData();

expect(theResult).toEqual([]);
});
});

describe('when calling getGraphResult', () => {
it('then it should return an empty array', () => {
const { resultProcessor } = testContext({ result: null });
const { resultProcessor } = testContext({ dataFrames: [] });
const theResult = resultProcessor.getGraphResult();

expect(theResult).toEqual([]);
@@ -94,7 +82,7 @@ describe('ResultProcessor', () => {

describe('when calling getTableResult', () => {
it('then it should return an empty TableModel', () => {
const { resultProcessor } = testContext({ result: null });
const { resultProcessor } = testContext({ dataFrames: [] });
const theResult = resultProcessor.getTableResult();

expect(theResult).toEqual(new TableModel());
@@ -103,7 +91,7 @@ describe('ResultProcessor', () => {

describe('when calling getLogsResult', () => {
it('then it should return null', () => {
const { resultProcessor } = testContext({ result: null });
const { resultProcessor } = testContext({ dataFrames: [] });
const theResult = resultProcessor.getLogsResult();

expect(theResult).toBeNull();
@@ -112,15 +100,6 @@ describe('ResultProcessor', () => {
});

describe('constructed with a result that is a DataQueryResponse', () => {
describe('when calling getRawData', () => {
it('then it should return result.data', () => {
const { result, resultProcessor } = testContext();
const theResult = resultProcessor.getRawData();

expect(theResult).toEqual(result.data);
});
});

describe('when calling getGraphResult', () => {
it('then it should return correct graph result', () => {
const { resultProcessor } = testContext();
@@ -130,7 +109,7 @@ describe('ResultProcessor', () => {
{
label: 'A-series',
color: '#7EB26D',
data: [[1559038518831, 39.91264531864214], [1559038519831, 40.35179822906545]],
data: [[100, 4], [200, 5], [300, 6]],
info: undefined,
isVisible: true,
yAxis: {
@@ -148,11 +127,12 @@ describe('ResultProcessor', () => {

expect(theResult).toEqual({
columnMap: {},
columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }],
rows: [
[1559038518831, 'This is a message', 'Description', 23.1],
[1559038519831, 'This is a message', 'Description', 23.1],
columns: [
{ text: 'value', type: 'number', filterable: undefined },
{ text: 'time', type: 'time', filterable: undefined },
{ text: 'message', type: 'string', filterable: undefined },
],
rows: [[4, 100, 'this is a message'], [5, 200, 'second message'], [6, 300, 'third']],
type: 'table',
});
});
@@ -160,41 +140,53 @@ describe('ResultProcessor', () => {

describe('when calling getLogsResult', () => {
it('then it should return correct logs result', () => {
const { resultProcessor } = testContext({ mode: ExploreMode.Logs, observerResponse: null });
const { resultProcessor } = testContext({ mode: ExploreMode.Logs });
const theResult = resultProcessor.getLogsResult();

console.log(JSON.stringify(theResult));

expect(theResult).toEqual({
hasUniqueLabels: false,
meta: [],
rows: [
{
entry: 'This is a message',
entry: 'third',
hasAnsi: false,
labels: undefined,
logLevel: 'unknown',
raw: 'This is a message',
raw: 'third',
searchWords: [] as string[],
timeEpochMs: 1559038519831,
timeEpochMs: 300,
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 1559038519831,
timestamp: 300,
uniqueLabels: {},
},
{
entry: 'This is a message',
entry: 'second message',
hasAnsi: false,
labels: undefined,
logLevel: 'unknown',
raw: 'This is a message',
raw: 'second message',
searchWords: [] as string[],
timeEpochMs: 1559038518831,
timeEpochMs: 200,
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 1559038518831,
timestamp: 200,
uniqueLabels: {},
},
{
entry: 'this is a message',
hasAnsi: false,
labels: undefined,
logLevel: 'unknown',
raw: 'this is a message',
searchWords: [] as string[],
timeEpochMs: 100,
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 100,
uniqueLabels: {},
},
],
@@ -202,7 +194,7 @@ describe('ResultProcessor', () => {
{
label: 'A-series',
color: '#7EB26D',
data: [[1559038518831, 39.91264531864214], [1559038519831, 40.35179822906545]],
data: [[100, 4], [200, 5], [300, 6]],
info: undefined,
isVisible: true,
yAxis: {
@@ -216,84 +208,6 @@ describe('ResultProcessor', () => {
});

describe('constructed with result that is a DataQueryResponse and merging with previous results', () => {
describe('when calling getRawData', () => {
it('then it should return result.data', () => {
const { result, resultProcessor } = testContext();
const theResult = resultProcessor.getRawData();

expect(theResult).toEqual(result.data);
});
});

describe('when calling getGraphResult', () => {
it('then it should return correct graph result', () => {
const { resultProcessor } = testContext({
replacePreviousResults: false,
graphResult: [
{
label: 'A-series',
color: '#7EB26D',
data: [[1558038518831, 19.91264531864214], [1558038518831, 20.35179822906545]],
info: undefined,
isVisible: true,
yAxis: {
index: 1,
},
},
],
});
const theResult = resultProcessor.getGraphResult();

expect(theResult).toEqual([
{
label: 'A-series',
color: '#7EB26D',
data: [
[1558038518831, 19.91264531864214],
[1558038518831, 20.35179822906545],
[1559038518831, 39.91264531864214],
[1559038519831, 40.35179822906545],
],
info: undefined,
isVisible: true,
yAxis: {
index: 1,
},
},
]);
});
});

describe('when calling getTableResult', () => {
it('then it should return correct table result', () => {
const { resultProcessor } = testContext({
replacePreviousResults: false,
tableResult: {
columnMap: {},
columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }],
rows: [
[1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1],
[1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1],
],
type: 'table',
},
});
const theResult = resultProcessor.getTableResult();

expect(theResult).toEqual({
columnMap: {},
columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }],
rows: [
[1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1],
[1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1],
[1559038518831, 'This is a message', 'Description', 23.1],
[1559038519831, 'This is a message', 'Description', 23.1],
],
type: 'table',
});
});
});

describe('when calling getLogsResult', () => {
it('then it should return correct logs result', () => {
const { resultProcessor } = testContext({
@@ -348,6 +262,7 @@ describe('ResultProcessor', () => {
],
},
});

const theResult = resultProcessor.getLogsResult();
const expected = {
hasUniqueLabels: false,
@@ -384,33 +299,48 @@ describe('ResultProcessor', () => {
uniqueLabels: {},
},
{
entry: 'This is a message',
entry: 'third',
fresh: true,
hasAnsi: false,
labels: undefined,
logLevel: 'unknown',
raw: 'This is a message',
raw: 'third',
searchWords: [] as string[],
timeEpochMs: 1559038519831,
timeEpochMs: 300,
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 1559038519831,
timestamp: 300,
uniqueLabels: {},
},
{
entry: 'This is a message',
entry: 'second message',
fresh: true,
hasAnsi: false,
labels: undefined,
logLevel: 'unknown',
raw: 'This is a message',
raw: 'second message',
searchWords: [] as string[],
timeEpochMs: 1559038518831,
timeEpochMs: 200,
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 1559038518831,
timestamp: 200,
uniqueLabels: {},
},
{
entry: 'this is a message',
fresh: true,
hasAnsi: false,
labels: undefined,
logLevel: 'unknown',
raw: 'this is a message',
searchWords: [] as string[],
timeEpochMs: 100,
timeFromNow: 'fromNow() jest mocked',
timeLocal: 'format() jest mocked',
timeUtc: 'format() jest mocked',
timestamp: 100,
uniqueLabels: {},
},
],
@@ -418,12 +348,7 @@ describe('ResultProcessor', () => {
{
label: 'A-series',
color: '#7EB26D',
data: [
[1558038518831, 37.91264531864214],
[1558038519831, 38.35179822906545],
[1559038518831, 39.91264531864214],
[1559038519831, 40.35179822906545],
],
data: [[100, 4], [200, 5], [300, 6]],
info: undefined,
isVisible: true,
yAxis: {

@@ -1,85 +1,88 @@
import { DataQueryResponse, DataQueryResponseData } from '@grafana/ui';

import {
TableData,
isTableData,
LogsModel,
toDataFrame,
guessFieldTypes,
TimeSeries,
GraphSeriesXY,
LoadingState,
} from '@grafana/data';
import { LogsModel, GraphSeriesXY, DataFrame, FieldType } from '@grafana/data';

import { ExploreItemState, ExploreMode } from 'app/types/explore';
import { getProcessedDataFrames } from 'app/features/dashboard/state/PanelQueryState';
import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
import { sortLogsResult, refreshIntervalToSortOrder } from 'app/core/utils/explore';
import { dataFrameToLogsModel } from 'app/core/logs_model';
import { getGraphSeriesModel } from 'app/plugins/panel/graph2/getGraphSeriesModel';

export class ResultProcessor {
private rawData: DataQueryResponseData[] = [];
private metrics: TimeSeries[] = [];
private tables: TableData[] = [];

constructor(
private state: ExploreItemState,
private replacePreviousResults: boolean,
result?: DataQueryResponse | DataQueryResponseData[]
) {
if (result && result.hasOwnProperty('data')) {
this.rawData = (result as DataQueryResponse).data;
} else {
this.rawData = (result as DataQueryResponseData[]) || [];
}
private dataFrames: DataFrame[]
) {}

if (this.state.mode !== ExploreMode.Metrics) {
return;
}

for (let index = 0; index < this.rawData.length; index++) {
const res: any = this.rawData[index];
const isTable = isTableData(res);
if (isTable) {
this.tables.push(res);
} else {
this.metrics.push(res);
}
}
}

getRawData = (): any[] => {
return this.rawData;
};

getGraphResult = (): GraphSeriesXY[] => {
getGraphResult(): GraphSeriesXY[] {
if (this.state.mode !== ExploreMode.Metrics) {
return [];
}

const newResults = this.createGraphSeries(this.metrics);
return this.mergeGraphResults(newResults, this.state.graphResult);
};
const onlyTimeSeries = this.dataFrames.filter(series => series.fields.length === 2);

getTableResult = (): TableModel => {
return getGraphSeriesModel(
onlyTimeSeries,
{},
{ showBars: false, showLines: true, showPoints: false },
{ asTable: false, isVisible: true, placement: 'under' }
);
}

getTableResult(): TableModel {
if (this.state.mode !== ExploreMode.Metrics) {
return new TableModel();
}

const prevTableResults: any[] | TableModel = this.state.tableResult || [];
const tablesToMerge = this.replacePreviousResults ? this.tables : [].concat(prevTableResults, this.tables);
// For now ignore time series
// We can change this later, just need to figure out how to
// Ignore time series only for prometheus
const onlyTables = this.dataFrames.filter(frame => {
if (frame.fields.length === 2) {
if (frame.fields[1].type === FieldType.time) {
return false;
}
}
return true;
});

return mergeTablesIntoModel(new TableModel(), ...tablesToMerge);
};
const tables = onlyTables.map(frame => {
const { fields } = frame;
const fieldCount = fields.length;
const rowCount = fields[0].values.length;

getLogsResult = (): LogsModel => {
const columns = fields.map(field => ({
text: field.name,
type: field.type,
filterable: field.config.filterable,
}));

const rows: any[][] = [];
for (let i = 0; i < rowCount; i++) {
const row: any[] = [];
for (let j = 0; j < fieldCount; j++) {
row.push(frame.fields[j].values.get(i));
}
rows.push(row);
}

return new TableModel({
columns,
rows,
meta: frame.meta,
});
});

return mergeTablesIntoModel(new TableModel(), ...tables);
}

getLogsResult(): LogsModel {
if (this.state.mode !== ExploreMode.Logs) {
return null;
}

const graphInterval = this.state.queryIntervals.intervalMs;
const dataFrame = this.rawData.map(result => guessFieldTypes(toDataFrame(result)));
const newResults = this.rawData ? dataFrameToLogsModel(dataFrame, graphInterval) : null;

const newResults = dataFrameToLogsModel(this.dataFrames, graphInterval);
const sortOrder = refreshIntervalToSortOrder(this.state.refreshInterval);
const sortedNewResults = sortLogsResult(newResults, sortOrder);

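With getLogsResult reduced to a call into dataFrameToLogsModel, the logs path can be exercised in isolation. A small sketch using the signature shown in the logs_model hunk above; the field names and the 10 ms interval mirror the test fixtures, and the exact frame shape expected by dataFrameToLogsModel is an assumption here.

```ts
import { FieldType, toDataFrame } from '@grafana/data';
import { dataFrameToLogsModel } from 'app/core/logs_model';

// A frame with a time field and a string field, interpreted as log lines.
const logsFrame = toDataFrame({
  fields: [
    { name: 'time', type: FieldType.time, values: [100, 200, 300] },
    { name: 'message', type: FieldType.string, values: ['first', 'second', 'third'] },
  ],
});

const logsModel = dataFrameToLogsModel([logsFrame], 10);
// logsModel.rows holds one LogRowModel per message, as asserted in the tests above.
```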
@@ -94,7 +97,6 @@ export class ResultProcessor {
const prevLogsResult: LogsModel = this.state.logsResult || { hasUniqueLabels: false, rows: [] };
const sortedLogResult = sortLogsResult(prevLogsResult, sortOrder);
const rowsInState = sortedLogResult.rows;
const seriesInState = sortedLogResult.series || [];

const processedRows = [];
for (const row of rowsInState) {
@@ -104,78 +106,10 @@ export class ResultProcessor {
processedRows.push({ ...row, fresh: true });
}

const processedSeries = this.mergeGraphResults(sortedNewResults.series, seriesInState);

const slice = -1000;
const rows = processedRows.slice(slice);
const series = processedSeries.slice(slice);
const series = sortedNewResults.series.slice(slice);

return { ...sortedNewResults, rows, series };
};

private createGraphSeries = (rawData: any[]) => {
const dataFrames = getProcessedDataFrames(rawData);
const graphSeries = getGraphSeriesModel(
{ series: dataFrames, state: LoadingState.Done },
{},
{ showBars: false, showLines: true, showPoints: false },
{
asTable: false,
isVisible: true,
placement: 'under',
}
);

return graphSeries;
};

private isSameGraphSeries = (a: GraphSeriesXY, b: GraphSeriesXY) => {
if (a.hasOwnProperty('label') && b.hasOwnProperty('label')) {
const aValue = a.label;
const bValue = b.label;
if (aValue !== undefined && bValue !== undefined && aValue === bValue) {
return true;
}
}

return false;
};

private mergeGraphResults = (newResults: GraphSeriesXY[], prevResults: GraphSeriesXY[]): GraphSeriesXY[] => {
if (!prevResults || prevResults.length === 0 || this.replacePreviousResults) {
return newResults; // Hack before we use GraphSeriesXY instead
}

const results: GraphSeriesXY[] = prevResults.slice() as GraphSeriesXY[];

// update existing results
for (let index = 0; index < results.length; index++) {
const prevResult = results[index];
for (const newResult of newResults) {
const isSame = this.isSameGraphSeries(prevResult, newResult);

if (isSame) {
prevResult.data = prevResult.data.concat(newResult.data);
break;
}
}
}

// add new results
for (const newResult of newResults) {
let isNew = true;
for (const prevResult of results) {
const isSame = this.isSameGraphSeries(prevResult, newResult);
if (isSame) {
isNew = false;
break;
}
}

if (isNew) {
results.push(newResult);
}
}
return results;
};
}
}

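For reference, the table-building loop in getTableResult can be reproduced standalone: every field becomes a column, and row i is assembled by reading index i from each field's value vector. A sketch with made-up values:

```ts
import { FieldType, toDataFrame } from '@grafana/data';

const frame = toDataFrame({
  fields: [
    { name: 'value', type: FieldType.number, values: [4, 5, 6] },
    { name: 'time', type: FieldType.time, values: [100, 200, 300] },
    { name: 'message', type: FieldType.string, values: ['first', 'second', 'third'] },
  ],
});

// One column per field.
const columns = frame.fields.map(field => ({ text: field.name, type: field.type }));

// One row per index, reading across all fields.
const rows: any[][] = [];
for (let i = 0; i < frame.fields[0].values.length; i++) {
  rows.push(frame.fields.map(field => field.values.get(i)));
}
// rows === [[4, 100, 'first'], [5, 200, 'second'], [6, 300, 'third']]
```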
@@ -181,26 +181,6 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
activeTargets: PromQuery[],
end: number
) => {
// Because we want to get run instant and TimeSeries Prom queries in parallel but this isn't actually streaming
// we need to stop/cancel each posted event with a stop stream event (see below) to the observer so that the
// PanelQueryState stops the stream
const getStopState = (state: DataStreamState): DataStreamState => ({
...state,
state: LoadingState.Done,
request: { ...options, requestId: 'done' },
});

const startLoadingEvent: DataStreamState = {
key: `prometheus-loading_indicator`,
state: LoadingState.Loading,
request: options,
data: [],
unsubscribe: () => undefined,
};

observer(startLoadingEvent); // Starts the loading indicator
const lastTimeSeriesQuery = queries.filter(query => !query.instant).pop();

for (let index = 0; index < queries.length; index++) {
const query = queries[index];
const target = activeTargets[index];
@@ -220,19 +200,15 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
const data = this.processResult(response, query, target, queries.length);
const state: DataStreamState = {
key: `prometheus-${target.refId}`,
state: LoadingState.Loading,
state: LoadingState.Done,
request: options,
data,
// TODO this is obviously wrong as data is not a DataFrame and needs to be dealt with later on
// in PanelQueryState
data: data as any,
unsubscribe: () => undefined,
};

const states = [state, getStopState(state)];

if (target.refId === lastTimeSeriesQuery.refId && target.expr === lastTimeSeriesQuery.expr) {
states.push(getStopState(startLoadingEvent)); // Stops the loading indicator
}

return states;
return [state];
}),
catchError(err => {
const error = this.handleErrors(err, target);
@@ -306,6 +282,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
this.runObserverQueries(options, observer, queries, activeTargets, end);
return this.$q.when({ data: [] }) as Promise<{ data: any }>;
}

const allQueryPromise = _.map(queries, query => {
if (query.instant) {
return this.performInstantQuery(query, end);

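The Prometheus changes drop the explicit stop events: each per-query event is now posted as LoadingState.Done, so PanelQueryState can treat it as complete on its own. A sketch of that simplified event shape, using only the fields visible in the diff (the real interface is DataStreamState from @grafana/ui, which has more members):

```ts
import { LoadingState } from '@grafana/data';

// Minimal stand-in for the parts of DataStreamState used above.
interface StreamEvent {
  key: string;
  state: LoadingState;
  data: any[];
  unsubscribe: () => void;
}

const emitQueryResult = (observer: (event: StreamEvent) => void, refId: string, data: any[]) => {
  observer({
    key: `prometheus-${refId}`,
    state: LoadingState.Done, // marks the "stream" as finished; no separate stop event needed
    data,
    unsubscribe: () => undefined,
  });
};
```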
@@ -6,7 +6,7 @@ import { TemplateSrv } from 'app/features/templating/template_srv';
export class ResultTransformer {
constructor(private templateSrv: TemplateSrv) {}

transform(response: any, options: any): any[] {
transform(response: any, options: any): Array<TableModel | TimeSeries> {
const prometheusResult = response.data.data.result;

if (options.format === 'table') {
@@ -80,7 +80,7 @@ export class ResultTransformer {
};
}

transformMetricDataToTable(md: any, resultCount: number, refId: string, valueWithRefId?: boolean) {
transformMetricDataToTable(md: any, resultCount: number, refId: string, valueWithRefId?: boolean): TableModel {
const table = new TableModel();
let i: number, j: number;
const metricLabels: { [key: string]: number } = {};

@@ -35,7 +35,7 @@ export class GraphPanelController extends React.Component<GraphPanelControllerPr

this.state = {
graphSeriesModel: getGraphSeriesModel(
props.data,
props.data.series,
props.options.series,
props.options.graph,
props.options.legend
@@ -47,7 +47,7 @@ export class GraphPanelController extends React.Component<GraphPanelControllerPr
return {
...state,
graphSeriesModel: getGraphSeriesModel(
props.data,
props.data.series,
props.options.series,
props.options.graph,
props.options.legend

@@ -1,11 +1,19 @@
import { colors, getFlotPairs, getColorFromHexRgbOrName, getDisplayProcessor, PanelData } from '@grafana/ui';
import { NullValueMode, reduceField, FieldType, DisplayValue, GraphSeriesXY, getTimeField } from '@grafana/data';
import { colors, getFlotPairs, getColorFromHexRgbOrName, getDisplayProcessor } from '@grafana/ui';
import {
NullValueMode,
reduceField,
FieldType,
DisplayValue,
GraphSeriesXY,
getTimeField,
DataFrame,
} from '@grafana/data';

import { SeriesOptions, GraphOptions } from './types';
import { GraphLegendEditorLegendOptions } from './GraphLegendEditor';

export const getGraphSeriesModel = (
data: PanelData,
dataFrames: DataFrame[],
seriesOptions: SeriesOptions,
graphOptions: GraphOptions,
legendOptions: GraphLegendEditorLegendOptions
@@ -18,7 +26,7 @@ export const getGraphSeriesModel = (
},
});

for (const series of data.series) {
for (const series of dataFrames) {
const { timeField } = getTimeField(series);
if (!timeField) {
continue;

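The new getGraphSeriesModel signature takes the frames directly instead of a PanelData envelope. A usage sketch matching the call sites shown earlier in this diff (frame contents are illustrative):

```ts
import { FieldType, GraphSeriesXY, toDataFrame } from '@grafana/data';
import { getGraphSeriesModel } from 'app/plugins/panel/graph2/getGraphSeriesModel';

const frame = toDataFrame({
  name: 'A-series',
  fields: [
    { name: 'A-series', type: FieldType.number, values: [4, 5, 6] },
    { name: 'time', type: FieldType.time, values: [100, 200, 300] },
  ],
});

const series: GraphSeriesXY[] = getGraphSeriesModel(
  [frame],
  {}, // per-series overrides (SeriesOptions)
  { showBars: false, showLines: true, showPoints: false }, // GraphOptions
  { asTable: false, isVisible: true, placement: 'under' } // legend options
);
```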