Mirror of https://github.com/grafana/grafana.git
Fix: Parse exemplars before parsing heatmap data (#47463)
* Parse exemplars before parsing the heatmap. Exemplar frames are overlaid on top of the heatmap and are not part of the main heatmap data. * Added tests
This commit is contained in:
parent 883f9f718f
commit 222325c7f1
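At a glance, the fix is an ordering change inside transformV2: exemplar frames are now partitioned out of the non-table frames before heatmap frames are detected and merged, so an exemplar series can no longer be swept into the histogram merge. Below is a minimal sketch of that ordering, not the actual implementation; it assumes lodash's partition and the DataFrame type from @grafana/data, and it takes the heatmap check as a parameter instead of calling the datasource's internal isHeatmapResult helper shown in the diff. splitFrames is an illustrative name, not a function in the Grafana codebase.

// Minimal sketch of the reordered split, assuming lodash and @grafana/data.
import { partition } from 'lodash';
import { DataFrame } from '@grafana/data';

export function splitFrames(
  framesWithoutTable: DataFrame[],
  isHeatmap: (df: DataFrame) => boolean
) {
  // 1. Pull the exemplar frames out first so they can never be folded into the heatmap.
  const [exemplarFrames, framesWithoutTableAndExemplars] = partition<DataFrame>(
    framesWithoutTable,
    (df) => df.meta?.custom?.resultType === 'exemplar'
  );

  // 2. Only then split the remaining frames into heatmap frames and everything else.
  const [heatmapFrames, otherFrames] = partition<DataFrame>(
    framesWithoutTableAndExemplars,
    isHeatmap
  );

  return { exemplarFrames, heatmapFrames, otherFrames };
}

The diff below adds a regression test for this behavior and then applies the reordering in the transformer itself.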
@@ -251,6 +251,71 @@ describe('Prometheus Result Transformer', () => {
       expect(series.data[0].fields[2].values.toArray()).toEqual([10, 0, 30]);
       expect(series.data[0].fields[3].values.toArray()).toEqual([10, 0, 10]);
     });
+
+    it('Retains exemplar frames when data returned is a heatmap', () => {
+      const options = {
+        targets: [
+          {
+            format: 'heatmap',
+            refId: 'A',
+          },
+        ],
+      } as unknown as DataQueryRequest<PromQuery>;
+      const response = {
+        state: 'Done',
+        data: [
+          new MutableDataFrame({
+            refId: 'A',
+            fields: [
+              { name: 'Time', type: FieldType.time, values: [6, 5, 4] },
+              {
+                name: 'Value',
+                type: FieldType.number,
+                values: [10, 10, 0],
+                labels: { le: '1' },
+              },
+            ],
+          }),
+          new MutableDataFrame({
+            refId: 'A',
+            name: 'exemplar',
+            meta: {
+              custom: {
+                resultType: 'exemplar',
+              },
+            },
+            fields: [
+              { name: 'Time', type: FieldType.time, values: [6, 5, 4, 3, 2, 1] },
+              {
+                name: 'Value',
+                type: FieldType.number,
+                values: [30, 10, 40, 90, 14, 21],
+                labels: { le: '6' },
+              },
+              {
+                name: 'Test',
+                type: FieldType.string,
+                values: ['hello', 'doctor', 'name', 'continue', 'yesterday', 'tomorrow'],
+                labels: { le: '6' },
+              },
+            ],
+          }),
+        ],
+      } as unknown as DataQueryResponse;
+
+      const series = transformV2(response, options, {});
+      expect(series.data[0].fields.length).toEqual(2);
+      expect(series.data.length).toEqual(2);
+      expect(series.data[1].fields[2].values.toArray()).toEqual([
+        'hello',
+        'doctor',
+        'name',
+        'continue',
+        'yesterday',
+        'tomorrow',
+      ]);
+      expect(series.data[1].fields.length).toEqual(3);
+    });
   });

   describe('transformDFToTable', () => {
     it('transforms dataFrame with response length 1 to table dataFrame', () => {
@@ -72,16 +72,8 @@ export function transformV2(
   const [tableFrames, framesWithoutTable] = partition<DataFrame>(response.data, (df) => isTableResult(df, request));
   const processedTableFrames = transformDFToTable(tableFrames);

-  const [heatmapResults, framesWithoutTableAndHeatmaps] = partition<DataFrame>(framesWithoutTable, (df) =>
-    isHeatmapResult(df, request)
-  );
-
-  const processedHeatmapFrames = mergeHeatmapFrames(
-    transformToHistogramOverTime(heatmapResults.sort(sortSeriesByLabel))
-  );
-
-  const [exemplarFrames, framesWithoutTableHeatmapsAndExemplars] = partition<DataFrame>(
-    framesWithoutTableAndHeatmaps,
+  const [exemplarFrames, framesWithoutTableAndExemplars] = partition<DataFrame>(
+    framesWithoutTable,
     (df) => df.meta?.custom?.resultType === 'exemplar'
   );

@@ -103,6 +95,15 @@ export function transformV2(
     return { ...dataFrame, meta: { ...dataFrame.meta, dataTopic: DataTopic.Annotations } };
   });

+  const [heatmapResults, framesWithoutTableHeatmapsAndExemplars] = partition<DataFrame>(
+    framesWithoutTableAndExemplars,
+    (df) => isHeatmapResult(df, request)
+  );
+
+  const processedHeatmapFrames = mergeHeatmapFrames(
+    transformToHistogramOverTime(heatmapResults.sort(sortSeriesByLabel))
+  );
+
   // Everything else is processed as time_series result and graph preferredVisualisationType
   const otherFrames = framesWithoutTableHeatmapsAndExemplars.map((dataFrame) => {
     const df = {
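Why the ordering matters is visible in the unchanged context just above the moved block: each exemplar frame is tagged with DataTopic.Annotations, which is how exemplars end up rendered as an overlay rather than as part of the series data. A hedged sketch of that tagging step, assuming the DataTopic enum and DataFrame type exported by @grafana/data (tagExemplarFrames is an illustrative name, not a function in the Grafana codebase):

// Sketch only: mark exemplar frames as annotation data so panels render them as
// an overlay on top of the separately merged heatmap, mirroring the context line
// `dataTopic: DataTopic.Annotations` in the hunk above.
import { DataFrame, DataTopic } from '@grafana/data';

export function tagExemplarFrames(exemplarFrames: DataFrame[]): DataFrame[] {
  return exemplarFrames.map((dataFrame) => ({
    ...dataFrame,
    meta: { ...dataFrame.meta, dataTopic: DataTopic.Annotations },
  }));
}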