Transformers: move long to wide logic into existing transformer (#64474)
This commit is contained in:
parent 0efa8c5ce1
commit 83e9558cdd
@@ -7,6 +7,7 @@ import {
  SynchronousDataTransformerInfo,
  getFieldMatcher,
  DataTransformContext,
  FieldMatcher,
} from '@grafana/data';
import { getMatcherConfig } from '@grafana/data/src/transformations/transformers/filterByName';
import { noopTransformer } from '@grafana/data/src/transformations/transformers/noop';
@@ -85,71 +86,80 @@ export const partitionByValuesTransformer: SynchronousDataTransformerInfo<Partit
      if (!data.length) {
        return data;
      }

      const frame = data[0];
      const keyFields = frame.fields.filter((f) => matcher!(f, frame, data))!;
      const keyFieldsVals = keyFields.map((f) => f.values.toArray());
      const names = keyFields.map((f) => f.name);

      const frameNameOpts = {
        ...defaultFrameNameOptions,
        ...options.naming,
      };

      return partition(keyFieldsVals).map((idxs: number[]) => {
        let frameName = frame.name;
        let fieldLabels = {};

        if (frameNameOpts.asLabels) {
          fieldLabels = buildFieldLabels(
            names,
            keyFields.map((f, i) => keyFieldsVals[i][idxs[0]])
          );
        } else {
          let name = buildFrameName(
            frameNameOpts,
            names,
            keyFields.map((f, i) => keyFieldsVals[i][idxs[0]])
          );

          if (options.naming?.append && frame.name) {
            name = `${frame.name} ${name}`;
          }

          frameName = name;
        }

        let filteredFields = frame.fields;

        if (!options.keepFields) {
          const keyFieldNames = new Set(names);
          filteredFields = frame.fields.filter((field) => !keyFieldNames.has(field.name));
        }

        return {
          ...frame,
          name: frameName,
          length: idxs.length,
          fields: filteredFields.map((f) => {
            const vals = f.values.toArray();
            const vals2 = Array(idxs.length);

            for (let i = 0; i < idxs.length; i++) {
              vals2[i] = vals[idxs[i]];
            }

            return {
              ...f,
              labels: {
                ...f.labels,
                ...fieldLabels,
              },
              state: undefined,
              values: new ArrayVector(vals2),
            };
          }),
        };
      });
      // error if > 1 frame?
      return partitionByValues(data[0], matcher, options);
    };
  },
};

// Split a single frame dataset into multiple frames based on values in a set of fields
export function partitionByValues(
  frame: DataFrame,
  matcher: FieldMatcher,
  options?: PartitionByValuesTransformerOptions
): DataFrame[] {
  const keyFields = frame.fields.filter((f) => matcher(f, frame, [frame]))!;
  const keyFieldsVals = keyFields.map((f) => f.values.toArray());
  const names = keyFields.map((f) => f.name);

  const frameNameOpts = {
    ...defaultFrameNameOptions,
    ...options?.naming,
  };

  return partition(keyFieldsVals).map((idxs: number[]) => {
    let frameName = frame.name;
    let fieldLabels = {};

    if (frameNameOpts.asLabels) {
      fieldLabels = buildFieldLabels(
        names,
        keyFields.map((f, i) => keyFieldsVals[i][idxs[0]])
      );
    } else {
      let name = buildFrameName(
        frameNameOpts,
        names,
        keyFields.map((f, i) => keyFieldsVals[i][idxs[0]])
      );

      if (frameNameOpts?.append && frame.name) {
        name = `${frame.name} ${name}`;
      }

      frameName = name;
    }

    let filteredFields = frame.fields;

    if (!options?.keepFields) {
      const keyFieldNames = new Set(names);
      filteredFields = frame.fields.filter((field) => !keyFieldNames.has(field.name));
    }

    return {
      name: frameName,
      meta: frame.meta,
      length: idxs.length,
      fields: filteredFields.map((f) => {
        const vals = f.values.toArray();
        const vals2 = Array(idxs.length);

        for (let i = 0; i < idxs.length; i++) {
          vals2[i] = vals[idxs[i]];
        }

        return {
          name: f.name,
          type: f.type,
          config: f.config,
          labels: {
            ...f.labels,
            ...fieldLabels,
          },
          values: new ArrayVector(vals2),
        };
      }),
    };
  });
}
@@ -0,0 +1,142 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Prepare time series transformer should handle long to multi 1`] = `
[
  {
    "fields": [
      {
        "config": {},
        "name": "time",
        "type": "time",
        "values": [
          1,
          2,
          3,
        ],
      },
      {
        "config": {},
        "labels": {
          "sensor": "a",
        },
        "name": "speed",
        "type": "number",
        "values": [
          4,
          6,
          8,
        ],
      },
    ],
    "length": 3,
    "meta": {
      "type": "timeseries-multi",
    },
    "name": undefined,
    "refId": "A",
  },
  {
    "fields": [
      {
        "config": {},
        "name": "time",
        "type": "time",
        "values": [
          1,
          2,
          3,
        ],
      },
      {
        "config": {},
        "labels": {
          "sensor": "b",
        },
        "name": "speed",
        "type": "number",
        "values": [
          5,
          7,
          9,
        ],
      },
    ],
    "length": 3,
    "meta": {
      "type": "timeseries-multi",
    },
    "name": undefined,
    "refId": "A",
  },
]
`;

exports[`Prepare time series transformer should handle long to wide 1`] = `
[
  {
    "fields": [
      {
        "config": {},
        "labels": {
          "sensor": "a",
        },
        "name": "time",
        "state": {
          "origin": {
            "fieldIndex": 0,
            "frameIndex": 0,
          },
        },
        "type": "time",
        "values": [
          1,
          2,
          3,
        ],
      },
      {
        "config": {},
        "labels": {
          "sensor": "a",
        },
        "name": "speed",
        "state": {
          "origin": {
            "fieldIndex": 1,
            "frameIndex": 0,
          },
        },
        "type": "number",
        "values": [
          4,
          6,
          8,
        ],
      },
      {
        "config": {},
        "labels": {
          "sensor": "b",
        },
        "name": "speed",
        "state": {
          "origin": {
            "fieldIndex": 1,
            "frameIndex": 1,
          },
        },
        "type": "number",
        "values": [
          5,
          7,
          9,
        ],
      },
    ],
    "length": 3,
    "meta": {
      "type": "timeseries-wide",
    },
  },
]
`;
@@ -371,6 +371,48 @@ describe('Prepare time series transformer', () => {
      }),
    ]);
  });

  it('should handle long to wide', () => {
    expect(
      prepareTimeSeriesTransformer.transformer(
        {
          format: timeSeriesFormat.TimeSeriesWide,
        },
        ctx
      )([
        toDataFrame({
          meta: { type: DataFrameType.TimeSeriesLong },
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1, 1, 2, 2, 3, 3] },
            { name: 'speed', type: FieldType.number, values: [4, 5, 6, 7, 8, 9] },
            { name: 'sensor', type: FieldType.string, values: ['a', 'b', 'a', 'b', 'a', 'b'] },
          ],
        }),
      ])
    ).toMatchSnapshot();
  });

  it('should handle long to multi', () => {
    expect(
      prepareTimeSeriesTransformer.transformer(
        {
          format: timeSeriesFormat.TimeSeriesMulti,
        },
        ctx
      )([
        toDataFrame({
          meta: { type: DataFrameType.TimeSeriesLong },
          refId: 'A',
          fields: [
            { name: 'time', type: FieldType.time, values: [1, 1, 2, 2, 3, 3] },
            { name: 'speed', type: FieldType.number, values: [4, 5, 6, 7, 8, 9] },
            { name: 'sensor', type: FieldType.string, values: ['a', 'b', 'a', 'b', 'a', 'b'] },
          ],
        }),
      ])
    ).toMatchSnapshot(); // ???? expecting a single frame!!!!
  });
});

function toEquableDataFrame(source: any): DataFrame {
@@ -15,6 +15,8 @@ import {
} from '@grafana/data';
import { Labels } from 'app/types/unified-alerting-dto';

import { partitionByValues } from '../partitionByValues/partitionByValues';

/**
 * There is currently an effort to figure out consistent names
 * for the various formats/types we produce and use.
@@ -27,9 +29,11 @@ import { Labels } from 'app/types/unified-alerting-dto';

export enum timeSeriesFormat {
  TimeSeriesWide = 'wide',
  TimeSeriesMany = 'many',
  TimeSeriesLong = 'long',
  TimeSeriesMulti = 'multi',

  /** @deprecated use multi */
  TimeSeriesMany = 'many',
}

export type PrepareTimeSeriesOptions = {
@@ -282,6 +286,20 @@ export function toTimeSeriesLong(data: DataFrame[]): DataFrame[] {
  return result;
}

export function longToMultiTimeSeries(frame: DataFrame): DataFrame[] {
  // All the string fields
  const matcher = (field: Field) => field.type === FieldType.string;

  // transform one dataFrame at a time and concat into DataFrame[]
  return partitionByValues(frame, matcher).map((frame) => {
    if (!frame.meta) {
      frame.meta = {};
    }
    frame.meta.type = DataFrameType.TimeSeriesMulti;
    return frame;
  });
}

export const prepareTimeSeriesTransformer: SynchronousDataTransformerInfo<PrepareTimeSeriesOptions> = {
  id: DataTransformerID.prepareTimeSeries,
  name: 'Prepare time series',
@@ -293,20 +311,43 @@ export const prepareTimeSeriesTransformer: SynchronousDataTransformerInfo<Prepar

  transformer: (options: PrepareTimeSeriesOptions) => {
    const format = options?.format ?? timeSeriesFormat.TimeSeriesWide;
    if (format === timeSeriesFormat.TimeSeriesMany || timeSeriesFormat.TimeSeriesMulti) {
    if (format === timeSeriesFormat.TimeSeriesMany || format === timeSeriesFormat.TimeSeriesMulti) {
      return toTimeSeriesMulti;
    } else if (format === timeSeriesFormat.TimeSeriesLong) {
      return toTimeSeriesLong;
    }
    const joinBy = fieldMatchers.get(FieldMatcherID.firstTimeField).get({});

    // Single TimeSeriesWide frame (joined by time)
    return (data: DataFrame[]) => {
      if (!data.length) {
        return [];
      }

      // Convert long to wide first
      const join: DataFrame[] = [];
      for (const df of data) {
        if (df.meta?.type === DataFrameType.TimeSeriesLong) {
          longToMultiTimeSeries(df).forEach((v) => join.push(v));
        } else {
          join.push(df);
        }
      }

      // Join by the first frame
      const frame = outerJoinDataFrames({
        frames: data,
        joinBy: fieldMatchers.get(FieldMatcherID.firstTimeField).get({}),
        frames: join,
        joinBy,
        keepOriginIndices: true,
      });
      return frame ? [frame] : [];
      if (frame) {
        if (!frame.meta) {
          frame.meta = {};
        }
        frame.meta.type = DataFrameType.TimeSeriesWide;
        return [frame];
      }
      return [];
    };
  },
};

@@ -1,7 +1,6 @@
import {
  ArrayVector,
  DataFrame,
  DataFrameType,
  Field,
  FieldType,
  getDisplayProcessor,
@@ -16,7 +15,6 @@ import { convertFieldType } from '@grafana/data/src/transformations/transformers
import { GraphFieldConfig, LineInterpolation } from '@grafana/schema';
import { applyNullInsertThreshold } from '@grafana/ui/src/components/GraphNG/nullInsertThreshold';
import { nullToValue } from '@grafana/ui/src/components/GraphNG/nullToValue';
import { partitionByValuesTransformer } from 'app/features/transformers/partitionByValues/partitionByValues';

/**
 * Returns null if there are no graphable fields
@@ -41,10 +39,6 @@ export function prepareGraphableFields(
    }
  }

  if (series.every((df) => df.meta?.type === DataFrameType.TimeSeriesLong)) {
    series = prepareTimeSeriesLong(series);
  }

  let copy: Field;

  const frames: DataFrame[] = [];
@@ -209,20 +203,3 @@ export function regenerateLinksSupplier(

  return alignedDataFrame;
}

export function prepareTimeSeriesLong(series: DataFrame[]): DataFrame[] {
  // Transform each dataframe of the series
  // to handle different field names in different frames
  return series.reduce((acc: DataFrame[], dataFrame: DataFrame) => {
    // these could be different in each frame
    const stringFields = dataFrame.fields.filter((field) => field.type === FieldType.string).map((field) => field.name);

    // transform one dataFrame at a time and concat into DataFrame[]
    const transformedSeries = partitionByValuesTransformer.transformer(
      { fields: stringFields },
      { interpolate: (value: string) => value }
    )([dataFrame]);

    return acc.concat(transformedSeries);
  }, []);
}