Transformations: Add context parameter to transformDataFrame and operators (#60694)
* Transformations: Add context parameter to transformDataFrame and operators
* Remove unused queries prop
* Fixed test
* Fixed test
parent 6da850a2f2
commit bd90a6e1be
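In short, the optional `replace` callback that each DataTransformerConfig used to carry is replaced by a DataTransformContext object that is passed explicitly to transformDataFrame and forwarded to every operator. A minimal sketch of how a caller wires this up after the change (the no-op interpolate function and the sample 'organize' transformation are illustrative only, and the snippet assumes the standard transformer registry has already been initialized, as Grafana does at startup):

```ts
import {
  DataTransformContext,
  DataTransformerConfig,
  toDataFrame,
  transformDataFrame,
} from '@grafana/data';

// The context now carries interpolation; callers that previously attached a
// `replace` function to each config now pass it once, here.
const ctx: DataTransformContext = {
  interpolate: (value) => value, // no-op; Grafana passes getTemplateSrv().replace(...) here
};

const transformations: DataTransformerConfig[] = [
  { id: 'organize', options: { excludeByName: {}, indexByName: {}, renameByName: {} } },
];

const frames = [toDataFrame({ fields: [{ name: 'value', values: [1, 2, 3] }] })];

// The third argument is optional; when omitted, transformDataFrame falls back
// to an identity interpolate function.
transformDataFrame(transformations, frames, ctx).subscribe((out) => {
  console.log(out.length);
});
```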
@@ -4353,9 +4353,8 @@ exports[`better eslint`] = {
   ],
   "public/app/features/query/state/PanelQueryRunner.ts:5381": [
     [0, 0, 0, "Do not use any type assertions.", "0"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
-    [0, 0, 0, "Do not use any type assertions.", "2"],
-    [0, 0, 0, "Do not use any type assertions.", "3"]
+    [0, 0, 0, "Do not use any type assertions.", "1"],
+    [0, 0, 0, "Do not use any type assertions.", "2"]
   ],
   "public/app/features/query/state/runRequest.test.ts:5381": [
     [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
@@ -1,12 +1,12 @@
 import { MonoTypeOperatorFunction, Observable, of } from 'rxjs';
 import { map, mergeMap } from 'rxjs/operators';
 
-import { DataFrame, DataTransformerConfig } from '../types';
+import { DataFrame, DataTransformContext, DataTransformerConfig } from '../types';
 
 import { standardTransformersRegistry, TransformerRegistryItem } from './standardTransformersRegistry';
 
 const getOperator =
-  (config: DataTransformerConfig): MonoTypeOperatorFunction<DataFrame[]> =>
+  (config: DataTransformerConfig, ctx: DataTransformContext): MonoTypeOperatorFunction<DataFrame[]> =>
   (source) => {
     const info = standardTransformersRegistry.get(config.id);
 
@@ -19,7 +19,7 @@ const getOperator =
 
     return source.pipe(
       mergeMap((before) =>
-        of(before).pipe(info.transformation.operator(options, config.replace), postProcessTransform(before, info))
+        of(before).pipe(info.transformation.operator(options, ctx), postProcessTransform(before, info))
       )
     );
   };
@@ -53,7 +53,11 @@ const postProcessTransform =
 /**
  * Apply configured transformations to the input data
  */
-export function transformDataFrame(options: DataTransformerConfig[], data: DataFrame[]): Observable<DataFrame[]> {
+export function transformDataFrame(
+  options: DataTransformerConfig[],
+  data: DataFrame[],
+  ctx?: DataTransformContext
+): Observable<DataFrame[]> {
   const stream = of<DataFrame[]>(data);
 
   if (!options.length) {
@@ -61,6 +65,7 @@ export function transformDataFrame(options: DataTransformerConfig[], data: DataF
   }
 
   const operators: Array<MonoTypeOperatorFunction<DataFrame[]>> = [];
+  const context = ctx ?? { interpolate: (str) => str };
 
   for (let index = 0; index < options.length; index++) {
     const config = options[index];
@@ -69,7 +74,7 @@ export function transformDataFrame(options: DataTransformerConfig[], data: DataF
       continue;
     }
 
-    operators.push(getOperator(config));
+    operators.push(getOperator(config, context));
   }
 
   // @ts-ignore TypeScript has a hard time understanding this construct
@@ -1,6 +1,6 @@
 import { DataFrameView } from '../../dataframe';
 import { toDataFrame } from '../../dataframe/processDataFrame';
-import { ScopedVars } from '../../types';
+import { DataTransformContext, ScopedVars } from '../../types';
 import { FieldType } from '../../types/dataFrame';
 import { BinaryOperationID } from '../../utils';
 import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
@@ -235,7 +235,9 @@ describe('calculateField transformer w/ timeseries', () => {
         },
         replaceFields: true,
       },
-      replace: (target: string | undefined, scopedVars?: ScopedVars, format?: string | Function): string => {
+    };
+    const context: DataTransformContext = {
+      interpolate: (target: string | undefined, scopedVars?: ScopedVars, format?: string | Function): string => {
         if (!target) {
           return '';
         }
@@ -262,7 +264,7 @@ describe('calculateField transformer w/ timeseries', () => {
       },
     };
 
-    await expect(transformDataFrame([cfg], [seriesA])).toEmitValuesWith((received) => {
+    await expect(transformDataFrame([cfg], [seriesA], context)).toEmitValuesWith((received) => {
      const data = received[0];
      const filtered = data[0];
      const rows = new DataFrameView(filtered).toArray();
@@ -72,11 +72,15 @@ export const calculateFieldTransformer: DataTransformerInfo<CalculateFieldTransf
       reducer: ReducerID.sum,
     },
   },
-  operator: (options, replace) => (outerSource) => {
+  operator: (options, ctx) => (outerSource) => {
     const operator =
-      options && options.timeSeries !== false ? ensureColumnsTransformer.operator(null) : noopTransformer.operator({});
+      options && options.timeSeries !== false
+        ? ensureColumnsTransformer.operator(null, ctx)
+        : noopTransformer.operator({}, ctx);
 
-    options.alias = replace ? replace(options.alias) : options.alias;
+    if (options.alias != null) {
+      options.alias = ctx.interpolate(options.alias);
+    }
 
     return outerSource.pipe(
       operator,
@@ -87,13 +91,12 @@ export const calculateFieldTransformer: DataTransformerInfo<CalculateFieldTransf
       if (mode === CalculateFieldMode.ReduceRow) {
         creator = getReduceRowCreator(defaults(options.reduce, defaultReduceOptions), data);
       } else if (mode === CalculateFieldMode.BinaryOperation) {
-        const binaryOptions = replace
-          ? {
-              ...options.binary,
-              left: replace ? replace(options.binary?.left) : options.binary?.left,
-              right: replace ? replace(options.binary?.right) : options.binary?.right,
-            }
-          : options.binary;
+        const binaryOptions = {
+          ...options.binary,
+          left: ctx.interpolate(options.binary?.left!),
+          right: ctx.interpolate(options.binary?.right!),
+        };
 
         creator = getBinaryCreator(defaults(binaryOptions, defaultBinaryOptions), data);
       }
@@ -37,7 +37,8 @@ export const convertFieldTypeTransformer: SynchronousDataTransformerInfo<Convert
     conversions: [{ targetField: undefined, destinationType: undefined, dateFormat: undefined }],
   },
 
-  operator: (options) => (source) => source.pipe(map((data) => convertFieldTypeTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => convertFieldTypeTransformer.transformer(options, ctx)(data))),
 
   transformer: (options: ConvertFieldTypeTransformerOptions) => (data: DataFrame[]) => {
     if (!Array.isArray(data) || data.length === 0) {
@@ -12,16 +12,20 @@ export const ensureColumnsTransformer: SynchronousDataTransformerInfo = {
   name: 'Ensure Columns Transformer',
   description: 'Will check if current data frames is series or columns. If in series it will convert to columns.',
 
-  operator: (options) => (source) => source.pipe(map((data) => ensureColumnsTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => ensureColumnsTransformer.transformer(options, ctx)(data))),
 
-  transformer: (options: any) => (frames: DataFrame[]) => {
+  transformer: (_options: any, ctx) => (frames: DataFrame[]) => {
     // Assume timeseries should first be joined by time
     const timeFieldName = findConsistentTimeFieldName(frames);
 
     if (frames.length > 1 && timeFieldName) {
-      return joinByFieldTransformer.transformer({
-        byField: timeFieldName,
-      })(frames);
+      return joinByFieldTransformer.transformer(
+        {
+          byField: timeFieldName,
+        },
+        ctx
+      )(frames);
     }
     return frames;
   },
@@ -22,23 +22,21 @@ export const filterFieldsTransformer: DataTransformerInfo<FilterOptions> = {
    * Return a modified copy of the series. If the transform is not or should not
    * be applied, just return the input series
    */
-  operator: (options: FilterOptions, replace) => (source) => {
+  operator: (options: FilterOptions, ctx) => (source) => {
     if (!options.include && !options.exclude) {
-      return source.pipe(noopTransformer.operator({}, replace));
+      return source.pipe(noopTransformer.operator({}, ctx));
     }
 
-    if (replace) {
-      if (typeof options.include?.options === 'string') {
-        options.include.options = replace(options.include?.options);
-      } else if (typeof options.include?.options?.pattern === 'string') {
-        options.include.options.pattern = replace(options.include?.options.pattern);
-      }
+    if (typeof options.include?.options === 'string') {
+      options.include.options = ctx.interpolate(options.include?.options);
+    } else if (typeof options.include?.options?.pattern === 'string') {
+      options.include.options.pattern = ctx.interpolate(options.include?.options.pattern);
+    }
 
-      if (typeof options.exclude?.options === 'string') {
-        options.exclude.options = replace(options.exclude?.options);
-      } else if (typeof options.exclude?.options?.pattern === 'string') {
-        options.exclude.options.pattern = replace(options.exclude?.options.pattern);
-      }
-    }
+    if (typeof options.exclude?.options === 'string') {
+      options.exclude.options = ctx.interpolate(options.exclude?.options);
+    } else if (typeof options.exclude?.options?.pattern === 'string') {
+      options.exclude.options.pattern = ctx.interpolate(options.exclude?.options.pattern);
+    }
 
     return source.pipe(
@@ -91,9 +89,9 @@ export const filterFramesTransformer: DataTransformerInfo<FilterOptions> = {
    * Return a modified copy of the series. If the transform is not or should not
    * be applied, just return the input series
    */
-  operator: (options) => (source) => {
+  operator: (options, ctx) => (source) => {
     if (!options.include && !options.exclude) {
-      return source.pipe(noopTransformer.operator({}));
+      return source.pipe(noopTransformer.operator({}, ctx));
     }
 
     return source.pipe(
@@ -205,7 +205,10 @@ describe('filterByName transformer', () => {
           pattern: '/^$var1/',
         },
       },
-      replace: (target: string | undefined, scopedVars?: ScopedVars, format?: string | Function): string => {
+    };
+
+    const ctx = {
+      interpolate: (target: string | undefined, scopedVars?: ScopedVars, format?: string | Function): string => {
        if (!target) {
          return '';
        }
@@ -222,7 +225,7 @@ describe('filterByName transformer', () => {
      },
    };
 
-    await expect(transformDataFrame([cfg], [seriesWithNamesToMatch])).toEmitValuesWith((received) => {
+    await expect(transformDataFrame([cfg], [seriesWithNamesToMatch], ctx)).toEmitValuesWith((received) => {
      const data = received[0];
      const filtered = data[0];
      expect(filtered.fields.length).toBe(2);
@@ -19,7 +19,7 @@ export const filterFramesByRefIdTransformer: DataTransformerInfo<FilterFramesByR
    * Return a modified copy of the series. If the transform is not or should not
    * be applied, just return the input series
    */
-  operator: (options) => (source) => {
+  operator: (options, ctx) => (source) => {
     const filterOptions: FilterOptions = {};
     if (options.include) {
       filterOptions.include = {
@@ -34,6 +34,6 @@ export const filterFramesByRefIdTransformer: DataTransformerInfo<FilterFramesByR
       };
     }
 
-    return source.pipe(filterFramesTransformer.operator(filterOptions));
+    return source.pipe(filterFramesTransformer.operator(filterOptions, ctx));
   },
 };
@@ -40,13 +40,13 @@ export const filterByValueTransformer: DataTransformerInfo<FilterByValueTransfor
     match: FilterByValueMatch.any,
   },
 
-  operator: (options) => (source) => {
+  operator: (options, ctx) => (source) => {
     const filters = options.filters;
     const matchAll = options.match === FilterByValueMatch.all;
     const include = options.type === FilterByValueType.include;
 
     if (!Array.isArray(filters) || filters.length === 0) {
-      return source.pipe(noopTransformer.operator({}));
+      return source.pipe(noopTransformer.operator({}, ctx));
     }
 
     return source.pipe(
@@ -82,7 +82,8 @@ export const histogramTransformer: SynchronousDataTransformerInfo<HistogramTrans
     fields: {},
   },
 
-  operator: (options) => (source) => source.pipe(map((data) => histogramTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => histogramTransformer.transformer(options, ctx)(data))),
 
   transformer: (options: HistogramTransformerOptions) => (data: DataFrame[]) => {
     if (!Array.isArray(data) || data.length === 0) {
@@ -28,7 +28,8 @@ export const joinByFieldTransformer: SynchronousDataTransformerInfo<JoinByFieldO
     mode: JoinMode.outer,
   },
 
-  operator: (options) => (source) => source.pipe(map((data) => joinByFieldTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => joinByFieldTransformer.transformer(options, ctx)(data))),
 
   transformer: (options: JoinByFieldOptions) => {
     let joinBy: FieldMatcher | undefined = undefined;
@@ -28,7 +28,8 @@ export const labelsToFieldsTransformer: SynchronousDataTransformerInfo<LabelsToF
   description: 'Extract time series labels to fields (columns or rows)',
   defaultOptions: {},
 
-  operator: (options) => (source) => source.pipe(map((data) => labelsToFieldsTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => labelsToFieldsTransformer.transformer(options, ctx)(data))),
 
   transformer: (options: LabelsToFieldsOptions) => (data: DataFrame[]) => {
     // Show each label as a field row
@@ -25,13 +25,16 @@ export const organizeFieldsTransformer: DataTransformerInfo<OrganizeFieldsTransf
    * Return a modified copy of the series. If the transform is not or should not
    * be applied, just return the input series
    */
-  operator: (options) => (source) =>
+  operator: (options, ctx) => (source) =>
     source.pipe(
-      filterFieldsByNameTransformer.operator({
-        exclude: { names: mapToExcludeArray(options.excludeByName) },
-      }),
-      orderFieldsTransformer.operator(options),
-      renameFieldsTransformer.operator(options)
+      filterFieldsByNameTransformer.operator(
+        {
+          exclude: { names: mapToExcludeArray(options.excludeByName) },
+        },
+        ctx
+      ),
+      orderFieldsTransformer.operator(options, ctx),
+      renameFieldsTransformer.operator(options, ctx)
     ),
 };
 
@@ -2,8 +2,15 @@ import { MonoTypeOperatorFunction } from 'rxjs';
 
 import { RegistryItemWithOptions } from '../utils/Registry';
 
-import { ScopedVars } from './ScopedVars';
 import { DataFrame, Field } from './dataFrame';
+import { InterpolateFunction } from './panel';
+
+/**
+ * Context passed to transformDataFrame and to each transform operator
+ */
+export interface DataTransformContext {
+  interpolate: InterpolateFunction;
+}
 
 /**
  * Function that transform data frames (AKA transformer)
@@ -15,10 +22,7 @@ export interface DataTransformerInfo<TOptions = any> extends RegistryItemWithOpt
    * Function that configures transformation and returns a transformer
    * @param options
    */
-  operator: (
-    options: TOptions,
-    replace?: (target?: string, scopedVars?: ScopedVars, format?: string | Function) => string
-  ) => MonoTypeOperatorFunction<DataFrame[]>;
+  operator: (options: TOptions, context: DataTransformContext) => MonoTypeOperatorFunction<DataFrame[]>;
 }
 
 /**
@@ -28,7 +32,7 @@ export interface DataTransformerInfo<TOptions = any> extends RegistryItemWithOpt
  * @public
  */
 export interface SynchronousDataTransformerInfo<TOptions = any> extends DataTransformerInfo<TOptions> {
-  transformer: (options: TOptions) => (frames: DataFrame[]) => DataFrame[];
+  transformer: (options: TOptions, context: DataTransformContext) => (frames: DataFrame[]) => DataFrame[];
 }
 
 /**
@@ -47,10 +51,6 @@ export interface DataTransformerConfig<TOptions = any> {
    * Options to be passed to the transformer
    */
   options: TOptions;
-  /**
-   * Function to apply template variable substitution to the DataTransformerConfig
-   */
-  replace?: (target?: string, scopedVars?: ScopedVars, format?: string | Function) => string;
 }
 
 export type FrameMatcher = (frame: DataFrame) => boolean;
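For transformer authors, the practical effect of the interface change above is that operator implementations now receive the context as their second argument instead of an optional replace callback. A minimal sketch of a custom transformer written against the new signature (the transformer id, options shape, and renaming logic are made up for illustration and are not part of this commit):

```ts
import { map } from 'rxjs/operators';
import { DataFrame, DataTransformerInfo } from '@grafana/data';

interface AliasFramesOptions {
  alias?: string;
}

// Illustrative only: renames every frame using a (possibly templated) alias,
// resolving template variables through ctx.interpolate instead of the old
// per-config replace function.
export const aliasFramesTransformer: DataTransformerInfo<AliasFramesOptions> = {
  id: 'aliasFrames',
  name: 'Alias frames',
  description: 'Rename every frame using a templated alias (example transformer).',
  defaultOptions: {},

  operator: (options, ctx) => (source) =>
    source.pipe(
      map((frames: DataFrame[]) =>
        frames.map((frame) => ({
          ...frame,
          name: options.alias != null ? ctx.interpolate(options.alias) : frame.name,
        }))
      )
    ),
};
```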
@@ -10,6 +10,7 @@ import {
   AnnotationSupport,
   DataFrame,
   DataSourceApi,
+  DataTransformContext,
   Field,
   FieldType,
   getFieldDisplayName,
@@ -66,8 +67,12 @@ export function singleFrameFromPanelData(): OperatorFunction<DataFrame[], DataFr
         return of(data[0]);
       }
 
+      const ctx: DataTransformContext = {
+        interpolate: (v: string) => v,
+      };
+
       return of(data).pipe(
-        standardTransformers.mergeTransformer.operator({}),
+        standardTransformers.mergeTransformer.operator({}, ctx),
         map((d) => d[0])
       );
     })
@@ -133,13 +133,16 @@ export const decorateWithTableResult = (data: ExplorePanelData): Observable<Expl
   });
 
   const hasOnlyTimeseries = data.tableFrames.every((df) => isTimeSeries(df));
+  const transformContext = {
+    interpolate: (v: string) => v,
+  };
 
   // If we have only timeseries we do join on default time column which makes more sense. If we are showing
   // non timeseries or some mix of data we are not trying to join on anything and just try to merge them in
   // single table, which may not make sense in most cases, but it's up to the user to query something sensible.
   const transformer = hasOnlyTimeseries
-    ? of(data.tableFrames).pipe(standardTransformers.joinByFieldTransformer.operator({}))
-    : of(data.tableFrames).pipe(standardTransformers.mergeTransformer.operator({}));
+    ? of(data.tableFrames).pipe(standardTransformers.joinByFieldTransformer.operator({}, transformContext))
+    : of(data.tableFrames).pipe(standardTransformers.mergeTransformer.operator({}, transformContext));
 
   return transformer.pipe(
     map((frames) => {
@@ -183,13 +186,16 @@ export const decorateWithRawPrometheusResult = (data: ExplorePanelData): Observa
   });
 
   const hasOnlyTimeseries = tableFrames.every((df) => isTimeSeries(df));
+  const transformContext = {
+    interpolate: (v: string) => v,
+  };
 
   // If we have only timeseries we do join on default time column which makes more sense. If we are showing
   // non timeseries or some mix of data we are not trying to join on anything and just try to merge them in
   // single table, which may not make sense in most cases, but it's up to the user to query something sensible.
   const transformer = hasOnlyTimeseries
-    ? of(tableFrames).pipe(standardTransformers.joinByFieldTransformer.operator({}))
-    : of(tableFrames).pipe(standardTransformers.mergeTransformer.operator({}));
+    ? of(tableFrames).pipe(standardTransformers.joinByFieldTransformer.operator({}, transformContext))
+    : of(tableFrames).pipe(standardTransformers.mergeTransformer.operator({}, transformContext));
 
   return transformer.pipe(
     map((frames) => {
@@ -14,6 +14,7 @@ import {
   DataSourceApi,
   DataSourceJsonData,
   DataSourceRef,
+  DataTransformContext,
   DataTransformerConfig,
   getDefaultTimeRange,
   LoadingState,
@@ -187,14 +188,11 @@ export class PanelQueryRunner {
           return of(data);
         }
 
-        const replace = (option: string): string => {
-          return getTemplateSrv().replace(option, data?.request?.scopedVars);
+        const ctx: DataTransformContext = {
+          interpolate: (v: string) => getTemplateSrv().replace(v, data?.request?.scopedVars),
         };
-        transformations.forEach((transform: any) => {
-          transform.replace = replace;
-        });
 
-        return transformDataFrame(transformations, data.series).pipe(map((series) => ({ ...data, series })));
+        return transformDataFrame(transformations, data.series, ctx).pipe(map((series) => ({ ...data, series })));
       })
     );
   };
@@ -193,15 +193,13 @@ export const getTransformationsStream: (
       return of(data);
     }
 
-    const replace: (option?: string) => string = (option) => {
-      return sceneGraph.interpolate(sceneObject, option, data?.request?.scopedVars);
+    const ctx = {
+      interpolate: (value: string) => {
+        return sceneGraph.interpolate(sceneObject, value, data?.request?.scopedVars);
+      },
     };
 
-    transformations.forEach((transform: DataTransformerConfig) => {
-      transform.replace = replace;
-    });
-
-    return transformDataFrame(transformations, data.series).pipe(map((series) => ({ ...data, series })));
+    return transformDataFrame(transformations, data.series, ctx).pipe(map((series) => ({ ...data, series })));
   })
   );
 };
@@ -32,7 +32,8 @@ export const heatmapTransformer: SynchronousDataTransformerInfo<HeatmapTransform
   description: 'calculate heatmap from source data',
   defaultOptions: {},
 
-  operator: (options) => (source) => source.pipe(map((data) => heatmapTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => heatmapTransformer.transformer(options, ctx)(data))),
 
   transformer: (options: HeatmapTransformerOptions) => {
     return (data: DataFrame[]) => {
@@ -68,9 +68,10 @@ export class CalculateFieldTransformerEditor extends React.PureComponent<
   private initOptions() {
     const { options } = this.props;
     const configuredOptions = options?.reduce?.include || [];
+    const ctx = { interpolate: (v: string) => v };
     const subscription = of(this.props.input)
       .pipe(
-        standardTransformers.ensureColumnsTransformer.operator(null),
+        standardTransformers.ensureColumnsTransformer.operator(null, ctx),
         this.extractAllNames(),
         this.extractNamesAndSelected(configuredOptions)
       )
@@ -8,12 +8,13 @@ describe('Fields from JSON', () => {
       source: 'line',
       replace: true,
     };
+    const ctx = { interpolate: (v: string) => v };
     const data = toDataFrame({
       columns: ['ts', 'line'],
       rows: appl,
     });
 
-    const frames = extractFieldsTransformer.transformer(cfg)([data]);
+    const frames = extractFieldsTransformer.transformer(cfg, ctx)([data]);
     expect(frames.length).toEqual(1);
     expect(
       frames[0].fields.reduce((acc, v) => {
@@ -26,7 +26,8 @@ export const extractFieldsTransformer: SynchronousDataTransformerInfo<ExtractFie
   description: 'Parse fields from the contends of another',
   defaultOptions: {},
 
-  operator: (options) => (source) => source.pipe(map((data) => extractFieldsTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => extractFieldsTransformer.transformer(options, ctx)(data))),
 
   transformer: (options: ExtractFieldsOptions) => {
     return (data: DataFrame[]) => {
@@ -22,7 +22,8 @@ export const joinByLabelsTransformer: SynchronousDataTransformerInfo<JoinByLabel
   description: 'Flatten labeled results into a table joined by labels',
   defaultOptions: {},
 
-  operator: (options) => (source) => source.pipe(map((data) => joinByLabelsTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => joinByLabelsTransformer.transformer(options, ctx)(data))),
 
   transformer: (options: JoinByLabelsTransformOptions) => {
     return (data: DataFrame[]) => {
@@ -2,6 +2,10 @@ import { toDataFrame, FieldType } from '@grafana/data';
 
 import { partitionByValuesTransformer, PartitionByValuesTransformerOptions } from './partitionByValues';
 
+const ctx = {
+  interpolate: (v: string) => v,
+};
+
 describe('Partition by values transformer', () => {
   it('should partition by one field', () => {
     const source = [
@@ -19,7 +23,7 @@ describe('Partition by values transformer', () => {
       fields: ['region'],
     };
 
-    let partitioned = partitionByValuesTransformer.transformer(config)(source);
+    let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source);
 
     expect(partitioned.length).toEqual(2);
 
@@ -55,7 +59,7 @@ describe('Partition by values transformer', () => {
       fields: ['region', 'status'],
     };
 
-    let partitioned = partitionByValuesTransformer.transformer(config)(source);
+    let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source);
 
     expect(partitioned.length).toEqual(4);
 
@@ -116,7 +120,7 @@ describe('Partition by values transformer', () => {
       },
     };
 
-    let partitioned = partitionByValuesTransformer.transformer(config)(source);
+    let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source);
 
     expect(partitioned[0].name).toEqual('region=Europe status=OK');
     expect(partitioned[1].name).toEqual('region=Europe status=FAIL');
@@ -144,7 +148,7 @@ describe('Partition by values transformer', () => {
      },
    };
 
-    let partitioned = partitionByValuesTransformer.transformer(config)(source);
+    let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source);
 
    expect(partitioned[0].name).toEqual('XYZ Europe OK');
    expect(partitioned[1].name).toEqual('XYZ Europe FAIL');
@@ -173,7 +177,7 @@ describe('Partition by values transformer', () => {
      },
    };
 
-    let partitioned = partitionByValuesTransformer.transformer(config)(source);
+    let partitioned = partitionByValuesTransformer.transformer(config, ctx)(source);
 
    expect(partitioned[0].name).toEqual('XYZ region=Europe status=OK');
    expect(partitioned[1].name).toEqual('XYZ region=Europe status=FAIL');
@@ -6,6 +6,7 @@ import {
   DataTransformerID,
   SynchronousDataTransformerInfo,
   getFieldMatcher,
+  DataTransformContext,
 } from '@grafana/data';
 import { getMatcherConfig } from '@grafana/data/src/transformations/transformers/filterByName';
 import { noopTransformer } from '@grafana/data/src/transformations/transformers/noop';
@@ -49,14 +50,14 @@ export const partitionByValuesTransformer: SynchronousDataTransformerInfo<Partit
   description: `Splits a one-frame dataset into multiple series discriminated by unique/enum values in one or more fields.`,
   defaultOptions: {},
 
-  operator: (options) => (source) =>
-    source.pipe(map((data) => partitionByValuesTransformer.transformer(options)(data))),
+  operator: (options, ctx) => (source) =>
+    source.pipe(map((data) => partitionByValuesTransformer.transformer(options, ctx)(data))),
 
-  transformer: (options: PartitionByValuesTransformerOptions) => {
+  transformer: (options: PartitionByValuesTransformerOptions, ctx: DataTransformContext) => {
     const matcherConfig = getMatcherConfig({ names: options.fields });
 
     if (!matcherConfig) {
-      return noopTransformer.transformer({});
+      return noopTransformer.transformer({}, ctx);
     }
 
     const matcher = getFieldMatcher(matcherConfig);
@@ -11,6 +11,10 @@ import {
 
 import { prepareTimeSeriesTransformer, PrepareTimeSeriesOptions, timeSeriesFormat } from './prepareTimeSeries';
 
+const ctx = {
+  interpolate: (v: string) => v,
+};
+
 describe('Prepare time series transformer', () => {
   it('should transform wide to multi', () => {
     const source = [
@@ -29,7 +33,7 @@ describe('Prepare time series transformer', () => {
       format: timeSeriesFormat.TimeSeriesMulti,
     };
 
-    expect(prepareTimeSeriesTransformer.transformer(config)(source)).toEqual([
+    expect(prepareTimeSeriesTransformer.transformer(config, ctx)(source)).toEqual([
       toEquableDataFrame({
         name: 'wide',
         refId: 'A',
@@ -75,7 +79,7 @@ describe('Prepare time series transformer', () => {
       format: timeSeriesFormat.TimeSeriesMulti,
     };
 
-    const frames = prepareTimeSeriesTransformer.transformer(config)(source);
+    const frames = prepareTimeSeriesTransformer.transformer(config, ctx)(source);
     expect(frames.length).toEqual(4);
     expect(
       frames.map((f) => ({
@@ -171,7 +175,7 @@ describe('Prepare time series transformer', () => {
       format: timeSeriesFormat.TimeSeriesMulti,
     };
 
-    expect(prepareTimeSeriesTransformer.transformer(config)(source)).toEqual([
+    expect(prepareTimeSeriesTransformer.transformer(config, ctx)(source)).toEqual([
       toEquableDataFrame({
         name: 'wide',
         refId: 'A',
@@ -235,7 +239,7 @@ describe('Prepare time series transformer', () => {
       format: timeSeriesFormat.TimeSeriesMulti,
     };
 
-    expect(toEquableDataFrames(prepareTimeSeriesTransformer.transformer(config)(source))).toEqual(
+    expect(toEquableDataFrames(prepareTimeSeriesTransformer.transformer(config, ctx)(source))).toEqual(
       toEquableDataFrames(
         source.map((frame) => ({
           ...frame,
@@ -273,7 +277,7 @@ describe('Prepare time series transformer', () => {
       format: timeSeriesFormat.TimeSeriesMulti,
     };
 
-    expect(prepareTimeSeriesTransformer.transformer(config)(source)).toEqual([]);
+    expect(prepareTimeSeriesTransformer.transformer(config, ctx)(source)).toEqual([]);
   });
 
   it('should convert long to multi', () => {
@@ -293,7 +297,7 @@ describe('Prepare time series transformer', () => {
       format: timeSeriesFormat.TimeSeriesMulti,
     };
 
-    const frames = prepareTimeSeriesTransformer.transformer(config)(source);
+    const frames = prepareTimeSeriesTransformer.transformer(config, ctx)(source);
     expect(frames).toEqual([
       toEquableDataFrame({
         name: 'long',
@@ -339,7 +343,7 @@ describe('Prepare time series transformer', () => {
       format: timeSeriesFormat.TimeSeriesMany,
     };
 
-    const frames = prepareTimeSeriesTransformer.transformer(config)(source);
+    const frames = prepareTimeSeriesTransformer.transformer(config, ctx)(source);
     expect(frames).toEqual([
       toEquableDataFrame({
         name: 'wants-to-be-many',
|
@ -288,8 +288,8 @@ export const prepareTimeSeriesTransformer: SynchronousDataTransformerInfo<Prepar
|
||||
description: `Will stretch data frames from the wide format into the long format. This is really helpful to be able to keep backwards compatibility for panels not supporting the new wide format.`,
|
||||
defaultOptions: {},
|
||||
|
||||
operator: (options) => (source) =>
|
||||
source.pipe(map((data) => prepareTimeSeriesTransformer.transformer(options)(data))),
|
||||
operator: (options, ctx) => (source) =>
|
||||
source.pipe(map((data) => prepareTimeSeriesTransformer.transformer(options, ctx)(data))),
|
||||
|
||||
transformer: (options: PrepareTimeSeriesOptions) => {
|
||||
const format = options?.format ?? timeSeriesFormat.TimeSeriesWide;
|
||||
|