FieldValues: Use simple array in transformations and grafana/data (#66702)
Co-authored-by: Leon Sorokin <leeoniya@gmail.com>
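The pattern applied throughout this diff: Field.values becomes a plain array, so Vector-style access is replaced with ordinary array operations — values.get(i) becomes values[i], values.toArray() becomes values, and new ArrayVector([...]) becomes a plain array literal. A minimal before/after sketch in TypeScript (illustrative only; the field below is hypothetical, and it assumes the post-change @grafana/data API in which Field.values accepts a plain array):

import { Field, FieldType } from '@grafana/data';

// Before this change: values were wrapped in a Vector (e.g. ArrayVector) and read via .get() / .toArray()
// const field: Field = { name: 'value', type: FieldType.number, config: {}, values: new ArrayVector([1, 2, 3]) };
// const first = field.values.get(0);
// const all = field.values.toArray();

// After this change: values is a simple array, read with normal indexing
const field: Field = { name: 'value', type: FieldType.number, config: {}, values: [1, 2, 3] };
const first = field.values[0]; // 1
const all = field.values; // [1, 2, 3]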
@@ -32,7 +32,7 @@ export class DataFrameView<T = any> extends FunctionalVector<T> {
}

fields[field.name] = field;
const getter = () => field.values.get(this.index);
const getter = () => field.values.get(this.index); // .get() to support all Vector types

if (!(obj as any).hasOwnProperty(field.name)) {
Object.defineProperty(obj, field.name, {

@@ -92,7 +92,7 @@ describe('FieldCache', () => {
it('should get the first field with a duplicate name', () => {
const field = fieldCache.getFieldByName('value');
expect(field!.name).toEqual('value');
expect(field!.values.toArray()).toEqual([1, 2, 3]);
expect(field!.values).toEqual([1, 2, 3]);
});

it('should return index of the field', () => {
@@ -29,11 +29,11 @@ export const getColumnFromDimension = (dimension: Dimension, column: number) =>
};

export const getValueFromDimension = (dimension: Dimension, column: number, row: number) => {
return dimension.columns[column].values.get(row);
return dimension.columns[column].values[row];
};

export const getAllValuesFromDimension = (dimension: Dimension, column: number, row: number) => {
return dimension.columns.map((c) => c.values.get(row));
return dimension.columns.map((c) => c.values[row]);
};

export const getDimensionByName = (dimensions: Dimensions, name: string) => dimensions[name];

@@ -198,7 +198,7 @@ describe('toDataFrame', () => {
},
};
const dataFrame = toDataFrame(msg);
expect(dataFrame.fields.map((f) => ({ [f.name]: f.values.toArray() }))).toMatchInlineSnapshot(`
expect(dataFrame.fields.map((f) => ({ [f.name]: f.values }))).toMatchInlineSnapshot(`
[
{
"First": [
@@ -366,14 +366,14 @@ describe('sorted DataFrame', () => {
it('Should sort numbers', () => {
const sorted = sortDataFrame(frame, 0, true);
expect(sorted.length).toEqual(3);
expect(sorted.fields[0].values.toArray()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toArray()).toEqual(['c', 'b', 'a']);
expect(sorted.fields[0].values).toEqual([3, 2, 1]);
expect(sorted.fields[1].values).toEqual(['c', 'b', 'a']);
});

it('Should sort strings', () => {
const sorted = sortDataFrame(frame, 1, true);
expect(sorted.length).toEqual(3);
expect(sorted.fields[0].values.toArray()).toEqual([3, 2, 1]);
expect(sorted.fields[1].values.toArray()).toEqual(['c', 'b', 'a']);
expect(sorted.fields[0].values).toEqual([3, 2, 1]);
expect(sorted.fields[1].values).toEqual(['c', 'b', 'a']);
});
});
@@ -261,7 +261,7 @@ export function guessFieldTypeForField(field: Field): FieldType | undefined {

// 2. Check the first non-null value
for (let i = 0; i < field.values.length; i++) {
const v = field.values.get(i);
const v = field.values[i];
if (v != null) {
return guessFieldTypeFromValue(v);
}

@@ -362,8 +362,8 @@ export const toLegacyResponseData = (frame: DataFrame): TimeSeries | TableData =
// Make sure it is [value,time]
for (let i = 0; i < rowCount; i++) {
rows.push([
valueField.values.get(i), // value
timeField.values.get(i), // time
valueField.values[i], // value
timeField.values[i], // time
]);
}
@@ -381,7 +381,7 @@ export const toLegacyResponseData = (frame: DataFrame): TimeSeries | TableData =
for (let i = 0; i < rowCount; i++) {
const row: any[] = [];
for (let j = 0; j < fields.length; j++) {
row.push(fields[j].values.get(i));
row.push(fields[j].values[i]);
}
rows.push(row);
}

@@ -463,7 +463,7 @@ export function reverseDataFrame(data: DataFrame): DataFrame {
export function getDataFrameRow(data: DataFrame, row: number): any[] {
const values: any[] = [];
for (const field of data.fields) {
values.push(field.values.get(row));
values.push(field.values[row]);
}
return values;
}

@@ -53,8 +53,8 @@ export function getDisplayProcessor(options?: DisplayProcessorOptions): DisplayP
unit = `dateTimeAsSystem`;
hasDateUnit = true;
if (field.values && field.values.length > 1) {
let start = field.values.get(0);
let end = field.values.get(field.values.length - 1);
let start = field.values[0];
let end = field.values[field.values.length - 1];
if (typeof start === 'string') {
start = dateTimeParse(start).unix();
end = dateTimeParse(end).unix();
@@ -18,7 +18,7 @@ export const fieldIndexComparer = (field: Field, reverse = false): IndexComparer
case FieldType.boolean:
return booleanIndexComparer(values, reverse);
case FieldType.time:
if (typeof field.values.get(0) === 'number') {
if (typeof field.values[0] === 'number') {
return timestampIndexComparer(values, reverse);
}
return timeIndexComparer(values, reverse);

@@ -80,39 +80,38 @@ const falsyComparer = (a: unknown, b: unknown): number => {
};

const timestampIndexComparer = (values: Vector<number>, reverse: boolean): IndexComparer => {
let vals = values.toArray();
let mult = reverse ? -1 : 1;
return (a: number, b: number): number => mult * (vals[a] - vals[b]);
return (a: number, b: number): number => mult * (values[a] - values[b]);
};

const timeIndexComparer = (values: Vector<unknown>, reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values.get(a);
const vB = values.get(b);
const vA = values[a];
const vB = values[b];
return reverse ? timeComparer(vB, vA) : timeComparer(vA, vB);
};
};

const booleanIndexComparer = (values: Vector<boolean>, reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values.get(a);
const vB = values.get(b);
const vA = values[a];
const vB = values[b];
return reverse ? booleanComparer(vB, vA) : booleanComparer(vA, vB);
};
};

const numericIndexComparer = (values: Vector<number>, reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values.get(a);
const vB = values.get(b);
const vA = values[a];
const vB = values[b];
return reverse ? numericComparer(vB, vA) : numericComparer(vA, vB);
};
};

const stringIndexComparer = (values: Vector<string>, reverse: boolean): IndexComparer => {
return (a: number, b: number): number => {
const vA = values.get(a);
const vB = values.get(b);
const vA = values[a];
const vB = values[b];
return reverse ? stringComparer(vB, vA) : stringComparer(vA, vB);
};
};
@@ -140,7 +140,7 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
field.state = setIndexForPaletteColor(field, values.length);

const scopedVars = getFieldScopedVarsWithDataContexAndRowIndex(field, j);
const displayValue = display(field.values.get(j));
const displayValue = display(field.values[j]);
const rowName = getSmartDisplayNameForRow(dataFrame, field, j, replaceVariables, scopedVars);
const overrideColor = lookupRowColorFromOverride(rowName, options.fieldConfig, theme);

@@ -254,7 +254,7 @@ function getSmartDisplayNameForRow(
}

if (otherField.type === FieldType.string) {
const value = otherField.values.get(rowIndex) ?? '';
const value = otherField.values[rowIndex] ?? '';
const mappedValue = otherField.display ? otherField.display(value).text : value;
if (mappedValue.length > 0) {
parts.push(mappedValue);

@@ -935,7 +935,7 @@ describe('applyRawFieldOverrides', () => {

const getDisplayValue = (frames: DataFrame[], frameIndex: number, fieldIndex: number) => {
const field = frames[frameIndex].fields[fieldIndex];
const value = field.values.get(0);
const value = field.values[0];
return field.display!(value);
};
@@ -252,7 +252,7 @@ export const fieldReducers = new Registry<FieldReducerInfo>(() => [
name: 'All values',
description: 'Returns an array with all values',
standard: false,
reduce: (field: Field) => ({ allValues: field.values.toArray() }),
reduce: (field: Field) => ({ allValues: field.values }),
},
{
id: ReducerID.uniqueValues,

@@ -260,7 +260,7 @@ export const fieldReducers = new Registry<FieldReducerInfo>(() => [
description: 'Returns an array with all unique values',
standard: false,
reduce: (field: Field) => ({
uniqueValues: [...new Set(field.values.toArray())],
uniqueValues: [...new Set(field.values)],
}),
},
]);
@@ -290,13 +290,13 @@ export function doStandardCalcs(field: Field, ignoreNulls: boolean, nullAsZero:
previousDeltaUp: true,
};

const data = field.values;
calcs.count = ignoreNulls ? data.length : data.toArray().filter((val) => val != null).length;
const data = field.values.toArray(); // toArray() ensures we handle all vector types
calcs.count = ignoreNulls ? data.length : data.filter((val) => val != null).length;

const isNumberField = field.type === FieldType.number || FieldType.time;

for (let i = 0; i < data.length; i++) {
let currentValue = data.get(i);
let currentValue = data[i];

if (i === 0) {
calcs.first = currentValue;

@@ -404,13 +404,13 @@ export function doStandardCalcs(field: Field, ignoreNulls: boolean, nullAsZero:
}

function calculateFirst(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
return { first: field.values.get(0) };
return { first: field.values[0] };
}

function calculateFirstNotNull(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
for (let idx = 0; idx < data.length; idx++) {
const v = data.get(idx);
const v = data[idx];
if (v != null && v !== undefined) {
return { firstNotNull: v };
}

@@ -420,14 +420,14 @@ function calculateFirstNotNull(field: Field, ignoreNulls: boolean, nullAsZero: b

function calculateLast(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
return { last: data.get(data.length - 1) };
return { last: data[data.length - 1] };
}

function calculateLastNotNull(field: Field, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const data = field.values;
let idx = data.length - 1;
while (idx >= 0) {
const v = data.get(idx--);
const v = data[idx--];
if (v != null && v !== undefined) {
return { lastNotNull: v };
}

@@ -447,7 +447,7 @@ function calculateStdDev(field: Field, ignoreNulls: boolean, nullAsZero: boolean
let runningNonNullCount = 0;
const data = field.values;
for (let i = 0; i < data.length; i++) {
const currentValue = data.get(i);
const currentValue = data[i];
if (currentValue != null) {
runningNonNullCount++;
let _oldMean = runningMean;

@@ -468,7 +468,7 @@ function calculateChangeCount(field: Field, ignoreNulls: boolean, nullAsZero: bo
let first = true;
let last = null;
for (let i = 0; i < data.length; i++) {
let currentValue = data.get(i);
let currentValue = data[i];
if (currentValue === null) {
if (ignoreNulls) {
continue;

@@ -491,7 +491,7 @@ function calculateDistinctCount(field: Field, ignoreNulls: boolean, nullAsZero:
const data = field.values;
const distinct = new Set();
for (let i = 0; i < data.length; i++) {
let currentValue = data.get(i);
let currentValue = data[i];
if (currentValue === null) {
if (ignoreNulls) {
continue;
@@ -85,7 +85,7 @@ describe('transformDataFrame', () => {
const processed = received[0];
expect(processed[0].length).toEqual(1);
expect(processed[0].fields.length).toEqual(1);
expect(processed[0].fields[0].values.get(0)).toEqual(3);
expect(processed[0].fields[0].values[0]).toEqual(3);
});
});

@@ -112,7 +112,7 @@ describe('transformDataFrame', () => {
const processed = received[0];
expect(processed[0].length).toEqual(1);
expect(processed[0].fields.length).toEqual(2);
expect(processed[0].fields[0].values.get(0)).toEqual('temperature');
expect(processed[0].fields[0].values[0]).toEqual('temperature');
});
});

@@ -142,7 +142,7 @@ describe('transformDataFrame', () => {

// Only apply A
await expect(transformDataFrame(cfg, [frameA, frameB])).toEmitValuesWith((received) => {
const processed = received[0].map((v) => v.fields[0].values.toArray());
const processed = received[0].map((v) => v.fields[0].values);
expect(processed).toBeTruthy();
expect(processed).toMatchObject([[5], [7, 8]]);
});

@@ -150,7 +150,7 @@ describe('transformDataFrame', () => {
// Only apply to B
cfg[0].filter.options = 'B';
await expect(transformDataFrame(cfg, [frameA, frameB])).toEmitValuesWith((received) => {
const processed = received[0].map((v) => v.fields[0].values.toArray());
const processed = received[0].map((v) => v.fields[0].values);
expect(processed).toBeTruthy();
expect(processed).toMatchObject([[5, 6], [7]]);
});

@@ -181,7 +181,7 @@ describe('transformDataFrame', () => {
const processed = received[0];
expect(processed[0].length).toEqual(1);
expect(processed[0].fields.length).toEqual(1);
expect(processed[0].fields[0].values.get(0)).toEqual(0.03);
expect(processed[0].fields[0].values[0]).toEqual(0.03);
});
});
it('supports trailing custom transformation', async () => {

@@ -208,7 +208,7 @@ describe('transformDataFrame', () => {
const processed = received[0];
expect(processed[0].length).toEqual(1);
expect(processed[0].fields.length).toEqual(1);
expect(processed[0].fields[0].values.get(0)).toEqual(0.03);
expect(processed[0].fields[0].values[0]).toEqual(0.03);
});
});

@@ -237,7 +237,7 @@ describe('transformDataFrame', () => {
const processed = received[0];
expect(processed[0].length).toEqual(1);
expect(processed[0].fields.length).toEqual(1);
expect(processed[0].fields[0].values.get(0)).toEqual(0.06);
expect(processed[0].fields[0].values[0]).toEqual(0.06);
});
});
});
@@ -234,7 +234,7 @@ describe('calculateField transformer w/ timeseries', () => {
await expect(transformDataFrame([cfg], [seriesBC])).toEmitValuesWith((received) => {
const data = received[0][0];
expect(data.fields.length).toEqual(1);
expect(data.fields[0].values.toArray()).toEqual([0, 1]);
expect(data.fields[0].values).toEqual([0, 1]);
});
});

@@ -5,7 +5,7 @@ import { getTimeField } from '../../dataframe/processDataFrame';
import { getFieldDisplayName } from '../../field';
import { DataFrame, DataTransformerInfo, Field, FieldType, NullValueMode, Vector } from '../../types';
import { BinaryOperationID, binaryOperators } from '../../utils/binaryOperators';
import { BinaryOperationVector, ConstantVector, IndexVector } from '../../vector';
import { BinaryOperationVector, ConstantVector } from '../../vector';
import { AsNumberVector } from '../../vector/AsNumberVector';
import { RowVector } from '../../vector/RowVector';
import { doStandardCalcs, fieldReducers, ReducerID } from '../fieldReducer';

@@ -104,11 +104,8 @@ export const calculateFieldTransformer: DataTransformerInfo<CalculateFieldTransf
const f = {
name: options.alias ?? 'Row',
type: FieldType.number,
values: new IndexVector(frame.length),
config: {
min: 0,
max: frame.length - 1,
},
values: [...Array(frame.length).keys()],
config: {},
};
return {
...frame,
@@ -89,7 +89,7 @@ export function concatenateFields(data: DataFrame[], opts: ConcatenateTransforme
if (f.values.length === maxLength) {
return f;
}
const values = f.values.toArray();
const values = f.values;
values.length = maxLength;
return {
...f,

@@ -174,7 +174,7 @@ describe('field convert types transformer', () => {
expect(
numbers[0].fields.map((f) => ({
type: f.type,
values: f.values.toArray(),
values: f.values,
}))
).toEqual([
{ type: FieldType.number, values: [1, 2, 3, 4, 5] },

@@ -212,7 +212,7 @@ describe('field convert types transformer', () => {
expect(
booleans[0].fields.map((f) => ({
type: f.type,
values: f.values.toArray(),
values: f.values,
}))
).toEqual([
{

@@ -276,7 +276,7 @@ describe('field convert types transformer', () => {
expect(
complex[0].fields.map((f) => ({
type: f.type,
values: f.values.toArray(),
values: f.values,
}))
).toEqual([
{

@@ -324,7 +324,7 @@ describe('field convert types transformer', () => {
expect(
stringified[0].fields.map((f) => ({
type: f.type,
values: f.values.toArray(),
values: f.values,
}))
).toEqual([
{

@@ -354,7 +354,7 @@ describe('field convert types transformer', () => {
],
}),
])[0].fields[0];
expect(stringified.values.toArray()).toEqual([
expect(stringified.values).toEqual([
'2021-07',
'2021-07',
'2021-07', // can group by month
@@ -117,7 +117,7 @@ const iso8601Regex = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{3,})?(?:Z|[-+]
export function fieldToTimeField(field: Field, dateFormat?: string): Field {
let opts = dateFormat ? { format: dateFormat } : undefined;

const timeValues = field.values.toArray().slice();
const timeValues = field.values.slice();

let firstDefined = timeValues.find((v) => v != null);

@@ -140,7 +140,7 @@ export function fieldToTimeField(field: Field, dateFormat?: string): Field {
}

function fieldToNumberField(field: Field): Field {
const numValues = field.values.toArray().slice();
const numValues = field.values.slice();

const valuesAsStrings = numValues.some((v) => typeof v === 'string');

@@ -166,7 +166,7 @@ function fieldToNumberField(field: Field): Field {
}

function fieldToBooleanField(field: Field): Field {
const booleanValues = field.values.toArray().slice();
const booleanValues = field.values.slice();

for (let b = 0; b < booleanValues.length; b++) {
booleanValues[b] = Boolean(!!booleanValues[b]);

@@ -180,7 +180,7 @@ function fieldToBooleanField(field: Field): Field {
}

function fieldToStringField(field: Field, dateFormat?: string): Field {
let values = field.values.toArray();
let values = field.values;

switch (field.type) {
case FieldType.time:

@@ -203,7 +203,7 @@ function fieldToStringField(field: Field, dateFormat?: string): Field {
}

function fieldToComplexField(field: Field): Field {
const complexValues = field.values.toArray().slice();
const complexValues = field.values.slice();

for (let s = 0; s < complexValues.length; s++) {
try {

@@ -229,7 +229,7 @@ function fieldToComplexField(field: Field): Field {
* @public
*/
export function ensureTimeField(field: Field, dateFormat?: string): Field {
const firstValueTypeIsNumber = typeof field.values.get(0) === 'number';
const firstValueTypeIsNumber = typeof field.values[0] === 'number';
if (field.type === FieldType.time && firstValueTypeIsNumber) {
return field; //already time
}

@@ -244,7 +244,7 @@ export function ensureTimeField(field: Field, dateFormat?: string): Field {

function fieldToEnumField(field: Field, cfg?: EnumFieldConfig): Field {
const enumConfig = { ...cfg };
const enumValues = field.values.toArray().slice();
const enumValues = field.values.slice();
const lookup = new Map<unknown, number>();
if (enumConfig.text) {
for (let i = 0; i < enumConfig.text.length; i++) {
@@ -100,12 +100,12 @@ export const filterByValueTransformer: DataTransformerInfo<FilterByValueTransfor

for (let index = 0; index < frame.length; index++) {
if (include && rows.has(index)) {
buffer.push(field.values.get(index));
buffer.push(field.values[index]);
continue;
}

if (!include && !rows.has(index)) {
buffer.push(field.values.get(index));
buffer.push(field.values[index]);
continue;
}
}

@@ -65,7 +65,7 @@ export const groupByTransformer: DataTransformerInfo<GroupByTransformerOptions>
// group for a given field.
const valuesByGroupKey = new Map<string, Record<string, MutableField>>();
for (let rowIndex = 0; rowIndex < frame.length; rowIndex++) {
const groupKey = String(groupByFields.map((field) => field.values.get(rowIndex)));
const groupKey = String(groupByFields.map((field) => field.values[rowIndex]));
const valuesByField = valuesByGroupKey.get(groupKey) ?? {};

if (!valuesByGroupKey.has(groupKey)) {

@@ -84,7 +84,7 @@ export const groupByTransformer: DataTransformerInfo<GroupByTransformerOptions>
};
}

valuesByField[fieldName].values.add(field.values.get(rowIndex));
valuesByField[fieldName].values.push(field.values[rowIndex]);
}
}

@@ -95,7 +95,7 @@ export const groupByTransformer: DataTransformerInfo<GroupByTransformerOptions>
const fieldName = getFieldDisplayName(field);

valuesByGroupKey.forEach((value) => {
values.add(value[fieldName].values.get(0));
values.push(value[fieldName].values[0]);
});

fields.push({

@@ -64,9 +64,9 @@ export const groupingToMatrixTransformer: DataTransformerInfo<GroupingToMatrixTr
const matrixValues: { [key: string]: { [key: string]: any } } = {};

for (let index = 0; index < valueField.values.length; index++) {
const columnName = keyColumnField.values.get(index);
const rowName = keyRowField.values.get(index);
const value = valueField.values.get(index);
const columnName = keyColumnField.values[index];
const rowName = keyRowField.values[index];
const value = valueField.values[index];

if (!matrixValues[columnName]) {
matrixValues[columnName] = {};

@@ -115,7 +115,7 @@ function uniqueValues(values: Vector): any[] {
const unique = new Set();

for (let index = 0; index < values.length; index++) {
unique.add(values.get(index));
unique.add(values[index]);
}

return Array.from(unique);
@@ -26,7 +26,7 @@ describe('histogram frames frames', () => {
|
||||
expect(
|
||||
out.fields.map((f) => ({
|
||||
name: f.name,
|
||||
values: f.values.toArray(),
|
||||
values: f.values,
|
||||
config: f.config,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
@@ -140,7 +140,7 @@ describe('histogram frames frames', () => {
|
||||
expect(
|
||||
out2.fields.map((f) => ({
|
||||
name: f.name,
|
||||
values: f.values.toArray(),
|
||||
values: f.values,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
|
||||
@@ -151,7 +151,7 @@ export function getHistogramFields(frame: DataFrame): HistogramFields | undefine
|
||||
|
||||
// guess bucket size from single explicit bucket field
|
||||
if (!xMax && xMin && xMin.values.length > 1) {
|
||||
let vals = xMin.values.toArray();
|
||||
let vals = xMin.values;
|
||||
let bucketSize = roundDecimals(vals[1] - vals[0], 6);
|
||||
|
||||
xMax = {
|
||||
@@ -162,7 +162,7 @@ export function getHistogramFields(frame: DataFrame): HistogramFields | undefine
|
||||
}
|
||||
|
||||
if (!xMin && xMax && xMax?.values.length > 1) {
|
||||
let vals = xMax.values.toArray();
|
||||
let vals = xMax.values;
|
||||
let bucketSize = roundDecimals(vals[1] - vals[0], 6);
|
||||
|
||||
xMin = {
|
||||
@@ -199,7 +199,7 @@ export function buildHistogram(frames: DataFrame[], options?: HistogramTransform
|
||||
for (const frame of frames) {
|
||||
for (const field of frame.fields) {
|
||||
if (field.type === FieldType.number) {
|
||||
allValues = allValues.concat(field.values.toArray());
|
||||
allValues = allValues.concat(field.values);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -253,7 +253,7 @@ export function buildHistogram(frames: DataFrame[], options?: HistogramTransform
|
||||
for (const frame of frames) {
|
||||
for (const field of frame.fields) {
|
||||
if (field.type === FieldType.number) {
|
||||
let fieldHist = histogram(field.values.toArray(), getBucket, histFilter, histSort) as AlignedData;
|
||||
let fieldHist = histogram(field.values, getBucket, histFilter, histSort) as AlignedData;
|
||||
histograms.push(fieldHist);
|
||||
counts.push({
|
||||
...field,
|
||||
|
||||
@@ -32,7 +32,7 @@ describe('align frames', () => {
|
||||
expect(
|
||||
out.fields.map((f) => ({
|
||||
name: f.name,
|
||||
values: f.values.toArray(),
|
||||
values: f.values,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
@@ -85,7 +85,7 @@ describe('align frames', () => {
|
||||
expect(
|
||||
out.fields.map((f) => ({
|
||||
name: f.name,
|
||||
values: f.values.toArray(),
|
||||
values: f.values,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
@@ -149,7 +149,7 @@ describe('align frames', () => {
|
||||
expect(
|
||||
out.fields.map((f) => ({
|
||||
name: f.name,
|
||||
values: f.values.toArray(),
|
||||
values: f.values,
|
||||
state: f.state,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
@@ -242,7 +242,7 @@ describe('align frames', () => {
|
||||
expect(
|
||||
out.fields.map((f) => ({
|
||||
name: f.name,
|
||||
values: f.values.toArray(),
|
||||
values: f.values,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
@@ -289,7 +289,7 @@ describe('align frames', () => {
|
||||
expect(
|
||||
out.fields.map((f) => ({
|
||||
name: f.name,
|
||||
values: f.values.toArray(),
|
||||
values: f.values,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
|
||||
@@ -207,10 +207,10 @@ export function joinDataFrames(options: JoinOptions): DataFrame | undefined {
|
||||
}
|
||||
|
||||
nullModes.push(nullModesFrame);
|
||||
const a: AlignedData = [join.values.toArray()]; //
|
||||
const a: AlignedData = [join.values]; //
|
||||
|
||||
for (const field of fields) {
|
||||
a.push(field.values.toArray());
|
||||
a.push(field.values);
|
||||
originalFields.push(field);
|
||||
// clear field displayName state
|
||||
delete field.state?.displayName;
|
||||
|
||||
@@ -259,7 +259,7 @@ describe('Labels as Columns', () => {
|
||||
});
|
||||
|
||||
await expect(transformDataFrame([cfg], [source])).toEmitValuesWith((received) => {
|
||||
expect(received[0][0].fields.map((f) => ({ [f.name]: f.values.toArray() }))).toMatchInlineSnapshot(`
|
||||
expect(received[0][0].fields.map((f) => ({ [f.name]: f.values }))).toMatchInlineSnapshot(`
|
||||
[
|
||||
{
|
||||
"time": [
|
||||
@@ -346,9 +346,8 @@ describe('Labels as Columns', () => {
|
||||
});
|
||||
|
||||
await expect(transformDataFrame([cfg], [source])).toEmitValuesWith((received) => {
|
||||
expect(
|
||||
received[0].map((f) => ({ name: f.name, fields: f.fields.map((v) => ({ [v.name]: v.values.toArray() })) }))
|
||||
).toMatchInlineSnapshot(`
|
||||
expect(received[0].map((f) => ({ name: f.name, fields: f.fields.map((v) => ({ [v.name]: v.values })) })))
|
||||
.toMatchInlineSnapshot(`
|
||||
[
|
||||
{
|
||||
"fields": [
|
||||
@@ -410,9 +409,8 @@ describe('Labels as Columns', () => {
|
||||
});
|
||||
|
||||
await expect(transformDataFrame([cfg], [source])).toEmitValuesWith((received) => {
|
||||
expect(
|
||||
received[0].map((f) => ({ name: f.name, fields: f.fields.map((v) => ({ [v.name]: v.values.toArray() })) }))
|
||||
).toMatchInlineSnapshot(`
|
||||
expect(received[0].map((f) => ({ name: f.name, fields: f.fields.map((v) => ({ [v.name]: v.values })) })))
|
||||
.toMatchInlineSnapshot(`
|
||||
[
|
||||
{
|
||||
"fields": [
|
||||
@@ -457,7 +455,7 @@ describe('Labels as Columns', () => {
|
||||
function toSimpleObject(frame: DataFrame) {
|
||||
const obj: Record<string, unknown> = {};
|
||||
for (const field of frame.fields) {
|
||||
obj[field.name] = field.values.toArray();
|
||||
obj[field.name] = field.values;
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
@@ -595,7 +595,5 @@ const createField = (
|
||||
};
|
||||
|
||||
const unwrap = (fields: Field[]): Field[] => {
|
||||
return fields.map((field) =>
|
||||
createField(field.name, field.type, field.values.toArray(), field.config, field.display)
|
||||
);
|
||||
return fields.map((field) => createField(field.name, field.type, field.values, field.config, field.display));
|
||||
};
|
||||
|
||||
@@ -95,5 +95,5 @@ describe('SortBy transformer', () => {
|
||||
});
|
||||
|
||||
function getFieldSnapshot(f: Field): Object {
|
||||
return { name: f.name, values: f.values.toArray() };
|
||||
return { name: f.name, values: f.values };
|
||||
}
|
||||
|
||||
@@ -308,7 +308,7 @@ export function toCSV(data: DataFrame[], config?: CSVConfig): string {
|
||||
csv = csv + config.delimiter;
|
||||
}
|
||||
|
||||
const v = fields[j].values.get(i);
|
||||
const v = fields[j].values[i];
|
||||
if (v !== null) {
|
||||
csv = csv + writers[j](v);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { DataLink, FieldType, TimeRange } from '../types';
|
||||
import { ArrayVector } from '../vector';
|
||||
|
||||
import { mapInternalLinkToExplore } from './dataLinks';
|
||||
|
||||
@@ -24,7 +23,7 @@ describe('mapInternalLinkToExplore', () => {
|
||||
name: 'test',
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
values: new ArrayVector([2]),
|
||||
values: [2],
|
||||
},
|
||||
replaceVariables: (val) => val,
|
||||
});
|
||||
@@ -65,7 +64,7 @@ describe('mapInternalLinkToExplore', () => {
|
||||
name: 'test',
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
values: new ArrayVector([2]),
|
||||
values: [2],
|
||||
},
|
||||
replaceVariables: (val) => val,
|
||||
});
|
||||
@@ -112,7 +111,7 @@ describe('mapInternalLinkToExplore', () => {
|
||||
name: 'test',
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
values: new ArrayVector([2]),
|
||||
values: [2],
|
||||
},
|
||||
replaceVariables: (val, scopedVars) => val.replace(/\$var/g, scopedVars!['var1']!.value),
|
||||
});
|
||||
|
||||
@@ -1,25 +1,24 @@
|
||||
import { Field, FieldType } from '../types';
|
||||
import { ArrayVector } from '../vector';
|
||||
|
||||
import { getSeriesTimeStep, hasMsResolution } from './series';
|
||||
|
||||
const uniformTimeField: Field = {
|
||||
name: 'time',
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([0, 100, 200, 300]),
|
||||
values: [0, 100, 200, 300],
|
||||
config: {},
|
||||
};
|
||||
const nonUniformTimeField: Field = {
|
||||
name: 'time',
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([0, 100, 300, 350]),
|
||||
values: [0, 100, 300, 350],
|
||||
config: {},
|
||||
};
|
||||
|
||||
const msResolutionTimeField: Field = {
|
||||
name: 'time',
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([0, 1572951685007, 300, 350]),
|
||||
values: [0, 1572951685007, 300, 350],
|
||||
config: {},
|
||||
};
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ export const getSeriesTimeStep = (timeField: Field): number => {
|
||||
let returnTimeStep = Number.MAX_VALUE;
|
||||
|
||||
for (let i = 0; i < timeField.values.length; i++) {
|
||||
const currentTime = timeField.values.get(i);
|
||||
const currentTime = timeField.values[i];
|
||||
|
||||
if (previousTime !== undefined) {
|
||||
const timeStep = currentTime - previousTime;
|
||||
@@ -34,7 +34,7 @@ export const getSeriesTimeStep = (timeField: Field): number => {
|
||||
*/
|
||||
export const hasMsResolution = (timeField: Field) => {
|
||||
for (let i = 0; i < timeField.values.length; i++) {
|
||||
const value = timeField.values.get(i);
|
||||
const value = timeField.values[i];
|
||||
if (value !== null && value !== undefined) {
|
||||
const timestamp = value.toString();
|
||||
if (timestamp.length === 13 && timestamp % 1000 !== 0) {
|
||||
|
||||
@@ -13,6 +13,8 @@ interface AppendedVectorInfo<T> {
|
||||
* This may be more trouble than it is worth. This trades some computation time for
|
||||
* RAM -- rather than allocate a new array the size of all previous arrays, this just
|
||||
* points the correct index to their original array values
|
||||
*
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export class AppendedVectors<T = any> extends FunctionalVector<T> {
|
||||
length = 0;
|
||||
@@ -60,7 +62,7 @@ export class AppendedVectors<T = any> extends FunctionalVector<T> {
|
||||
for (let i = 0; i < this.source.length; i++) {
|
||||
const src = this.source[i];
|
||||
if (index >= src.start && index < src.end) {
|
||||
return src.values.get(index - src.start);
|
||||
return src.values[index - src.start];
|
||||
}
|
||||
}
|
||||
return undefined as unknown as T;
|
||||
|
||||
@@ -6,6 +6,7 @@ import { FunctionalVector } from './FunctionalVector';
|
||||
* This will force all values to be numbers
|
||||
*
|
||||
* @public
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export class AsNumberVector extends FunctionalVector<number> {
|
||||
constructor(private field: Vector) {
|
||||
|
||||
@@ -6,6 +6,7 @@ import { vectorToArray } from './vectorToArray';
|
||||
|
||||
/**
|
||||
* @public
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export class BinaryOperationVector extends FunctionalVector<number> {
|
||||
constructor(private left: Vector<number>, private right: Vector<number>, private operation: BinaryOperation) {
|
||||
|
||||
@@ -14,6 +14,7 @@ interface CircularOptions<T> {
|
||||
* to match a configured capacity.
|
||||
*
|
||||
* @public
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export class CircularVector<T = any> extends FunctionalVector<T> {
|
||||
private buffer: T[];
|
||||
|
||||
@@ -2,6 +2,7 @@ import { FunctionalVector } from './FunctionalVector';
|
||||
|
||||
/**
|
||||
* @public
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export class ConstantVector<T = any> extends FunctionalVector<T> {
|
||||
constructor(private value: T, private len: number) {
|
||||
|
||||
@@ -6,6 +6,7 @@ import { FunctionalVector } from './FunctionalVector';
|
||||
|
||||
/**
|
||||
* @public
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export class FormattedVector<T = any> extends FunctionalVector<string> {
|
||||
constructor(private source: Vector<T>, private formatter: DisplayProcessor) {
|
||||
|
||||
@@ -2,7 +2,10 @@ import { Vector } from '../types';
|
||||
|
||||
import { vectorToArray } from './vectorToArray';
|
||||
|
||||
/** @public */
|
||||
/**
|
||||
* @public
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export abstract class FunctionalVector<T = any> implements Vector<T> {
|
||||
abstract get length(): number;
|
||||
|
||||
@@ -187,6 +190,8 @@ const emptyarray: any[] = [];
|
||||
|
||||
/**
|
||||
* Use functional programming with your vector
|
||||
*
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export function vectorator<T>(vector: Vector<T>) {
|
||||
return {
|
||||
|
||||
@@ -6,6 +6,7 @@ import { vectorToArray } from './vectorToArray';
|
||||
/**
|
||||
* RowVector makes the row values look like a vector
|
||||
* @internal
|
||||
* @deprecated use a simple Arrays
|
||||
*/
|
||||
export class RowVector extends FunctionalVector<number> {
|
||||
constructor(private columns: Vector[]) {
|
||||
|
||||
@@ -95,7 +95,7 @@ describe('Heatmap transformer', () => {
|
||||
"type": "heatmap-cells",
|
||||
}
|
||||
`);
|
||||
expect(heatmap.fields[1].values.toArray()).toMatchInlineSnapshot(`
|
||||
expect(heatmap.fields[1].values).toMatchInlineSnapshot(`
|
||||
[
|
||||
0,
|
||||
1,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { map } from 'rxjs';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
FieldType,
|
||||
@@ -104,7 +103,7 @@ export interface RowsHeatmapOptions {
|
||||
export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
|
||||
// TODO: handle null-filling w/ fields[0].config.interval?
|
||||
const xField = opts.frame.fields[0];
|
||||
const xValues = xField.values.toArray();
|
||||
const xValues = xField.values;
|
||||
const yFields = opts.frame.fields.filter((f, idx) => f.type === FieldType.number && idx > 0);
|
||||
|
||||
// similar to initBins() below
|
||||
@@ -113,7 +112,7 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
|
||||
const ys = new Array(len);
|
||||
const counts2 = new Array(len);
|
||||
|
||||
const counts = yFields.map((field) => field.values.toArray().slice());
|
||||
const counts = yFields.map((field) => field.values.slice());
|
||||
|
||||
// transpose
|
||||
counts.forEach((bucketCounts, bi) => {
|
||||
@@ -197,13 +196,13 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
|
||||
{
|
||||
name: xField.type === FieldType.time ? 'xMax' : 'x',
|
||||
type: xField.type,
|
||||
values: new ArrayVector(xs),
|
||||
values: xs,
|
||||
config: xField.config,
|
||||
},
|
||||
{
|
||||
name: ordinalFieldName,
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector(ys),
|
||||
values: ys,
|
||||
config: {
|
||||
unit: 'short', // ordinal lookup
|
||||
},
|
||||
@@ -211,7 +210,7 @@ export function rowsToCellsHeatmap(opts: RowsHeatmapOptions): DataFrame {
|
||||
{
|
||||
name: opts.value?.length ? opts.value : 'Value',
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector(counts2),
|
||||
values: counts2,
|
||||
config: valueCfg,
|
||||
display: yFields[0].display,
|
||||
},
|
||||
@@ -229,7 +228,7 @@ export function prepBucketFrames(frames: DataFrame[]): DataFrame[] {
|
||||
frames.sort((a, b) => sortAscStrInf(a.name, b.name));
|
||||
|
||||
// cumulative counts
|
||||
const counts = frames.map((frame) => frame.fields[1].values.toArray().slice());
|
||||
const counts = frames.map((frame) => frame.fields[1].values.slice());
|
||||
|
||||
// de-accumulate
|
||||
counts.reverse();
|
||||
@@ -248,7 +247,7 @@ export function prepBucketFrames(frames: DataFrame[]): DataFrame[] {
|
||||
frame.fields[0],
|
||||
{
|
||||
...frame.fields[1],
|
||||
values: new ArrayVector(counts[i]),
|
||||
values: counts[i],
|
||||
},
|
||||
],
|
||||
}));
|
||||
@@ -289,10 +288,10 @@ export function calculateHeatmapFromData(frames: DataFrame[], options: HeatmapCa
|
||||
xField = x; // the first X
|
||||
}
|
||||
|
||||
const xValues = x.values.toArray();
|
||||
const xValues = x.values;
|
||||
for (let field of frame.fields) {
|
||||
if (field !== x && field.type === FieldType.number) {
|
||||
const yValues = field.values.toArray();
|
||||
const yValues = field.values;
|
||||
|
||||
for (let i = 0; i < xValues.length; i++, j++) {
|
||||
xs[j] = xValues[i];
|
||||
@@ -350,13 +349,13 @@ export function calculateHeatmapFromData(frames: DataFrame[], options: HeatmapCa
|
||||
{
|
||||
name: 'xMin',
|
||||
type: xField.type,
|
||||
values: new ArrayVector(heat2d.x),
|
||||
values: heat2d.x,
|
||||
config: xField.config,
|
||||
},
|
||||
{
|
||||
name: 'yMin',
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector(heat2d.y),
|
||||
values: heat2d.y,
|
||||
config: {
|
||||
...yField.config, // keep units from the original source
|
||||
custom: {
|
||||
@@ -367,7 +366,7 @@ export function calculateHeatmapFromData(frames: DataFrame[], options: HeatmapCa
|
||||
{
|
||||
name: 'Count',
|
||||
type: FieldType.number,
|
||||
values: new ArrayVector(heat2d.count),
|
||||
values: heat2d.count,
|
||||
config: {
|
||||
unit: 'short', // always integer
|
||||
},
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
DataTransformerInfo,
|
||||
@@ -51,7 +50,7 @@ export function extractConfigFromQuery(options: ConfigFromQueryTransformOptions,
|
||||
const fieldName = getFieldDisplayName(field, configFrame);
|
||||
const fieldMapping = mappingResult.index[fieldName];
|
||||
const result = reduceField({ field, reducers: [fieldMapping.reducerId] });
|
||||
newField.values = new ArrayVector([result[fieldMapping.reducerId]]);
|
||||
newField.values = [result[fieldMapping.reducerId]];
|
||||
reducedConfigFrame.fields.push(newField);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ArrayVector, DataFrame, Field, FieldType } from '@grafana/data';
|
||||
import { DataFrame, Field, FieldType } from '@grafana/data';
|
||||
import { toDataFrame } from '@grafana/data/src/dataframe/processDataFrame';
|
||||
|
||||
import { extractFieldsTransformer } from './extractFields';
|
||||
@@ -193,28 +193,28 @@ const testFieldTime: Field = {
|
||||
config: {},
|
||||
name: 'Time',
|
||||
type: FieldType.time,
|
||||
values: new ArrayVector([1669638911691]),
|
||||
values: [1669638911691],
|
||||
};
|
||||
|
||||
const testFieldString: Field = {
|
||||
config: {},
|
||||
name: 'String',
|
||||
type: FieldType.string,
|
||||
values: new ArrayVector(['Hallo World']),
|
||||
values: ['Hallo World'],
|
||||
};
|
||||
|
||||
const testFieldJSON: Field = {
|
||||
config: {},
|
||||
name: 'JSON',
|
||||
type: FieldType.string,
|
||||
values: new ArrayVector([
|
||||
values: [
|
||||
JSON.stringify({
|
||||
object: {
|
||||
nestedArray: [1, 2, 3, 4],
|
||||
nestedString: 'Hallo World',
|
||||
},
|
||||
}),
|
||||
]),
|
||||
],
|
||||
};
|
||||
|
||||
const testDataFrame: DataFrame = {
|
||||
|
||||
@@ -2,7 +2,6 @@ import { isString, get } from 'lodash';
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
Field,
|
||||
@@ -53,7 +52,7 @@ function addExtractedFields(frame: DataFrame, options: ExtractFieldsOptions): Da
|
||||
const values = new Map<string, any[]>();
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
let obj = source.values.get(i);
|
||||
let obj = source.values[i];
|
||||
|
||||
if (isString(obj)) {
|
||||
try {
|
||||
@@ -93,7 +92,7 @@ function addExtractedFields(frame: DataFrame, options: ExtractFieldsOptions): Da
|
||||
const buffer = values.get(name);
|
||||
return {
|
||||
name,
|
||||
values: new ArrayVector(buffer),
|
||||
values: buffer,
|
||||
type: buffer ? getFieldTypeFromValue(buffer.find((v) => v != null)) : FieldType.other,
|
||||
config: {},
|
||||
} as Field;
|
||||
|
||||
@@ -142,8 +142,8 @@ describe('Join by labels', () => {
|
||||
|
||||
function toRowsSnapshow(frame: DataFrame) {
|
||||
const columns = frame.fields.map((f) => f.name);
|
||||
const rows = frame.fields[0].values.toArray().map((v, idx) => {
|
||||
return frame.fields.map((f) => f.values.get(idx));
|
||||
const rows = frame.fields[0].values.map((v, idx) => {
|
||||
return frame.fields.map((f) => f.values[idx]);
|
||||
});
|
||||
return {
|
||||
columns,
|
||||
|
||||
@@ -1,13 +1,6 @@
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
Field,
|
||||
FieldType,
|
||||
SynchronousDataTransformerInfo,
|
||||
} from '@grafana/data';
|
||||
import { DataFrame, DataTransformerID, Field, FieldType, SynchronousDataTransformerInfo } from '@grafana/data';
|
||||
|
||||
import { getDistinctLabels } from '../utils';
|
||||
|
||||
@@ -72,7 +65,7 @@ export function joinByLabels(options: JoinByLabelsTransformOptions, data: DataFr
|
||||
found.set(key, item);
|
||||
}
|
||||
const name = field.labels[options.value];
|
||||
const vals = field.values.toArray();
|
||||
const vals = field.values;
|
||||
const old = item.values[name];
|
||||
if (old) {
|
||||
item.values[name] = old.concat(vals);
|
||||
@@ -117,7 +110,7 @@ export function joinByLabels(options: JoinByLabelsTransformOptions, data: DataFr
|
||||
name: join[i],
|
||||
config: {},
|
||||
type: FieldType.string,
|
||||
values: new ArrayVector(joinValues[i]),
|
||||
values: joinValues[i],
|
||||
});
|
||||
}
|
||||
|
||||
@@ -127,7 +120,7 @@ export function joinByLabels(options: JoinByLabelsTransformOptions, data: DataFr
|
||||
name: allNames[i],
|
||||
config: {},
|
||||
type: old.type ?? FieldType.number,
|
||||
values: new ArrayVector(nameValues[i]),
|
||||
values: nameValues[i],
|
||||
});
|
||||
}
|
||||
|
||||
@@ -139,7 +132,7 @@ function getErrorFrame(text: string): DataFrame {
|
||||
meta: {
|
||||
notices: [{ severity: 'error', text }],
|
||||
},
|
||||
fields: [{ name: 'Error', type: FieldType.string, config: {}, values: new ArrayVector([text]) }],
|
||||
fields: [{ name: 'Error', type: FieldType.string, config: {}, values: [text] }],
|
||||
length: 0,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -65,7 +65,7 @@ export function addFieldsFromGazetteer(frames: DataFrame[], gaz: Gazetteer, matc
|
||||
const found = gaz.find(values[v]);
|
||||
if (found?.index != null) {
|
||||
for (let i = 0; i < src.length; i++) {
|
||||
sub[i][v] = src[i].values.get(found.index);
|
||||
sub[i][v] = src[i].values[found.index];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,15 +35,15 @@ describe('Partition by values transformer', () => {
|
||||
expect(partitioned[0].name).toEqual('Europe');
|
||||
expect(partitioned[0].fields[0].name).toEqual('model');
|
||||
expect(partitioned[0].fields[1].name).toEqual('region');
|
||||
expect(partitioned[0].fields[0].values.toArray()).toEqual(['E1', 'E2', 'E3']);
|
||||
expect(partitioned[0].fields[1].values.toArray()).toEqual(['Europe', 'Europe', 'Europe']);
|
||||
expect(partitioned[0].fields[0].values).toEqual(['E1', 'E2', 'E3']);
|
||||
expect(partitioned[0].fields[1].values).toEqual(['Europe', 'Europe', 'Europe']);
|
||||
|
||||
expect(partitioned[1].length).toEqual(3);
|
||||
expect(partitioned[1].name).toEqual('China');
|
||||
expect(partitioned[1].fields[0].name).toEqual('model');
|
||||
expect(partitioned[1].fields[1].name).toEqual('region');
|
||||
expect(partitioned[1].fields[0].values.toArray()).toEqual(['C1', 'C2', 'C3']);
|
||||
expect(partitioned[1].fields[1].values.toArray()).toEqual(['China', 'China', 'China']);
|
||||
expect(partitioned[1].fields[0].values).toEqual(['C1', 'C2', 'C3']);
|
||||
expect(partitioned[1].fields[1].values).toEqual(['China', 'China', 'China']);
|
||||
});
|
||||
|
||||
it('should partition by multiple fields', () => {
|
||||
@@ -76,36 +76,36 @@ describe('Partition by values transformer', () => {
|
||||
expect(partitioned[0].fields[0].name).toEqual('model');
|
||||
expect(partitioned[0].fields[1].name).toEqual('region');
|
||||
expect(partitioned[0].fields[2].name).toEqual('status');
|
||||
expect(partitioned[0].fields[0].values.toArray()).toEqual(['E1']);
|
||||
expect(partitioned[0].fields[1].values.toArray()).toEqual(['Europe']);
|
||||
expect(partitioned[0].fields[2].values.toArray()).toEqual(['OK']);
|
||||
expect(partitioned[0].fields[0].values).toEqual(['E1']);
|
||||
expect(partitioned[0].fields[1].values).toEqual(['Europe']);
|
||||
expect(partitioned[0].fields[2].values).toEqual(['OK']);
|
||||
|
||||
expect(partitioned[1].length).toEqual(2);
|
||||
expect(partitioned[1].name).toEqual('Europe FAIL');
|
||||
expect(partitioned[1].fields[0].name).toEqual('model');
|
||||
expect(partitioned[1].fields[1].name).toEqual('region');
|
||||
expect(partitioned[1].fields[2].name).toEqual('status');
|
||||
expect(partitioned[1].fields[0].values.toArray()).toEqual(['E2', 'E3']);
|
||||
expect(partitioned[1].fields[1].values.toArray()).toEqual(['Europe', 'Europe']);
|
||||
expect(partitioned[1].fields[2].values.toArray()).toEqual(['FAIL', 'FAIL']);
|
||||
expect(partitioned[1].fields[0].values).toEqual(['E2', 'E3']);
|
||||
expect(partitioned[1].fields[1].values).toEqual(['Europe', 'Europe']);
|
||||
expect(partitioned[1].fields[2].values).toEqual(['FAIL', 'FAIL']);
|
||||
|
||||
expect(partitioned[2].length).toEqual(2);
|
||||
expect(partitioned[2].name).toEqual('China OK');
|
||||
expect(partitioned[2].fields[0].name).toEqual('model');
|
||||
expect(partitioned[2].fields[1].name).toEqual('region');
|
||||
expect(partitioned[2].fields[2].name).toEqual('status');
|
||||
expect(partitioned[2].fields[0].values.toArray()).toEqual(['C1', 'C2']);
|
||||
expect(partitioned[2].fields[1].values.toArray()).toEqual(['China', 'China']);
|
||||
expect(partitioned[2].fields[2].values.toArray()).toEqual(['OK', 'OK']);
|
||||
expect(partitioned[2].fields[0].values).toEqual(['C1', 'C2']);
|
||||
expect(partitioned[2].fields[1].values).toEqual(['China', 'China']);
|
||||
expect(partitioned[2].fields[2].values).toEqual(['OK', 'OK']);
|
||||
|
||||
expect(partitioned[3].length).toEqual(1);
|
||||
expect(partitioned[3].name).toEqual('China FAIL');
|
||||
expect(partitioned[3].fields[0].name).toEqual('model');
|
||||
expect(partitioned[3].fields[1].name).toEqual('region');
|
||||
expect(partitioned[3].fields[2].name).toEqual('status');
|
||||
expect(partitioned[3].fields[0].values.toArray()).toEqual(['C3']);
|
||||
expect(partitioned[3].fields[1].values.toArray()).toEqual(['China']);
|
||||
expect(partitioned[3].fields[2].values.toArray()).toEqual(['FAIL']);
|
||||
expect(partitioned[3].fields[0].values).toEqual(['C3']);
|
||||
expect(partitioned[3].fields[1].values).toEqual(['China']);
|
||||
expect(partitioned[3].fields[2].values).toEqual(['FAIL']);
|
||||
});
|
||||
|
||||
it('should partition by multiple fields with custom frame naming {withNames: true}', () => {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { map } from 'rxjs';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
SynchronousDataTransformerInfo,
|
||||
@@ -99,7 +98,7 @@ export function partitionByValues(
|
||||
options?: PartitionByValuesTransformerOptions
|
||||
): DataFrame[] {
|
||||
const keyFields = frame.fields.filter((f) => matcher(f, frame, [frame]))!;
|
||||
const keyFieldsVals = keyFields.map((f) => f.values.toArray());
|
||||
const keyFieldsVals = keyFields.map((f) => f.values);
|
||||
const names = keyFields.map((f) => f.name);
|
||||
|
||||
const frameNameOpts = {
|
||||
@@ -142,7 +141,7 @@ export function partitionByValues(
|
||||
meta: frame.meta,
|
||||
length: idxs.length,
|
||||
fields: filteredFields.map((f) => {
|
||||
const vals = f.values.toArray();
|
||||
const vals = f.values;
|
||||
const vals2 = Array(idxs.length);
|
||||
|
||||
for (let i = 0; i < idxs.length; i++) {
|
||||
@@ -157,7 +156,7 @@ export function partitionByValues(
|
||||
...f.labels,
|
||||
...fieldLabels,
|
||||
},
|
||||
values: new ArrayVector(vals2),
|
||||
values: vals2,
|
||||
};
|
||||
}),
|
||||
};
|
||||
|
||||
@@ -84,8 +84,8 @@ describe('Prepare time series transformer', () => {
|
||||
frames.map((f) => ({
|
||||
name: getFrameDisplayName(f),
|
||||
labels: f.fields[1].labels,
|
||||
time: f.fields[0].values.toArray(),
|
||||
values: f.fields[1].values.toArray(),
|
||||
time: f.fields[0].values,
|
||||
values: f.fields[1].values,
|
||||
}))
|
||||
).toMatchInlineSnapshot(`
|
||||
[
|
||||
|
||||
@@ -78,13 +78,13 @@ export function toTimeSeriesMulti(data: DataFrame[]): DataFrame[] {
|
||||
};
|
||||
const builders = new Map<string, frameBuilder>();
|
||||
for (let i = 0; i < frame.length; i++) {
|
||||
const time = timeField.values.get(i);
|
||||
const value = field.values.get(i);
|
||||
const time = timeField.values[i];
|
||||
const value = field.values[i];
|
||||
if (value === undefined || time == null) {
|
||||
continue; // skip values left over from join
|
||||
}
|
||||
|
||||
const key = labelFields.map((f) => f.values.get(i)).join('/');
|
||||
const key = labelFields.map((f) => f.values[i]).join('/');
|
||||
let builder = builders.get(key);
|
||||
if (!builder) {
|
||||
builder = {
|
||||
@@ -94,7 +94,7 @@ export function toTimeSeriesMulti(data: DataFrame[]): DataFrame[] {
|
||||
labels: {},
|
||||
};
|
||||
for (const label of labelFields) {
|
||||
builder.labels[label.name] = label.values.get(i);
|
||||
builder.labels[label.name] = label.values[i];
|
||||
}
|
||||
builders.set(key, builder);
|
||||
}
|
||||
@@ -218,7 +218,7 @@ export function toTimeSeriesLong(data: DataFrame[]): DataFrame[] {
|
||||
const uniqueFactorNamesWithWideIndices: string[] = [];
|
||||
|
||||
for (let wideRowIndex = 0; wideRowIndex < frame.length; wideRowIndex++) {
|
||||
sortedTimeRowIndices.push({ time: timeField.values.get(wideRowIndex), wideRowIndex: wideRowIndex });
|
||||
sortedTimeRowIndices.push({ time: timeField.values[wideRowIndex], wideRowIndex: wideRowIndex });
|
||||
}
|
||||
|
||||
for (const labelKeys in labelKeyToWideIndices) {
|
||||
@@ -257,7 +257,7 @@ export function toTimeSeriesLong(data: DataFrame[]): DataFrame[] {
|
||||
const rowValues: Record<string, any> = {};
|
||||
|
||||
for (const name of uniqueFactorNamesWithWideIndices) {
|
||||
rowValues[name] = frame.fields[uniqueFactorNamesToWideIndex[name]].values.get(wideRowIndex);
|
||||
rowValues[name] = frame.fields[uniqueFactorNamesToWideIndex[name]].values[wideRowIndex];
|
||||
}
|
||||
|
||||
let index = 0;
|
||||
@@ -271,7 +271,7 @@ export function toTimeSeriesLong(data: DataFrame[]): DataFrame[] {
|
||||
}
|
||||
}
|
||||
|
||||
rowValues[wideField.name] = wideField.values.get(wideRowIndex);
|
||||
rowValues[wideField.name] = wideField.values[wideRowIndex];
|
||||
}
|
||||
|
||||
rowValues[timeField.name] = time;
|
||||
|
||||
@@ -83,7 +83,7 @@ describe('Rows to fields', () => {
|
||||
|
||||
expect(result.fields[0].name).toBe('Temperature');
|
||||
expect(result.fields[0].config).toEqual({});
|
||||
expect(result.fields[0].values.get(0)).toBe(100);
|
||||
expect(result.fields[0].values[0]).toBe(100);
|
||||
});
|
||||
|
||||
it('Can handle colors', () => {
|
||||
@@ -150,7 +150,7 @@ describe('Rows to fields', () => {
|
||||
);
|
||||
|
||||
expect(result.fields[0].name).toEqual('Stockholm');
|
||||
expect(result.fields[0].values.get(0)).toEqual(20);
|
||||
expect(result.fields[0].values[0]).toEqual(20);
|
||||
});
|
||||
|
||||
it('Can handle number fields as name field', () => {
|
||||
@@ -172,6 +172,6 @@ describe('Rows to fields', () => {
|
||||
);
|
||||
|
||||
expect(result.fields[0].name).toEqual('10');
|
||||
expect(result.fields[0].values.get(0)).toEqual(1);
|
||||
expect(result.fields[0].values[0]).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,14 +1,6 @@
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
DataTransformerInfo,
|
||||
Field,
|
||||
getFieldDisplayName,
|
||||
Labels,
|
||||
} from '@grafana/data';
|
||||
import { DataFrame, DataTransformerID, DataTransformerInfo, Field, getFieldDisplayName, Labels } from '@grafana/data';
|
||||
|
||||
import {
|
||||
getFieldConfigFromFrame,
|
||||
@@ -53,15 +45,15 @@ export function rowsToFields(options: RowToFieldsTransformOptions, data: DataFra
|
||||
const outFields: Field[] = [];
|
||||
|
||||
for (let index = 0; index < nameField.values.length; index++) {
|
||||
const name = nameField.values.get(index);
|
||||
const value = valueField.values.get(index);
|
||||
const name = nameField.values[index];
|
||||
const value = valueField.values[index];
|
||||
const config = getFieldConfigFromFrame(data, index, mappingResult);
|
||||
const labels = getLabelsFromRow(data, index, mappingResult);
|
||||
|
||||
const field: Field = {
|
||||
name: `${name}`,
|
||||
type: valueField.type,
|
||||
values: new ArrayVector([value]),
|
||||
values: [value],
|
||||
config: config,
|
||||
labels,
|
||||
};
|
||||
@@ -87,7 +79,7 @@ function getLabelsFromRow(frame: DataFrame, index: number, mappingResult: Evalua
|
||||
continue;
|
||||
}
|
||||
|
||||
const value = field.values.get(index);
|
||||
const value = field.values[index];
|
||||
if (value != null) {
|
||||
labels[fieldName] = value;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { mergeMap, from } from 'rxjs';
|
||||
|
||||
import { ArrayVector, DataFrame, DataTransformerID, DataTransformerInfo, FieldType } from '@grafana/data';
|
||||
import { DataFrame, DataTransformerID, DataTransformerInfo, FieldType } from '@grafana/data';
|
||||
import { createGeometryCollection, createLineBetween } from 'app/features/geo/format/utils';
|
||||
import { getGeometryField, getLocationMatchers } from 'app/features/geo/utils/location';
|
||||
|
||||
@@ -64,7 +64,7 @@ async function doSetGeometry(frames: DataFrame[], options: SpatialTransformOptio
|
||||
...info.field,
|
||||
name,
|
||||
type: FieldType.geo,
|
||||
values: new ArrayVector([toLineString(info.field)]),
|
||||
values: [toLineString(info.field)],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
@@ -3,14 +3,14 @@ import { Geometry, LineString, Point } from 'ol/geom';
|
||||
import { toLonLat } from 'ol/proj';
|
||||
import { getArea, getLength } from 'ol/sphere';
|
||||
|
||||
import { ArrayVector, Field, FieldType } from '@grafana/data';
|
||||
import { Field, FieldType } from '@grafana/data';
|
||||
|
||||
import { SpatialCalculation, SpatialCalculationOption } from './models.gen';
|
||||
|
||||
/** Will return a field with a single row */
|
||||
export function toLineString(field: Field<Geometry | undefined>): LineString {
|
||||
const coords: number[][] = [];
|
||||
for (const geo of field.values.toArray()) {
|
||||
for (const geo of field.values) {
|
||||
if (geo) {
|
||||
coords.push(getCenterPoint(geo));
|
||||
}
|
||||
@@ -55,7 +55,7 @@ export function getCenterPointWGS84(geo?: Geometry): number[] | undefined {
|
||||
|
||||
/** Will return a new field with calculated values */
|
||||
export function doGeomeryCalculation(field: Field<Geometry | undefined>, options: SpatialCalculationOption): Field {
|
||||
const values = field.values.toArray();
|
||||
const values = field.values;
|
||||
const buffer = new Array(field.values.length);
|
||||
const op = options.calc ?? SpatialCalculation.Heading;
|
||||
const name = options.field ?? op;
|
||||
@@ -74,7 +74,7 @@ export function doGeomeryCalculation(field: Field<Geometry | undefined>, options
|
||||
config: {
|
||||
unit: 'areaM2',
|
||||
},
|
||||
values: new ArrayVector(buffer),
|
||||
values: buffer,
|
||||
};
|
||||
}
|
||||
case SpatialCalculation.Distance: {
|
||||
@@ -90,7 +90,7 @@ export function doGeomeryCalculation(field: Field<Geometry | undefined>, options
|
||||
config: {
|
||||
unit: 'lengthm',
|
||||
},
|
||||
values: new ArrayVector(buffer),
|
||||
values: buffer,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -103,7 +103,7 @@ export function doGeomeryCalculation(field: Field<Geometry | undefined>, options
|
||||
config: {
|
||||
unit: 'degree',
|
||||
},
|
||||
values: new ArrayVector(calculateBearings(values)),
|
||||
values: calculateBearings(values),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
DataTransformerID,
|
||||
DataTransformerInfo,
|
||||
@@ -45,7 +44,7 @@ export function timeSeriesToTableTransform(options: TimeSeriesTableTransformerOp
|
||||
// initialize fields from labels for each refId
|
||||
const refId2LabelFields = getLabelFields(data);
|
||||
|
||||
const refId2frameField: Record<string, Field<DataFrame, ArrayVector>> = {};
|
||||
const refId2frameField: Record<string, Field<DataFrame>> = {};
|
||||
|
||||
const result: DataFrame[] = [];
|
||||
|
||||
@@ -65,7 +64,7 @@ export function timeSeriesToTableTransform(options: TimeSeriesTableTransformerOp
|
||||
name: 'Trend' + (refId && Object.keys(refId2LabelFields).length > 1 ? ` #${refId}` : ''),
|
||||
type: FieldType.frame,
|
||||
config: {},
|
||||
values: new ArrayVector(),
|
||||
values: [],
|
||||
};
|
||||
refId2frameField[refId] = frameField;
|
||||
|
||||
@@ -91,9 +90,9 @@ export function timeSeriesToTableTransform(options: TimeSeriesTableTransformerOp
|
||||
}
|
||||
|
||||
// For each refId, initialize a field for each label name
|
||||
function getLabelFields(frames: DataFrame[]): Record<string, Record<string, Field<string, ArrayVector>>> {
|
||||
function getLabelFields(frames: DataFrame[]): Record<string, Record<string, Field<string>>> {
|
||||
// refId -> label name -> field
|
||||
const labelFields: Record<string, Record<string, Field<string, ArrayVector>>> = {};
|
||||
const labelFields: Record<string, Record<string, Field<string>>> = {};
|
||||
|
||||
for (const frame of frames) {
|
||||
if (!isTimeSeriesFrame(frame)) {
|
||||
@@ -117,7 +116,7 @@ function getLabelFields(frames: DataFrame[]): Record<string, Record<string, Fiel
|
||||
name: labelName,
|
||||
type: FieldType.string,
|
||||
config: {},
|
||||
values: new ArrayVector(),
|
||||
values: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||