Mirror of https://github.com/grafana/grafana.git
MutableDataFrame: Remove unique field name constraint and values field index and unused/seldom used stuff (#27573)
* MutableDataFrame needs work
* Updated users of values
* Update test
* Fixed ts issue
* Updated tests
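As the commit message notes, the per-name `values` index is removed from MutableDataFrame, duplicate field names are no longer special-cased, and `add()`/`set()` lose their `addMissingFields` flag, so values are read through `frame.fields[i].values`. A minimal usage sketch of the resulting API, assuming fields are declared up front; the frame shape and field names below are illustrative examples, not code from this repository:

import { FieldType, MutableDataFrame } from '@grafana/data';

// Declare fields explicitly; add() no longer creates missing fields on the fly.
const frame = new MutableDataFrame({
  fields: [
    { name: 'time', type: FieldType.time },
    { name: 'value', type: FieldType.number },
  ],
});

// add() appends one value per declared field (the old addMissingFields flag is gone).
frame.add({ time: 100, value: 1 });
frame.add({ time: 200, value: 2 });

// Read values through the fields array rather than the removed frame.values[name] index.
const times = frame.fields[0].values.toArray();  // [100, 200]
const values = frame.fields[1].values.toArray(); // [1, 2]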
This commit is contained in:
parent 5940250480
commit 040cfe074f
@@ -14,15 +14,15 @@ describe('Reversing DataFrame', () => {
    const helper = new MutableDataFrame(frame);

-   expect(helper.values.time.toArray()).toEqual([100, 200, 300]);
-   expect(helper.values.name.toArray()).toEqual(['a', 'b', 'c']);
-   expect(helper.values.value.toArray()).toEqual([1, 2, 3]);
+   expect(helper.fields[0].values.toArray()).toEqual([100, 200, 300]);
+   expect(helper.fields[1].values.toArray()).toEqual(['a', 'b', 'c']);
+   expect(helper.fields[2].values.toArray()).toEqual([1, 2, 3]);

    helper.reverse();

-   expect(helper.values.time.toArray()).toEqual([300, 200, 100]);
-   expect(helper.values.name.toArray()).toEqual(['c', 'b', 'a']);
-   expect(helper.values.value.toArray()).toEqual([3, 2, 1]);
+   expect(helper.fields[0].values.toArray()).toEqual([300, 200, 100]);
+   expect(helper.fields[1].values.toArray()).toEqual(['c', 'b', 'a']);
+   expect(helper.fields[2].values.toArray()).toEqual([3, 2, 1]);
    });
  });
});
@@ -38,7 +38,7 @@ describe('Apending DataFrame', () => {
    };

    const frame = new MutableDataFrame(dto);
-   expect(frame.values.time.toArray()).toEqual([100, null, null]);
+   expect(frame.fields[0].values.toArray()).toEqual([100, null, null]);

    // Set a value on the second row
    frame.set(1, { time: 200, name: 'BB', value: 20 });
@@ -48,23 +48,13 @@ describe('Apending DataFrame', () => {
      { time: null, name: null, value: 3 }, // 3
    ]);

-   // Set a value on the second row
-   frame.add({ value2: 'XXX' }, true);
-   expect(frame.toArray()).toEqual([
-     { time: 100, name: 'a', value: 1, value2: null }, // 1
-     { time: 200, name: 'BB', value: 20, value2: null }, // 2
-     { time: null, name: null, value: 3, value2: null }, // 3
-     { time: null, name: null, value: null, value2: 'XXX' }, // 4
-   ]);
-
    // Add a time value that has an array type
    frame.add({ time: 300 });
    expect(frame.toArray()).toEqual([
-     { time: 100, name: 'a', value: 1, value2: null }, // 1
-     { time: 200, name: 'BB', value: 20, value2: null }, // 2
-     { time: null, name: null, value: 3, value2: null }, // 3
-     { time: null, name: null, value: null, value2: 'XXX' }, // 4
-     { time: 300, name: null, value: null, value2: null }, // 5
+     { time: 100, name: 'a', value: 1 }, // 1
+     { time: 200, name: 'BB', value: 20 }, // 2
+     { time: null, name: null, value: 3 }, // 3
+     { time: 300, name: null, value: null }, // 5
    ]);

    // Make sure length survives a spread operator
@@ -1,5 +1,5 @@
import { Field, DataFrame, DataFrameDTO, FieldDTO, FieldType } from '../types/dataFrame';
- import { KeyValue, QueryResultMeta } from '../types/data';
+ import { QueryResultMeta } from '../types/data';
import { guessFieldTypeFromValue, guessFieldTypeForField, toDataFrameDTO } from './processDataFrame';
import isString from 'lodash/isString';
import { makeFieldParser } from '../utils/fieldParser';
@@ -17,9 +17,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements DataFrame
  name?: string;
  refId?: string;
  meta?: QueryResultMeta;

  fields: MutableField[] = [];
- values: KeyValue<MutableVector> = {};

  private first: Vector = new ArrayVector();
  private creator: MutableVectorCreator;
@@ -101,11 +99,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements DataFrame
    // Make sure it has a name
    let name = f.name;
    if (!name) {
-     if (type === FieldType.time) {
-       name = this.values['Time'] ? `Time ${this.fields.length + 1}` : 'Time';
-     } else {
-       name = `Field ${this.fields.length + 1}`;
-     }
+     name = `Field ${this.fields.length + 1}`;
    }

    const field: MutableField = {
@@ -126,13 +120,6 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements DataFrame
    this.fields.push(field);
    this.first = this.fields[0].values;

-   // The Field Already exists
-   if (this.values[name]) {
-     console.warn(`Duplicate field names found: ${name}, only the first will be accessible`);
-   } else {
-     this.values[name] = field.values;
-   }
-
    // Make sure the field starts with a given length
    if (startLength) {
      while (field.values.length < startLength) {
@@ -159,17 +146,6 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements DataFrame
    }
  }

- private addMissingFieldsFor(value: any) {
-   for (const key of Object.keys(value)) {
-     if (!this.values[key]) {
-       this.addField({
-         name: key,
-         type: guessFieldTypeFromValue(value[key]),
-       });
-     }
-   }
- }
-
  /**
   * Reverse all values
   */
@@ -217,11 +193,7 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements DataFrame
  /**
   * Add all properties of the value as fields on the frame
   */
- add(value: T, addMissingFields?: boolean) {
-   if (addMissingFields) {
-     this.addMissingFieldsFor(value);
-   }
-
+ add(value: T) {
    // Will add one value for every field
    const obj = value as any;
    for (const field of this.fields) {
@@ -242,15 +214,11 @@ export class MutableDataFrame<T = any> extends FunctionalVector<T> implements DataFrame
    }
  }

- set(index: number, value: T, addMissingFields?: boolean) {
+ set(index: number, value: T) {
    if (index > this.length) {
      throw new Error('Unable ot set value beyond current length');
    }

-   if (addMissingFields) {
-     this.addMissingFieldsFor(value);
-   }
-
    const obj = (value as any) || {};
    for (const field of this.fields) {
      field.values.set(index, obj[field.name]);
@@ -7,7 +7,7 @@ import {
  setDynamicConfigValue,
  setFieldConfigDefaults,
} from './fieldOverrides';
- import { MutableDataFrame, toDataFrame } from '../dataframe';
+ import { ArrayDataFrame, MutableDataFrame, toDataFrame } from '../dataframe';
import {
  DataFrame,
  Field,
@@ -77,11 +77,11 @@ locationUtil.initialize({

describe('Global MinMax', () => {
  it('find global min max', () => {
-   const f0 = new MutableDataFrame();
-   f0.add({ title: 'AAA', value: 100, value2: 1234 }, true);
-   f0.add({ title: 'BBB', value: -20 }, true);
-   f0.add({ title: 'CCC', value: 200, value2: 1000 }, true);
-   expect(f0.length).toEqual(3);
+   const f0 = new ArrayDataFrame<{ title: string; value: number; value2: number | null }>([
+     { title: 'AAA', value: 100, value2: 1234 },
+     { title: 'BBB', value: -20, value2: null },
+     { title: 'CCC', value: 200, value2: 1000 },
+   ]);

    const minmax = findNumericFieldMinMax([f0]);
    expect(minmax.min).toEqual(-20);
@@ -127,11 +127,11 @@ describe('Global MinMax', () => {
});

describe('applyFieldOverrides', () => {
- const f0 = new MutableDataFrame();
- f0.add({ title: 'AAA', value: 100, value2: 1234 }, true);
- f0.add({ title: 'BBB', value: -20 }, true);
- f0.add({ title: 'CCC', value: 200, value2: 1000 }, true);
- expect(f0.length).toEqual(3);
+ const f0 = new ArrayDataFrame<{ title: string; value: number; value2: number | null }>([
+   { title: 'AAA', value: 100, value2: 1234 },
+   { title: 'BBB', value: -20, value2: null },
+   { title: 'CCC', value: 200, value2: 1000 },
+ ]);

  // Hardcode the max value
  f0.fields[1].config.max = 0;
@@ -178,19 +178,19 @@ describe('applyFieldOverrides', () => {
  });

  expect(withOverrides[0].fields[0].state!.scopedVars).toMatchInlineSnapshot(`
-   Object {
-     "__field": Object {
-       "text": "Field",
-       "value": Object {},
-     },
-     "__series": Object {
-       "text": "Series",
-       "value": Object {
-         "name": "A",
-       },
-     },
-   }
- `);
+   Object {
+     "__field": Object {
+       "text": "Field",
+       "value": Object {},
+     },
+     "__series": Object {
+       "text": "Series",
+       "value": Object {
+         "name": "A",
+       },
+     },
+   }
+ `);

  expect(withOverrides[1].fields[0].state!.scopedVars).toMatchInlineSnapshot(`
    Object {
@@ -1,4 +1,3 @@
- import { appendTransformer } from './transformers/append';
import { reduceTransformer } from './transformers/reduce';
import { concatenateTransformer } from './transformers/concat';
import { calculateFieldTransformer } from './transformers/calculateField';
@@ -24,7 +23,6 @@ export const standardTransformers = {
  filterFramesByRefIdTransformer,
  orderFieldsTransformer,
  organizeFieldsTransformer,
- appendTransformer,
  reduceTransformer,
  concatenateTransformer,
  calculateFieldTransformer,
@@ -1,52 +0,0 @@
- import { DataTransformerID } from './ids';
- import { toDataFrame } from '../../dataframe/processDataFrame';
- import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
- import { appendTransformer } from './append';
- import { transformDataFrame } from '../transformDataFrame';
- import { observableTester } from '../../utils/tests/observableTester';
-
- const seriesAB = toDataFrame({
-   columns: [{ text: 'A' }, { text: 'B' }],
-   rows: [
-     [1, 100], // A,B
-     [2, 200], // A,B
-   ],
- });
-
- const seriesBC = toDataFrame({
-   columns: [{ text: 'A' }, { text: 'C' }],
-   rows: [
-     [3, 3000], // A,C
-     [4, 4000], // A,C
-   ],
- });
-
- describe('Append Transformer', () => {
-   beforeAll(() => {
-     mockTransformationsRegistry([appendTransformer]);
-   });
-
-   it('filters by include', done => {
-     const cfg = {
-       id: DataTransformerID.append,
-       options: {},
-     };
-
-     observableTester().subscribeAndExpectOnNext({
-       observable: transformDataFrame([cfg], [seriesAB, seriesBC]),
-       expect: data => {
-         const processed = data[0];
-         expect(processed.fields.length).toBe(3);
-
-         const fieldA = processed.fields[0];
-         const fieldB = processed.fields[1];
-         const fieldC = processed.fields[2];
-
-         expect(fieldA.values.toArray()).toEqual([1, 2, 3, 4]);
-         expect(fieldB.values.toArray()).toEqual([100, 200, null, null]);
-         expect(fieldC.values.toArray()).toEqual([null, null, 3000, 4000]);
-       },
-       done,
-     });
-   });
- });
@@ -1,61 +0,0 @@
- import { map } from 'rxjs/operators';
-
- import { DataTransformerID } from './ids';
- import { MutableDataFrame } from '../../dataframe/MutableDataFrame';
- import { DataTransformerInfo } from '../../types/transformations';
-
- export interface AppendOptions {}
-
- export const appendTransformer: DataTransformerInfo<AppendOptions> = {
-   id: DataTransformerID.append,
-   name: 'Append',
-   description: 'Append values into a single DataFrame. This uses the name as the key',
-   defaultOptions: {},
-
-   /**
-    * Return a modified copy of the series. If the transform is not or should not
-    * be applied, just return the input series
-    */
-   operator: options => source =>
-     source.pipe(
-       map(data => {
-         if (data.length < 2) {
-           return data;
-         }
-
-         // Add the first row
-         const processed = new MutableDataFrame();
-         for (const f of data[0].fields) {
-           processed.addField({
-             ...f,
-             values: [...f.values.toArray()],
-           });
-         }
-
-         for (let i = 1; i < data.length; i++) {
-           const frame = data[i];
-           const startLength = frame.length;
-           for (let j = 0; j < frame.fields.length; j++) {
-             const src = frame.fields[j];
-             let vals = processed.values[src.name];
-             if (!vals) {
-               vals = processed.addField(
-                 {
-                   ...src,
-                   values: [],
-                 },
-                 startLength
-               ).values;
-             }
-
-             // Add each row
-             for (let k = 0; k < frame.length; k++) {
-               vals.add(src.values.get(k));
-             }
-           }
-           processed.validate();
-         }
-         return [processed];
-       })
-     ),
- };
@@ -104,7 +104,7 @@ export const mergeTransformer: DataTransformerInfo<MergeTransformerOptions> = {
        const value = valuesByKey[pointer.key][pointer.index];

        if (value) {
-         dataFrame.add(value, false);
+         dataFrame.add(value);
        }
      }
@@ -45,6 +45,7 @@ export class MeasurementCache {
  addMeasurement(m: Measurement, action: MeasurementAction): DataFrame {
    const key = m.labels ? formatLabels(m.labels) : '';
    let frame = this.frames[key];

    if (!frame) {
      frame = new CircularDataFrame(this.config);
      frame.name = this.name;
@@ -52,14 +53,17 @@ export class MeasurementCache {
        name: 'time',
        type: FieldType.time,
      });

      for (const [key, value] of Object.entries(m.values)) {
        frame.addFieldFor(value, key).labels = m.labels;
      }

      frame.meta = {
        custom: {
          labels: m.labels,
        },
      };

      this.frames[key] = frame;
    }
@@ -71,7 +75,7 @@ export class MeasurementCache {
    }

    // Add the timestamp
-   frame.values['time'].add(m.time || Date.now());
+   frame.fields[0].values.add(m.time || Date.now());

    // Attach field config to the current fields
    if (m.config) {
@@ -85,13 +89,14 @@ export class MeasurementCache {

    // Append all values (a row)
    for (const [key, value] of Object.entries(m.values)) {
-     let v = frame.values[key];
-     if (!v) {
+     const existingField = frame.fields.find(v => v.name === key);
+     if (!existingField) {
        const f = frame.addFieldFor(value, key);
        f.labels = m.labels;
-       v = f.values;
+       f.values.add(value);
+     } else {
+       existingField.values.add(value);
      }
-     v.add(value);
    }

    // Make sure all fields have the same length
@@ -121,6 +121,12 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: MutableDataFrame
    }
  }

+ const tsField = data.fields[0];
+ const tsNsField = data.fields[1];
+ const lineField = data.fields[2];
+ const labelsField = data.fields[3];
+ const idField = data.fields[4];
+
  for (const stream of streams) {
    // Find unique labels
    const unique = findUniqueLabels(stream.stream, baseLabels);
@@ -131,11 +137,11 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: MutableDataFrame

    // Add each line
    for (const [ts, line] of stream.values) {
-     data.values.ts.add(new Date(parseInt(ts.substr(0, ts.length - 6), 10)).toISOString());
-     data.values.tsNs.add(ts);
-     data.values.line.add(line);
-     data.values.labels.add(unique);
-     data.values.id.add(createUid(ts, allLabelsString, line));
+     tsField.values.add(new Date(parseInt(ts.substr(0, ts.length - 6), 10)).toISOString());
+     tsNsField.values.add(ts);
+     lineField.values.add(line);
+     labelsField.values.add(unique);
+     idField.values.add(createUid(ts, allLabelsString, line));
    }
  }
}
@@ -138,8 +138,8 @@ export function runLogsStream(
  let timeoutId: any = null;

  const pushNextEvent = () => {
-   data.values.time.add(Date.now());
-   data.values.line.add(getRandomLine());
+   data.fields[0].values.add(Date.now());
+   data.fields[1].values.add(getRandomLine());

    subscriber.next({
      data: [data],