Mirror of https://github.com/grafana/grafana.git
DataFrame: split DataFrameHelper into MutableDataFrame and FieldCache (#18795)
* add appending utility
* add appending utility
* update comment
* rename to mutable
* move mutable functions out of DataFrameHelper
* move mutable functions out of DataFrameHelper
* move mutable functions out of DataFrameHelper
* turn DataFrameHelper into FieldCache
* guess time from name
* graph the numbers
* return the timeField, not just the index
* just warn on duplicate field names
* only use a parser if the input is a string
* append init all fields to the same length
* typo
* only parse string if value is a string
* DataFrame: test fixes
* Switch to null for missing values
* Fixed tests
parent 13f55bc5e8
commit c777301535
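
The split leaves two smaller pieces: MutableDataFrame takes over the append/mutate behaviour that used to live on DataFrameHelper, while FieldCache keeps only the field-lookup helpers. A minimal sketch of how the two are meant to be used together, assuming the exports land in @grafana/data as the updated imports in the diff below suggest:

import { MutableDataFrame, FieldCache, FieldType } from '@grafana/data';

// Build a frame and keep appending to it
const frame = new MutableDataFrame({
  fields: [
    { name: 'time', type: FieldType.time, values: [100, 200] },
    { name: 'value', type: FieldType.number, values: [1, 2] },
  ],
});
frame.add({ time: 300, value: 3 }); // append a row by field name
frame.appendRow([400, 4]); // or append positionally

// Wrap any DataFrame to get the lookup helpers
const cache = new FieldCache(frame);
const timeField = cache.getFirstFieldOfType(FieldType.time);
console.log(timeField!.values.toArray()); // [100, 200, 300, 400]
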
@@ -63,11 +63,11 @@ export interface Vector<T = any> {
toJSON(): any; // same results as toArray()
}

export interface Field<T = any> {
export interface Field<T = any, V = Vector<T>> {
name: string; // The column name
type: FieldType;
config: FieldConfig;
values: Vector<T>; // `buffer` when JSON
values: V; // The raw field values

/**
* Cache of reduced values

@@ -32,7 +32,7 @@ Object {
"values": Array [
null,
4,
NaN,
undefined,
NaN,
],
},

@@ -6,7 +6,7 @@ import isNumber from 'lodash/isNumber';
// Types
import { DataFrame, Field, FieldType, FieldConfig } from '../types';
import { guessFieldTypeFromValue } from './processDataFrame';
import { DataFrameHelper } from './dataFrameHelper';
import { MutableDataFrame } from './dataFrameHelper';

export enum CSVHeaderStyle {
full,
@@ -55,8 +55,8 @@ export class CSVReader {
callback?: CSVParseCallbacks;

state: ParseState;
data: DataFrameHelper[];
current: DataFrameHelper;
data: MutableDataFrame[];
current: MutableDataFrame;

constructor(options?: CSVOptions) {
if (!options) {
@@ -65,7 +65,7 @@ export class CSVReader {
this.config = options.config || {};
this.callback = options.callback;

this.current = new DataFrameHelper({ fields: [] });
this.current = new MutableDataFrame({ fields: [] });
this.state = ParseState.Starting;
this.data = [];
}
@@ -97,7 +97,7 @@ export class CSVReader {
if (isName || headerKeys.hasOwnProperty(k)) {
// Starting a new table after reading rows
if (this.state === ParseState.ReadingRows) {
this.current = new DataFrameHelper({ fields: [] });
this.current = new MutableDataFrame({ fields: [] });
this.data.push(this.current);
}

@@ -171,8 +171,8 @@ export class CSVReader {
}
};

readCSV(text: string): DataFrameHelper[] {
this.current = new DataFrameHelper({ fields: [] });
readCSV(text: string): MutableDataFrame[] {
this.current = new MutableDataFrame({ fields: [] });
this.data = [this.current];

const papacfg = {
@@ -1,20 +1,17 @@
import { DataFrameDTO, FieldDTO, FieldType } from '../types';
import { DataFrameHelper } from './dataFrameHelper';
import { DataFrameDTO, FieldType } from '../types';
import { FieldCache, MutableDataFrame } from './dataFrameHelper';
import { toDataFrame } from './processDataFrame';

describe('dataFrameHelper', () => {
const frame: DataFrameDTO = {
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [100, 200, 300] },
{ name: 'name', type: FieldType.string, values: ['a', 'b', 'c'] },
{ name: 'value', type: FieldType.number, values: [1, 2, 3] },
{ name: 'value', type: FieldType.number, values: [4, 5, 6] },
],
};
const ext = new DataFrameHelper(frame);

it('Should get a valid count for the fields', () => {
expect(ext.length).toEqual(3);
});
const ext = new FieldCache(frame);

it('Should get the first field with a duplicate name', () => {
const field = ext.getFieldByName('value');
@@ -25,15 +22,17 @@ describe('dataFrameHelper', () => {

describe('FieldCache', () => {
it('when creating a new FieldCache from fields should be able to query cache', () => {
const fields: FieldDTO[] = [
{ name: 'time', type: FieldType.time },
{ name: 'string', type: FieldType.string },
{ name: 'number', type: FieldType.number },
{ name: 'boolean', type: FieldType.boolean },
{ name: 'other', type: FieldType.other },
{ name: 'undefined' },
];
const fieldCache = new DataFrameHelper({ fields });
const frame = toDataFrame({
fields: [
{ name: 'time', type: FieldType.time },
{ name: 'string', type: FieldType.string },
{ name: 'number', type: FieldType.number },
{ name: 'boolean', type: FieldType.boolean },
{ name: 'other', type: FieldType.other },
{ name: 'undefined' },
],
});
const fieldCache = new FieldCache(frame);
const allFields = fieldCache.getFields();
expect(allFields).toHaveLength(6);

@@ -99,17 +98,55 @@ describe('reverse', () => {
],
};

const helper = new DataFrameHelper(frame);
const helper = new MutableDataFrame(frame);

expect(helper.getFieldByName('time')!.values.toArray()).toEqual([100, 200, 300]);
expect(helper.getFieldByName('name')!.values.toArray()).toEqual(['a', 'b', 'c']);
expect(helper.getFieldByName('value')!.values.toArray()).toEqual([1, 2, 3]);
expect(helper.values.time.toArray()).toEqual([100, 200, 300]);
expect(helper.values.name.toArray()).toEqual(['a', 'b', 'c']);
expect(helper.values.value.toArray()).toEqual([1, 2, 3]);

helper.reverse();

expect(helper.getFieldByName('time')!.values.toArray()).toEqual([300, 200, 100]);
expect(helper.getFieldByName('name')!.values.toArray()).toEqual(['c', 'b', 'a']);
expect(helper.getFieldByName('value')!.values.toArray()).toEqual([3, 2, 1]);
expect(helper.values.time.toArray()).toEqual([300, 200, 100]);
expect(helper.values.name.toArray()).toEqual(['c', 'b', 'a']);
expect(helper.values.value.toArray()).toEqual([3, 2, 1]);
});
});
});

describe('Apending DataFrame', () => {
it('Should append values', () => {
const dto: DataFrameDTO = {
fields: [
{ name: 'time', type: FieldType.time, values: [100] },
{ name: 'name', type: FieldType.string, values: ['a', 'b'] },
{ name: 'value', type: FieldType.number, values: [1, 2, 3] },
],
};

const frame = new MutableDataFrame(dto);
expect(frame.values.time.toArray()).toEqual([100, null, null]);

// Set a value on the second row
frame.set(1, { time: 200, name: 'BB', value: 20 });
expect(frame.toArray()).toEqual([
{ time: 100, name: 'a', value: 1 }, // 1
{ time: 200, name: 'BB', value: 20 }, // 2
{ time: null, name: null, value: 3 }, // 3
]);

// Set a value on the second row
frame.add({ value2: 'XXX' }, true);
expect(frame.toArray()).toEqual([
{ time: 100, name: 'a', value: 1, value2: null }, // 1
{ time: 200, name: 'BB', value: 20, value2: null }, // 2
{ time: null, name: null, value: 3, value2: null }, // 3
{ time: null, name: null, value: null, value2: 'XXX' }, // 4
]);

// Make sure length survives a spread operator
const keys = Object.keys(frame);
const copy = { ...frame } as any;
expect(keys).toContain('length');
expect(copy.length).toEqual(frame.length);
});
});
@@ -1,174 +1,43 @@
import { Field, FieldType, DataFrame, Vector, FieldDTO, DataFrameDTO } from '../types/dataFrame';
import { Labels, QueryResultMeta } from '../types/data';
import { guessFieldTypeForField, guessFieldTypeFromValue } from './processDataFrame';
import { ArrayVector } from './vector';
import { Labels, QueryResultMeta, KeyValue } from '../types/data';
import { guessFieldTypeForField, guessFieldTypeFromValue, toDataFrameDTO } from './processDataFrame';
import { ArrayVector, MutableVector, vectorToArray, CircularVector } from './vector';
import isArray from 'lodash/isArray';
import isString from 'lodash/isString';

export class DataFrameHelper implements DataFrame {
refId?: string;
meta?: QueryResultMeta;
name?: string;
fields: Field[];
labels?: Labels;
length = 0; // updated so it is the length of all fields
export class FieldCache {
fields: Field[] = [];

private fieldByName: { [key: string]: Field } = {};
private fieldByType: { [key: string]: Field[] } = {};

constructor(data?: DataFrame | DataFrameDTO) {
if (!data) {
data = { fields: [] }; //
}
this.refId = data.refId;
this.meta = data.meta;
this.name = data.name;
this.labels = data.labels;
this.fields = [];
for (let i = 0; i < data.fields.length; i++) {
this.addField(data.fields[i]);
}
}
constructor(private data: DataFrame) {
this.fields = data.fields;

addFieldFor(value: any, name?: string): Field {
if (!name) {
name = `Field ${this.fields.length + 1}`;
}
return this.addField({
name,
type: guessFieldTypeFromValue(value),
});
}

/**
* Reverse the direction of all fields
*/
reverse() {
for (const f of this.fields) {
f.values.toArray().reverse();
}
}

private updateTypeIndex(field: Field) {
// Make sure it has a type
if (field.type === FieldType.other) {
const t = guessFieldTypeForField(field);
if (t) {
field.type = t;
for (const field of data.fields) {
// Make sure it has a type
if (field.type === FieldType.other) {
const t = guessFieldTypeForField(field);
if (t) {
field.type = t;
}
}
}
if (!this.fieldByType[field.type]) {
this.fieldByType[field.type] = [];
}
this.fieldByType[field.type].push(field);
}
if (!this.fieldByType[field.type]) {
this.fieldByType[field.type] = [];
}
this.fieldByType[field.type].push(field);

addField(f: Field | FieldDTO): Field {
const type = f.type || FieldType.other;
const values =
!f.values || isArray(f.values)
? new ArrayVector(f.values as any[] | undefined) // array or empty
: (f.values as Vector);

// And a name
let name = f.name;
if (!name) {
if (type === FieldType.time) {
name = `Time ${this.fields.length + 1}`;
if (this.fieldByName[field.name]) {
console.warn('Duplicate field names in DataFrame: ', field.name);
} else {
name = `Column ${this.fields.length + 1}`;
this.fieldByName[field.name] = field;
}
}
const field: Field = {
name,
type,
config: f.config || {},
values,
};
this.updateTypeIndex(field);

if (this.fieldByName[field.name]) {
console.warn('Duplicate field names in DataFrame: ', field.name);
} else {
this.fieldByName[field.name] = field;
}

// Make sure the lengths all match
if (field.values.length !== this.length) {
if (field.values.length > this.length) {
// Add `null` to all other values
const newlen = field.values.length;
for (const fx of this.fields) {
const arr = fx.values as ArrayVector;
while (fx.values.length !== newlen) {
arr.buffer.push(null);
}
}
this.length = field.values.length;
} else {
const arr = field.values as ArrayVector;
while (field.values.length !== this.length) {
arr.buffer.push(null);
}
}
}

this.fields.push(field);
return field;
}

/**
* This will add each value to the corresponding column
*/
appendRow(row: any[]) {
for (let i = this.fields.length; i < row.length; i++) {
this.addFieldFor(row[i]);
}

// The first line may change the field types
if (this.length < 1) {
this.fieldByType = {};
for (let i = 0; i < this.fields.length; i++) {
const f = this.fields[i];
if (!f.type || f.type === FieldType.other) {
f.type = guessFieldTypeFromValue(row[i]);
}
this.updateTypeIndex(f);
}
}

for (let i = 0; i < this.fields.length; i++) {
const f = this.fields[i];
let v = row[i];
if (!f.parse) {
f.parse = makeFieldParser(v, f);
}
v = f.parse(v);

const arr = f.values as ArrayVector;
arr.buffer.push(v); // may be undefined
}
this.length++;
}

/**
* Add any values that match the field names
*/
appendRowFrom(obj: { [key: string]: any }) {
for (const f of this.fields) {
const v = obj[f.name];
if (!f.parse) {
f.parse = makeFieldParser(v, f);
}

const arr = f.values as ArrayVector;
arr.buffer.push(f.parse(v)); // may be undefined
}
this.length++;
}

getFields(type?: FieldType): Field[] {
if (!type) {
return [...this.fields]; // All fields
return [...this.data.fields]; // All fields
}
const fields = this.fieldByType[type];
if (fields) {
@@ -202,7 +71,7 @@ export class DataFrameHelper implements DataFrame {
}
}

function makeFieldParser(value: string, field: Field): (value: string) => any {
function makeFieldParser(value: any, field: Field): (value: string) => any {
if (!field.type) {
if (field.name === 'time' || field.name === 'Time') {
field.type = FieldType.time;
@@ -227,3 +96,297 @@ function makeFieldParser(value: string, field: Field): (value: string) => any {
// Just pass the string back
return (value: string) => value;
}

export type MutableField<T = any> = Field<T, MutableVector<T>>;

type MutableVectorCreator = (buffer?: any[]) => MutableVector;

export const MISSING_VALUE: any = null;

export class MutableDataFrame<T = any> implements DataFrame, MutableVector<T> {
name?: string;
labels?: Labels;
refId?: string;
meta?: QueryResultMeta;

fields: MutableField[] = [];
values: KeyValue<MutableVector> = {};

private first: Vector = new ArrayVector();
private creator: MutableVectorCreator;

constructor(source?: DataFrame | DataFrameDTO, creator?: MutableVectorCreator) {
// This creates the underlying storage buffers
this.creator = creator
? creator
: (buffer?: any[]) => {
return new ArrayVector(buffer);
};

// Copy values from
if (source) {
const { name, labels, refId, meta, fields } = source;
if (name) {
this.name = name;
}
if (labels) {
this.labels = labels;
}
if (refId) {
this.refId = refId;
}
if (meta) {
this.meta = meta;
}
if (fields) {
for (const f of fields) {
this.addField(f);
}
}
}

// Get Length to show up if you use spread
Object.defineProperty(this, 'length', {
enumerable: true,
get: () => {
return this.first.length;
},
});
}

// Defined for Vector interface
get length() {
return this.first.length;
}

addFieldFor(value: any, name?: string): MutableField {
return this.addField({
name: name || '', // Will be filled in
type: guessFieldTypeFromValue(value),
});
}

addField(f: Field | FieldDTO, startLength?: number): MutableField {
let buffer: any[] | undefined = undefined;

if (f.values) {
if (isArray(f.values)) {
buffer = f.values as any[];
} else {
buffer = (f.values as Vector).toArray();
}
}

let type = f.type;

if (!type && ('time' === f.name || 'Time' === f.name)) {
type = FieldType.time;
} else {
if (!type && buffer && buffer.length) {
type = guessFieldTypeFromValue(buffer[0]);
}
if (!type) {
type = FieldType.other;
}
}

// Make sure it has a name
let name = f.name;
if (!name) {
if (type === FieldType.time) {
name = this.values['Time'] ? `Time ${this.fields.length + 1}` : 'Time';
} else {
name = `Field ${this.fields.length + 1}`;
}
}

const field: MutableField = {
name,
type,
config: f.config || {},
values: this.creator(buffer),
};

if (type === FieldType.other) {
type = guessFieldTypeForField(field);
if (type) {
field.type = type;
}
}

this.fields.push(field);
this.first = this.fields[0].values;

// The Field Already exists
if (this.values[name]) {
console.warn(`Duplicate field names found: ${name}, only the first will be accessible`);
} else {
this.values[name] = field.values;
}

// Make sure the field starts with a given length
if (startLength) {
while (field.values.length < startLength) {
field.values.add(MISSING_VALUE);
}
} else {
this.validate();
}

return field;
}

validate() {
// Make sure all arrays are the same length
const length = this.fields.reduce((v: number, f) => {
return Math.max(v, f.values.length);
}, 0);

// Add empty elements until everything mastches
for (const field of this.fields) {
while (field.values.length !== length) {
field.values.add(MISSING_VALUE);
}
}
}

private addMissingFieldsFor(value: any) {
for (const key of Object.keys(value)) {
if (!this.values[key]) {
this.addField({
name: key,
type: guessFieldTypeFromValue(value[key]),
});
}
}
}

/**
* Reverse all values
*/
reverse() {
for (const f of this.fields) {
f.values.reverse();
}
}

/**
* This will add each value to the corresponding column
*/
appendRow(row: any[]) {
// Add any extra columns
for (let i = this.fields.length; i < row.length; i++) {
this.addField({
name: `Field ${i + 1}`,
type: guessFieldTypeFromValue(row[i]),
});
}

// The first line may change the field types
if (this.length < 1) {
for (let i = 0; i < this.fields.length; i++) {
const f = this.fields[i];
if (!f.type || f.type === FieldType.other) {
f.type = guessFieldTypeFromValue(row[i]);
}
}
}

for (let i = 0; i < this.fields.length; i++) {
const f = this.fields[i];
let v = row[i];
if (f.type !== FieldType.string && isString(v)) {
if (!f.parse) {
f.parse = makeFieldParser(v, f);
}
v = f.parse(v);
}
f.values.add(v);
}
}

/**
* Add all properties of the value as fields on the frame
*/
add(value: T, addMissingFields?: boolean) {
if (addMissingFields) {
this.addMissingFieldsFor(value);
}

// Will add one value for every field
const obj = value as any;
for (const field of this.fields) {
let val = obj[field.name];

if (field.type !== FieldType.string && isString(val)) {
if (!field.parse) {
field.parse = makeFieldParser(val, field);
}
val = field.parse(val);
}

if (val === undefined) {
val = MISSING_VALUE;
}

field.values.add(val);
}
}

set(index: number, value: T, addMissingFields?: boolean) {
if (index > this.length) {
throw new Error('Unable ot set value beyond current length');
}

if (addMissingFields) {
this.addMissingFieldsFor(value);
}

const obj = (value as any) || {};
for (const field of this.fields) {
field.values.set(index, obj[field.name]);
}
}

/**
* Get an object with a property for each field in the DataFrame
*/
get(idx: number): T {
const v: any = {};
for (const field of this.fields) {
v[field.name] = field.values.get(idx);
}
return v as T;
}

toArray(): T[] {
return vectorToArray(this);
}

/**
* The simplified JSON values used in JSON.stringify()
*/
toJSON() {
return toDataFrameDTO(this);
}
}

interface CircularOptions {
append?: 'head' | 'tail';
capacity?: number;
}

/**
* This dataframe can have values constantly added, and will never
* exceed the given capacity
*/
export class CircularDataFrame<T = any> extends MutableDataFrame<T> {
constructor(options: CircularOptions) {
super(undefined, (buffer?: any[]) => {
return new CircularVector({
buffer,
...options,
});
});
}
}
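
CircularDataFrame above only swaps the storage creator, so the MutableDataFrame API stays the same while the backing CircularVector caps how many rows are kept. A rough sketch, assuming the capacity/append options are forwarded exactly as written above:

// Keep only the most recent rows; older ones fall out once the buffer is full
const streaming = new CircularDataFrame({ capacity: 3, append: 'tail' });
streaming.addField({ name: 'time', type: FieldType.time });
streaming.addField({ name: 'value', type: FieldType.number });

for (let t = 1; t <= 5; t++) {
  streaming.add({ time: t, value: t * 10 });
}
// length should stay at the configured capacity (3) after the buffer wraps
console.log(streaming.length, streaming.toArray());
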
@@ -1,5 +1,5 @@
import { FieldType, DataFrameDTO } from '../types/index';
import { DataFrameHelper } from './dataFrameHelper';
import { MutableDataFrame } from './dataFrameHelper';
import { DataFrameView } from './dataFrameView';
import { DateTime } from './moment_wrapper';

@@ -18,7 +18,7 @@ describe('dataFrameView', () => {
{ name: 'value', type: FieldType.number, values: [1, 2, 3] },
],
};
const ext = new DataFrameHelper(frame);
const ext = new MutableDataFrame(frame);
const vector = new DataFrameView<MySpecialObject>(ext);

it('Should get a typed vector', () => {
@@ -73,4 +73,12 @@ describe('dataFrameView', () => {
expect(first.name).toEqual('b');
expect(first.name).toEqual(second.name);
});

it('toJSON returns plain object', () => {
expect(vector.toJSON()[0]).toEqual({
time: 100,
name: 'a',
value: 1,
});
});
});

@@ -2,7 +2,7 @@ import { fieldReducers, ReducerID, reduceField } from './fieldReducer';

import _ from 'lodash';
import { Field, FieldType } from '../types/index';
import { DataFrameHelper } from './dataFrameHelper';
import { MutableDataFrame } from './dataFrameHelper';
import { ArrayVector } from './vector';
import { guessFieldTypeFromValue } from './processDataFrame';

@@ -24,7 +24,7 @@ function createField<T>(name: string, values?: T[], type?: FieldType): Field<T>
}

describe('Stats Calculators', () => {
const basicTable = new DataFrameHelper({
const basicTable = new MutableDataFrame({
fields: [{ name: 'a', values: [10, 20] }, { name: 'b', values: [20, 30] }, { name: 'c', values: [30, 40] }],
});

@@ -9,7 +9,7 @@ import {
} from './processDataFrame';
import { FieldType, TimeSeries, TableData, DataFrameDTO } from '../types/index';
import { dateTime } from './moment_wrapper';
import { DataFrameHelper } from './dataFrameHelper';
import { MutableDataFrame } from './dataFrameHelper';

describe('toDataFrame', () => {
it('converts timeseries to series', () => {
@@ -89,7 +89,7 @@ describe('toDataFrame', () => {
});

it('Guess Colum Types from series', () => {
const series = new DataFrameHelper({
const series = new MutableDataFrame({
fields: [
{ name: 'A (number)', values: [123, null] },
{ name: 'B (strings)', values: [null, 'Hello'] },

@@ -19,7 +19,7 @@ import {
} from '../types/index';
import { isDateTime } from './moment_wrapper';
import { ArrayVector, SortedVector } from './vector';
import { DataFrameHelper } from './dataFrameHelper';
import { MutableDataFrame } from './dataFrameHelper';
import { deprecationWarning } from './deprecationWarning';

function convertTableToDataFrame(table: TableData): DataFrame {
@@ -221,7 +221,7 @@ export const toDataFrame = (data: any): DataFrame => {
if (data.hasOwnProperty('fields')) {
// @deprecated -- remove in 6.5
if (data.hasOwnProperty('rows')) {
const v = new DataFrameHelper(data as DataFrameDTO);
const v = new MutableDataFrame(data as DataFrameDTO);
const rows = data.rows as any[][];
for (let i = 0; i < rows.length; i++) {
v.appendRow(rows[i]);
@@ -234,7 +234,9 @@ export const toDataFrame = (data: any): DataFrame => {
if (data.hasOwnProperty('length')) {
return data as DataFrame;
}
return new DataFrameHelper(data as DataFrameDTO);

// This will convert the array values into Vectors
return new MutableDataFrame(data as DataFrameDTO);
}
if (data.hasOwnProperty('datapoints')) {
return convertTimeSeriesToDataFrame(data);
@@ -344,6 +346,35 @@ export function sortDataFrame(data: DataFrame, sortIndex?: number, reverse = fal
};
}

/**
* Returns a copy with all values reversed
*/
export function reverseDataFrame(data: DataFrame): DataFrame {
return {
...data,
fields: data.fields.map(f => {
const copy = [...f.values.toArray()];
copy.reverse();
return {
...f,
values: new ArrayVector(copy),
};
}),
};
}

export const getTimeField = (series: DataFrame): { timeField?: Field; timeIndex?: number } => {
for (let i = 0; i < series.fields.length; i++) {
if (series.fields[i].type === FieldType.time) {
return {
timeField: series.fields[i],
timeIndex: i,
};
}
}
return {};
};

/**
* Wrapper to get an array from each field value
*/

@@ -35,7 +35,7 @@ describe('Append Transformer', () => {
const fieldC = processed.fields[2];

expect(fieldA.values.toArray()).toEqual([1, 2, 3, 4]);
expect(fieldB.values.toArray()).toEqual([100, 200, undefined, undefined]);
expect(fieldC.values.toArray()).toEqual([undefined, undefined, 3000, 4000]);
expect(fieldB.values.toArray()).toEqual([100, 200, null, null]);
expect(fieldC.values.toArray()).toEqual([null, null, 3000, 4000]);
});
});

@@ -1,9 +1,7 @@
import { DataTransformerInfo } from './transformers';
import { DataFrame } from '../../types/dataFrame';
import { DataTransformerID } from './ids';
import { DataFrameHelper } from '../dataFrameHelper';
import { KeyValue } from '../../types/data';
import { AppendedVectors } from '../vector';
import { MutableDataFrame } from '../dataFrameHelper';

export interface AppendOptions {}

@@ -23,34 +21,37 @@ export const appendTransformer: DataTransformerInfo<AppendOptions> = {
return data;
}

let length = 0;
const processed = new DataFrameHelper();
for (let i = 0; i < data.length; i++) {
// Add the first row
const processed = new MutableDataFrame();
for (const f of data[0].fields) {
processed.addField({
...f,
values: [...f.values.toArray()],
});
}

for (let i = 1; i < data.length; i++) {
const frame = data[i];
const used: KeyValue<boolean> = {};
const startLength = frame.length;
for (let j = 0; j < frame.fields.length; j++) {
const src = frame.fields[j];
if (used[src.name]) {
continue;
let vals = processed.values[src.name];
if (!vals) {
vals = processed.addField(
{
...src,
values: [],
},
startLength
).values;
}
used[src.name] = true;

let f = processed.getFieldByName(src.name);
if (!f) {
f = processed.addField({
...src,
values: new AppendedVectors(length),
});
// Add each row
for (let k = 0; k < frame.length; k++) {
vals.add(src.values.get(k));
}
(f.values as AppendedVectors).append(src.values);
}

// Make sure all fields have their length updated
length += frame.length;
processed.length = length;
for (const f of processed.fields) {
(f.values as AppendedVectors).setLength(processed.length);
}
processed.validate();
}
return [processed];
};
@@ -8,7 +8,29 @@ export function vectorToArray<T>(v: Vector<T>): T[] {
return arr;
}

export class ArrayVector<T = any> implements Vector<T> {
/**
* Apache arrow vectors are Read/Write
*/
export interface ReadWriteVector<T = any> extends Vector<T> {
set: (index: number, value: T) => void;
}

/**
* Vector with standard manipulation functions
*/
export interface MutableVector<T = any> extends ReadWriteVector<T> {
/**
* Adds the value to the vector
*/
add: (value: T) => void;

/**
* modifies the vector so it is now the oposite order
*/
reverse: () => void;
}

export class ArrayVector<T = any> implements MutableVector<T> {
buffer: T[];

constructor(buffer?: T[]) {
@@ -19,10 +41,22 @@ export class ArrayVector<T = any> implements Vector<T> {
return this.buffer.length;
}

add(value: T) {
this.buffer.push(value);
}

get(index: number): T {
return this.buffer[index];
}

set(index: number, value: T) {
this.buffer[index] = value;
}

reverse() {
this.buffer.reverse();
}

toArray(): T[] {
return this.buffer;
}
@@ -109,7 +143,7 @@ interface CircularOptions<T> {
* This supports addting to the 'head' or 'tail' and will grow the buffer
* to match a configured capacity.
*/
export class CircularVector<T = any> implements Vector<T> {
export class CircularVector<T = any> implements MutableVector<T> {
private buffer: T[];
private index: number;
private capacity: number;
@@ -202,6 +236,10 @@ export class CircularVector<T = any> implements Vector<T> {
}
}

reverse() {
this.buffer.reverse();
}

/**
* Add the value to the buffer
*/
@@ -211,6 +249,10 @@ export class CircularVector<T = any> implements Vector<T> {
return this.buffer[(index + this.index) % this.buffer.length];
}

set(index: number, value: T) {
this.buffer[(index + this.index) % this.buffer.length] = value;
}

get length() {
return this.buffer.length;
}
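
With ArrayVector and CircularVector both implementing the new MutableVector interface, code that fills a vector no longer needs to care which backing store it gets. A small sketch under that assumption:

// Works with any MutableVector, regardless of the underlying buffer
function fill(v: MutableVector<number>, values: number[]) {
  for (const value of values) {
    v.add(value);
  }
}

const plain = new ArrayVector<number>();
fill(plain, [1, 2, 3]);
plain.reverse(); // in-place, from MutableVector
plain.set(0, 30); // from ReadWriteVector
console.log(plain.toArray()); // [30, 2, 1]

const ring = new CircularVector<number>({ capacity: 2 });
fill(ring, [1, 2, 3]); // only the two most recent values are retained
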
@@ -1,4 +1,4 @@
import { DataFrameHelper, FieldType, LogRowModel } from '@grafana/data';
import { FieldType, LogRowModel, MutableDataFrame } from '@grafana/data';
import { getRowContexts } from './LogRowContextProvider';
import { Labels, LogLevel } from '@grafana/data/src';
import { DataQueryResponse } from '../../types';
@@ -6,7 +6,7 @@ import { DataQueryResponse } from '../../types';
describe('getRowContexts', () => {
describe('when called with a DataFrame and results are returned', () => {
it('then the result should be in correct format', async () => {
const firstResult = new DataFrameHelper({
const firstResult = new MutableDataFrame({
refId: 'B',
labels: {},
fields: [
@@ -14,7 +14,7 @@ describe('getRowContexts', () => {
{ name: 'line', type: FieldType.string, values: ['3', '2', '1'] },
],
});
const secondResult = new DataFrameHelper({
const secondResult = new MutableDataFrame({
refId: 'B',
labels: {},
fields: [

@@ -1,5 +1,5 @@
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
import { ReducerID, Threshold, DataFrameHelper } from '@grafana/data';
import { ReducerID, Threshold, toDataFrame } from '@grafana/data';
import { GrafanaThemeType } from '../types/theme';
import { getTheme } from '../themes/index';

@@ -34,7 +34,7 @@ describe('FieldDisplay', () => {
// Simple test dataset
const options: GetFieldDisplayValuesOptions = {
data: [
new DataFrameHelper({
toDataFrame({
name: 'Series Name',
fields: [
{ name: 'Field 1', values: ['a', 'b', 'c'] },

@@ -7,6 +7,7 @@ import {
DisplayValue,
GraphSeriesValue,
DataFrameView,
getTimeField,
} from '@grafana/data';

import toNumber from 'lodash/toNumber';
@@ -82,17 +83,6 @@ export interface GetFieldDisplayValuesOptions {

export const DEFAULT_FIELD_DISPLAY_VALUES_LIMIT = 25;

const getTimeColumnIdx = (series: DataFrame) => {
let timeColumn = -1;
for (let i = 0; i < series.fields.length; i++) {
if (series.fields[i].type === FieldType.time) {
timeColumn = i;
break;
}
}
return timeColumn;
};

export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): FieldDisplay[] => {
const { data, replaceVariables, fieldOptions } = options;
const { defaults, override } = fieldOptions;
@@ -117,7 +107,7 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi

scopedVars[DataLinkBuiltInVars.seriesName] = { text: 'Series', value: series.name };

const timeColumn = getTimeColumnIdx(series);
const { timeField } = getTimeField(series);
const view = new DataFrameView(series);

for (let i = 0; i < series.fields.length && !hitLimit; i++) {
@@ -184,9 +174,9 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
let sparkline: GraphSeriesValue[][] | undefined = undefined;

// Single sparkline for every reducer
if (options.sparkline && timeColumn >= 0) {
if (options.sparkline && timeField) {
sparkline = getFlotPairs({
xField: series.fields[timeColumn],
xField: timeField,
yField: series.fields[i],
});
}

@@ -1,8 +1,8 @@
import { getFlotPairs } from './flotPairs';
import { DataFrameHelper } from '@grafana/data';
import { MutableDataFrame } from '@grafana/data';

describe('getFlotPairs', () => {
const series = new DataFrameHelper({
const series = new MutableDataFrame({
fields: [
{ name: 'a', values: [1, 2, 3] },
{ name: 'b', values: [100, 200, 300] },

@@ -15,13 +15,13 @@ import {
LogsMetaItem,
LogsMetaKind,
LogsDedupStrategy,
DataFrameHelper,
GraphSeriesXY,
LoadingState,
dateTime,
toUtc,
NullValueMode,
toDataFrame,
FieldCache,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
import { hasAnsiCodes } from 'app/core/utils/text';
@@ -238,22 +238,22 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {

for (let i = 0; i < logSeries.length; i++) {
const series = logSeries[i];
const data = new DataFrameHelper(series);
const fieldCache = new FieldCache(series);
const uniqueLabels = findUniqueLabels(series.labels, commonLabels);
if (Object.keys(uniqueLabels).length > 0) {
hasUniqueLabels = true;
}

const timeFieldIndex = data.getFirstFieldOfType(FieldType.time);
const stringField = data.getFirstFieldOfType(FieldType.string);
const logLevelField = data.getFieldByName('level');
const timeFieldIndex = fieldCache.getFirstFieldOfType(FieldType.time);
const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
const logLevelField = fieldCache.getFieldByName('level');

let seriesLogLevel: LogLevel | undefined = undefined;
if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
seriesLogLevel = getLogLevelFromKey(series.labels['level']);
}

for (let j = 0; j < data.length; j++) {
for (let j = 0; j < series.length; j++) {
const ts = timeFieldIndex.values.get(j);
const time = dateTime(ts);
const timeEpochMs = time.valueOf();

@@ -5,7 +5,7 @@ import {
LogsMetaKind,
LogsDedupStrategy,
LogLevel,
DataFrameHelper,
MutableDataFrame,
toDataFrame,
} from '@grafana/data';
import { dedupLogRows, dataFrameToLogsModel } from '../logs_model';
@@ -168,7 +168,7 @@ describe('dataFrameToLogsModel', () => {

it('given series without a time field should return empty logs model', () => {
const series: DataFrame[] = [
new DataFrameHelper({
new MutableDataFrame({
fields: [
{
name: 'message',
@@ -183,7 +183,7 @@ describe('dataFrameToLogsModel', () => {

it('given series without a string field should return empty logs model', () => {
const series: DataFrame[] = [
new DataFrameHelper({
new MutableDataFrame({
fields: [
{
name: 'time',
@@ -198,7 +198,7 @@ describe('dataFrameToLogsModel', () => {

it('given one series should return expected logs model', () => {
const series: DataFrame[] = [
new DataFrameHelper({
new MutableDataFrame({
labels: {
filename: '/var/log/grafana/grafana.log',
job: 'grafana',
@@ -259,7 +259,7 @@ describe('dataFrameToLogsModel', () => {

it('given one series without labels should return expected logs model', () => {
const series: DataFrame[] = [
new DataFrameHelper({
new MutableDataFrame({
fields: [
{
name: 'time',

@@ -1,7 +1,7 @@
import { PanelQueryRunner, QueryRunnerOptions } from './PanelQueryRunner';
import { PanelData, DataQueryRequest, DataStreamObserver, DataStreamState, ScopedVars } from '@grafana/ui';

import { LoadingState, DataFrameHelper } from '@grafana/data';
import { LoadingState, MutableDataFrame } from '@grafana/data';
import { dateTime } from '@grafana/data';
import { SHARED_DASHBODARD_QUERY } from 'app/plugins/datasource/dashboard/SharedQueryRunner';
import { DashboardQuery } from 'app/plugins/datasource/dashboard/types';
@@ -200,7 +200,7 @@ describe('PanelQueryRunner', () => {
state: LoadingState.Streaming,
key: 'test-stream-1',
data: [
new DataFrameHelper({
new MutableDataFrame({
fields: [],
name: 'I am a magic stream',
}),

@@ -163,6 +163,8 @@ describe('ResultProcessor', () => {
const { resultProcessor } = testContext({ mode: ExploreMode.Logs, observerResponse: null });
const theResult = resultProcessor.getLogsResult();

console.log(JSON.stringify(theResult));

expect(theResult).toEqual({
hasUniqueLabels: false,
meta: [],

@@ -1,6 +1,6 @@
import { PanelModel } from 'app/features/dashboard/state/PanelModel';
import { FieldDisplay, ScopedVars, DataLinkBuiltInVars } from '@grafana/ui';
import { LinkModelSupplier, DataFrameHelper, FieldType } from '@grafana/data';
import { LinkModelSupplier, getTimeField } from '@grafana/data';
import { getLinkSrv } from './link_srv';

/**
@@ -28,9 +28,9 @@ export const getFieldLinksSupplier = (value: FieldDisplay): LinkModelSupplier<Fi
if (value.row) {
const row = value.view.get(value.row);
console.log('ROW:', row);
const dataFrame = new DataFrameHelper(value.view.dataFrame);
const dataFrame = value.view.dataFrame;

const timeField = dataFrame.getFirstFieldOfType(FieldType.time);
const { timeField } = getTimeField(dataFrame);
if (timeField) {
scopedVars[DataLinkBuiltInVars.valueTime] = {
text: 'Value time',
@@ -2,7 +2,7 @@ import _ from 'lodash';
import flatten from 'app/core/utils/flatten';
import * as queryDef from './query_def';
import TableModel from 'app/core/table_model';
import { DataFrame, toDataFrame, FieldType, DataFrameHelper } from '@grafana/data';
import { DataFrame, toDataFrame, FieldType, MutableDataFrame } from '@grafana/data';
import { DataQueryResponse } from '@grafana/ui';
import { ElasticsearchAggregation } from './types';

@@ -464,7 +464,7 @@ export class ElasticResponse {

if (docs.length > 0) {
propNames = propNames.sort();
const series = new DataFrameHelper({ fields: [] });
const series = new MutableDataFrame({ fields: [] });

series.addField({
name: this.targets[0].timeField,
@@ -513,7 +513,7 @@ export class ElasticResponse {

// Add a row for each document
for (const doc of docs) {
series.appendRowFrom(doc);
series.add(doc);
}

dataFrame.push(series);

@@ -1,5 +1,5 @@
import { DataFrameView, KeyValue, MutableDataFrame } from '@grafana/data';
import { ElasticResponse } from '../elastic_response';
import { DataFrameHelper, DataFrameView, KeyValue } from '@grafana/data';

describe('ElasticResponse', () => {
let targets;
@@ -859,7 +859,7 @@ describe('ElasticResponse', () => {

it('should return histogram aggregation and documents', () => {
expect(result.data.length).toBe(2);
const logResults = result.data[0] as DataFrameHelper;
const logResults = result.data[0] as MutableDataFrame;
const fields = logResults.fields.map(f => {
return {
name: f.name,
@@ -874,16 +874,15 @@ describe('ElasticResponse', () => {
let rows = new DataFrameView(logResults);
for (let i = 0; i < rows.length; i++) {
const r = rows.get(i);
const row = [r._id, r._type, r._index, r._source];
expect(row).toContain(response.responses[0].hits.hits[i]._id);
expect(row).toContain(response.responses[0].hits.hits[i]._type);
expect(row).toContain(response.responses[0].hits.hits[i]._index);
expect(row).toContain(JSON.stringify(response.responses[0].hits.hits[i]._source, undefined, 2));
expect(r._id).toEqual(response.responses[0].hits.hits[i]._id);
expect(r._type).toEqual(response.responses[0].hits.hits[i]._type);
expect(r._index).toEqual(response.responses[0].hits.hits[i]._index);
expect(r._source).toEqual(response.responses[0].hits.hits[i]._source);
}

// Make a map from the histogram results
const hist: KeyValue<number> = {};
const histogramResults = new DataFrameHelper(result.data[1]);
const histogramResults = new MutableDataFrame(result.data[1]);
rows = new DataFrameView(histogramResults);
for (let i = 0; i < rows.length; i++) {
const row = rows.get(i);

@@ -5,7 +5,7 @@ import React, { PureComponent } from 'react';
import { InputOptions } from './types';

import { DataSourcePluginOptionsEditorProps, DataSourceSettings, TableInputCSV } from '@grafana/ui';
import { DataFrame, DataFrameHelper } from '@grafana/data';
import { DataFrame, MutableDataFrame } from '@grafana/data';
import { dataFrameToCSV } from './utils';

type InputSettings = DataSourceSettings<InputOptions>;
@@ -32,7 +32,7 @@ export class InputConfigEditor extends PureComponent<Props, State> {
onSeriesParsed = (data: DataFrame[], text: string) => {
const { options, onOptionsChange } = this.props;
if (!data) {
data = [new DataFrameHelper()];
data = [new MutableDataFrame()];
}
// data is a property on 'jsonData'
const jsonData = {

@@ -1,6 +1,6 @@
import InputDatasource, { describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';
import { readCSV, DataFrame, DataFrameHelper } from '@grafana/data';
import { readCSV, DataFrame, MutableDataFrame } from '@grafana/data';
import { DataSourceInstanceSettings, PluginMeta } from '@grafana/ui';
import { getQueryOptions } from 'test/helpers/getQueryOptions';

@@ -38,7 +38,7 @@ describe('InputDatasource', () => {
expect(describeDataFrame(null)).toEqual('');
expect(
describeDataFrame([
new DataFrameHelper({
new MutableDataFrame({
name: 'x',
fields: [{ name: 'a' }],
}),

@@ -6,7 +6,7 @@ import { InputDatasource, describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';

import { FormLabel, Select, QueryEditorProps, TableInputCSV } from '@grafana/ui';
import { DataFrame, toCSV, SelectableValue, DataFrameHelper } from '@grafana/data';
import { DataFrame, toCSV, SelectableValue, MutableDataFrame } from '@grafana/data';

import { dataFrameToCSV } from './utils';

@@ -41,7 +41,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
}
data = [...datasource.data];
if (!data) {
data = [new DataFrameHelper()];
data = [new MutableDataFrame()];
}
this.setState({ text: toCSV(data) });
}
@@ -53,7 +53,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
const { query, onChange, onRunQuery } = this.props;
this.setState({ text });
if (!data) {
data = [new DataFrameHelper()];
data = [new MutableDataFrame()];
}
onChange({ ...query, data });
onRunQuery();

@@ -1,7 +1,7 @@
import { LokiLogsStream } from './types';
import { parseLabels, FieldType, Labels, DataFrameHelper } from '@grafana/data';
import { parseLabels, FieldType, Labels, MutableDataFrame } from '@grafana/data';

export function logStreamToDataFrame(stream: LokiLogsStream, refId?: string): DataFrameHelper {
export function logStreamToDataFrame(stream: LokiLogsStream, refId?: string): MutableDataFrame {
let labels: Labels = stream.parsedLabels;
if (!labels && stream.labels) {
labels = parseLabels(stream.labels);
@@ -14,7 +14,7 @@ export function logStreamToDataFrame(stream: LokiLogsStream, refId?: string): Da
lines.push(entry.line);
}

return new DataFrameHelper({
return new MutableDataFrame({
refId,
labels,
fields: [

@@ -7,7 +7,7 @@ import {
LoadingState,
LogLevel,
CSVReader,
DataFrameHelper,
MutableDataFrame,
CircularVector,
DataFrame,
} from '@grafana/data';
@@ -182,7 +182,7 @@ export class SignalWorker extends StreamWorker {
const vals = new CircularVector({ capacity: maxRows });
this.values = [times, vals];

const data = new DataFrameHelper({
const data = new MutableDataFrame({
fields: [
{ name: 'Time', type: FieldType.time, values: times }, // The time field
{ name: 'Value', type: FieldType.number, values: vals },
@@ -351,7 +351,7 @@ export class LogsWorker extends StreamWorker {
const lines = new CircularVector({ capacity: maxRows });

this.values = [times, lines];
this.data = new DataFrameHelper({
this.data = new MutableDataFrame({
fields: [
{ name: 'Time', type: FieldType.time, values: times },
{ name: 'Line', type: FieldType.string, values: lines },

@@ -1,6 +1,6 @@
import _ from 'lodash';
import { colors, getColorFromHexRgbOrName } from '@grafana/ui';
import { TimeRange, FieldType, Field, DataFrame, DataFrameHelper } from '@grafana/data';
import { TimeRange, FieldType, Field, DataFrame, getTimeField } from '@grafana/data';
import TimeSeries from 'app/core/time_series2';
import config from 'app/core/config';

@@ -21,15 +21,17 @@ export class DataProcessor {
}

for (const series of dataList) {
const data = new DataFrameHelper(series);
const time = data.getFirstFieldOfType(FieldType.time);

if (!time) {
const { timeField } = getTimeField(series);
if (!timeField) {
continue;
}

const seriesName = series.name ? series.name : series.refId;
for (const field of data.getFields(FieldType.number)) {
for (const field of series.fields) {
if (field.type !== FieldType.number) {
continue;
}

let name = field.config && field.config.title ? field.config.title : field.name;

if (seriesName && dataList.length > 0 && name !== seriesName) {
@@ -37,8 +39,8 @@ export class DataProcessor {
}

const datapoints = [];
for (let r = 0; r < data.length; r++) {
datapoints.push([field.values.get(r), time.values.get(r)]);
for (let r = 0; r < series.length; r++) {
datapoints.push([field.values.get(r), timeField.values.get(r)]);
}

list.push(this.toTimeSeries(field, name, datapoints, list.length, range));

@@ -1,5 +1,5 @@
import { colors, getFlotPairs, getColorFromHexRgbOrName, getDisplayProcessor, PanelData } from '@grafana/ui';
import { NullValueMode, reduceField, DataFrameHelper, FieldType, DisplayValue, GraphSeriesXY } from '@grafana/data';
import { NullValueMode, reduceField, FieldType, DisplayValue, GraphSeriesXY, getTimeField } from '@grafana/data';

import { SeriesOptions, GraphOptions } from './types';
import { GraphLegendEditorLegendOptions } from './GraphLegendEditor';
@@ -19,16 +19,19 @@ export const getGraphSeriesModel = (
});

for (const series of data.series) {
const data = new DataFrameHelper(series);
const timeColumn = data.getFirstFieldOfType(FieldType.time);
if (!timeColumn) {
const { timeField } = getTimeField(series);
if (!timeField) {
continue;
}

for (const field of data.getFields(FieldType.number)) {
for (const field of series.fields) {
if (field.type !== FieldType.number) {
continue;
}

// Use external calculator just to make sure it works :)
const points = getFlotPairs({
xField: timeColumn,
xField: timeField,
yField: field,
nullValueMode: NullValueMode.Null,
});