mirror of https://github.com/grafana/grafana.git

DataFrame: convert from row based to a columnar value format (#18391)
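For orientation before the diff: a minimal sketch of the shape change this commit makes. The interfaces below are illustrative only — they paraphrase the row-based and columnar frames seen in the hunks that follow and are not the exact `@grafana/data` type definitions.

```typescript
// Illustrative shapes only; the real types live in @grafana/data.

// Before this commit: row based. Fields describe columns, all data sits in rows.
interface RowBasedFrameSketch {
  name?: string;
  refId?: string;
  fields: Array<{ name: string; type?: string }>;
  rows: any[][]; // rows[rowIndex][fieldIndex]
}

// After this commit: columnar. Each field owns its column of values.
interface ColumnarFrameSketch {
  name?: string;
  refId?: string;
  fields: Array<{
    name: string;
    type?: string;
    values: { get(index: number): any; length: number }; // vector-like column
  }>;
  length: number; // row count, derived from the field vectors
}

// Reading the first timestamp (assuming field 0 is the time field):
// row based:  frame.rows[0][0]
// columnar:   frame.fields[0].values.get(0)
```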
@@ -2,7 +2,7 @@ import _ from 'lodash';
 import flatten from 'app/core/utils/flatten';
 import * as queryDef from './query_def';
 import TableModel from 'app/core/table_model';
-import { DataFrame, toDataFrame, FieldType } from '@grafana/data';
+import { DataFrame, toDataFrame, FieldType, DataFrameHelper } from '@grafana/data';
 import { DataQueryResponse } from '@grafana/ui';
 import { ElasticsearchAggregation } from './types';

@@ -464,33 +464,38 @@ export class ElasticResponse {

     if (docs.length > 0) {
       propNames = propNames.sort();
-      const series: DataFrame = {
-        fields: [
-          {
-            name: this.targets[0].timeField,
-            type: FieldType.time,
-          },
-        ],
-        rows: [],
-      };
+      const series = new DataFrameHelper({ fields: [] });
+
+      series.addField({
+        name: this.targets[0].timeField,
+        type: FieldType.time,
+      }).parse = (v: any) => {
+        return v[0] || '';
+      };

       if (logMessageField) {
-        series.fields.push({
+        series.addField({
           name: logMessageField,
           type: FieldType.string,
-        });
+        }).parse = (v: any) => {
+          return v || '';
+        };
       } else {
-        series.fields.push({
+        series.addField({
           name: '_source',
           type: FieldType.string,
-        });
+        }).parse = (v: any) => {
+          return JSON.stringify(v, null, 2);
+        };
       }

       if (logLevelField) {
-        series.fields.push({
+        series.addField({
           name: 'level',
           type: FieldType.string,
-        });
+        }).parse = (v: any) => {
+          return v || '';
+        };
       }

       for (const propName of propNames) {
@@ -498,35 +503,17 @@ export class ElasticResponse {
           continue;
         }

-        series.fields.push({
+        series.addField({
           name: propName,
           type: FieldType.string,
-        });
+        }).parse = (v: any) => {
+          return v || '';
+        };
       }

       // Add a row for each document
       for (const doc of docs) {
-        const row: any[] = [];
-        row.push(doc[this.targets[0].timeField][0]);
-
-        if (logMessageField) {
-          row.push(doc[logMessageField] || '');
-        } else {
-          row.push(JSON.stringify(doc._source, null, 2));
-        }
-
-        if (logLevelField) {
-          row.push(doc[logLevelField] || '');
-        }
-
-        for (const propName of propNames) {
-          if (doc.hasOwnProperty(propName)) {
-            row.push(doc[propName]);
-          } else {
-            row.push(null);
-          }
-        }
-
-        series.rows.push(row);
+        series.appendRowFrom(doc);
       }

       dataFrame.push(series);
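A rough usage sketch of the helper API as it appears in the Elasticsearch hunks above. The exact behavior of `DataFrameHelper` (for example how `appendRowFrom` matches document keys to fields) is inferred from this usage, not from documentation; the sample document is invented.

```typescript
import { DataFrameHelper, FieldType } from '@grafana/data';

// Build a columnar frame the way the Elasticsearch response code above does.
const frame = new DataFrameHelper({ fields: [] });

// addField returns the created field; the hunk attaches a `parse` callback to it,
// which normalizes each raw value before it is appended.
frame.addField({ name: '@timestamp', type: FieldType.time }).parse = (v: any) => {
  return v[0] || ''; // Elasticsearch returns the timestamp as a single-element array
};
frame.addField({ name: 'message', type: FieldType.string }).parse = (v: any) => {
  return v || '';
};

// appendRowFrom takes a keyed document and appends one value per field,
// presumably looking values up by field name (assumption based on this usage).
frame.appendRowFrom({ '@timestamp': ['2019-08-06T12:00:00Z'], message: 'hello' });
```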
@@ -1,4 +1,6 @@
 import { ElasticResponse } from '../elastic_response';
+import { DataFrameHelper, DataFrameView } from '@grafana/data';
+import { KeyValue } from '@grafana/ui';

 describe('ElasticResponse', () => {
   let targets;
@@ -858,19 +860,39 @@ describe('ElasticResponse', () => {

     it('should return histogram aggregation and documents', () => {
       expect(result.data.length).toBe(2);
-      expect(result.data[0].fields).toContainEqual({ name: '@timestamp', type: 'time' });
-      expect(result.data[0].fields).toContainEqual({ name: 'host', type: 'string' });
-      expect(result.data[0].fields).toContainEqual({ name: 'message', type: 'string' });
-      result.data[0].rows.forEach((row: any, i: number) => {
+      const logResults = result.data[0] as DataFrameHelper;
+      const fields = logResults.fields.map(f => {
+        return {
+          name: f.name,
+          type: f.type,
+        };
+      });
+
+      expect(fields).toContainEqual({ name: '@timestamp', type: 'time' });
+      expect(fields).toContainEqual({ name: 'host', type: 'string' });
+      expect(fields).toContainEqual({ name: 'message', type: 'string' });
+
+      let rows = new DataFrameView(logResults);
+      for (let i = 0; i < rows.length; i++) {
+        const r = rows.get(i);
+        const row = [r._id, r._type, r._index, r._source];
        expect(row).toContain(response.responses[0].hits.hits[i]._id);
        expect(row).toContain(response.responses[0].hits.hits[i]._type);
        expect(row).toContain(response.responses[0].hits.hits[i]._index);
        expect(row).toContain(JSON.stringify(response.responses[0].hits.hits[i]._source, undefined, 2));
-      });
+      }

+      // Make a map from the histogram results
+      const hist: KeyValue<number> = {};
+      const histogramResults = new DataFrameHelper(result.data[1]);
+      rows = new DataFrameView(histogramResults);
+      for (let i = 0; i < rows.length; i++) {
+        const row = rows.get(i);
+        hist[row.Time] = row.Count;
+      }
+
       expect(result.data[1]).toHaveProperty('name', 'Count');
       response.responses[0].aggregations['2'].buckets.forEach((bucket: any) => {
-        expect(result.data[1].rows).toContainEqual([bucket.doc_count, bucket.key]);
+        expect(hist[bucket.key]).toEqual(bucket.doc_count);
       });
     });
   });
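The test above reads a columnar frame row by row through `DataFrameView`. A small sketch of that access pattern, using only the calls visible in the test (the row object exposing one property per field name is inferred from `r._id`, `row.Time`, and so on); the frame contents are made up.

```typescript
import { DataFrameHelper, DataFrameView, FieldType } from '@grafana/data';

const frame = new DataFrameHelper({
  fields: [
    { name: 'Time', type: FieldType.time, values: [100, 200] },
    { name: 'Count', type: FieldType.number, values: [3, 7] },
  ],
});

// DataFrameView iterates the frame row by row; each row exposes the field values
// under their field names.
const view = new DataFrameView(frame);
for (let i = 0; i < view.length; i++) {
  const row = view.get(i);
  console.log(row.Time, row.Count); // 100 3, then 200 7
}
```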
@@ -2,7 +2,7 @@ import AzureMonitorDatasource from '../datasource';
 // @ts-ignore
 import Q from 'q';
 import { TemplateSrv } from 'app/features/templating/template_srv';
-import { toUtc } from '@grafana/data';
+import { toUtc, DataFrame } from '@grafana/data';

 describe('AzureMonitorDatasource', () => {
   const ctx: any = {
@@ -132,11 +132,12 @@ describe('AzureMonitorDatasource', () => {
       it('should return a list of datapoints', () => {
         return ctx.ds.query(options).then((results: any) => {
           expect(results.data.length).toBe(1);
-          expect(results.data[0].name).toEqual('Percentage CPU');
-          expect(results.data[0].rows[0][1]).toEqual(1558278660000);
-          expect(results.data[0].rows[0][0]).toEqual(2.2075);
-          expect(results.data[0].rows[1][1]).toEqual(1558278720000);
-          expect(results.data[0].rows[1][0]).toEqual(2.29);
+          const data = results.data[0] as DataFrame;
+          expect(data.name).toEqual('Percentage CPU');
+          expect(data.fields[1].values.get(0)).toEqual(1558278660000);
+          expect(data.fields[0].values.get(0)).toEqual(2.2075);
+          expect(data.fields[1].values.get(1)).toEqual(1558278720000);
+          expect(data.fields[0].values.get(1)).toEqual(2.29);
         });
       });
     });
@@ -5,7 +5,8 @@ import React, { PureComponent } from 'react';
 import { InputOptions } from './types';

 import { DataSourcePluginOptionsEditorProps, DataSourceSettings, TableInputCSV } from '@grafana/ui';
-import { DataFrame, toCSV } from '@grafana/data';
+import { DataFrame, DataFrameHelper } from '@grafana/data';
+import { dataFrameToCSV } from './utils';

 type InputSettings = DataSourceSettings<InputOptions>;

@@ -23,7 +24,7 @@ export class InputConfigEditor extends PureComponent<Props, State> {
   componentDidMount() {
     const { options } = this.props;
     if (options.jsonData.data) {
-      const text = toCSV(options.jsonData.data);
+      const text = dataFrameToCSV(options.jsonData.data);
       this.setState({ text });
     }
   }
@@ -31,12 +32,7 @@ export class InputConfigEditor extends PureComponent<Props, State> {
   onSeriesParsed = (data: DataFrame[], text: string) => {
     const { options, onOptionsChange } = this.props;
     if (!data) {
-      data = [
-        {
-          fields: [],
-          rows: [],
-        },
-      ];
+      data = [new DataFrameHelper()];
     }
     // data is a property on 'jsonData'
     const jsonData = {
@@ -1,6 +1,6 @@
 import InputDatasource, { describeDataFrame } from './InputDatasource';
 import { InputQuery, InputOptions } from './types';
-import { readCSV } from '@grafana/data';
+import { readCSV, DataFrame, DataFrameHelper } from '@grafana/data';
 import { DataSourceInstanceSettings, PluginMeta } from '@grafana/ui';
 import { getQueryOptions } from 'test/helpers/getQueryOptions';
@@ -26,9 +26,9 @@ describe('InputDatasource', () => {
     return ds.query(options).then(rsp => {
       expect(rsp.data.length).toBe(1);

-      const series = rsp.data[0];
+      const series: DataFrame = rsp.data[0];
       expect(series.refId).toBe('Z');
-      expect(series.rows).toEqual(data[0].rows);
+      expect(series.fields[0].values).toEqual(data[0].fields[0].values);
     });
   });
 });
@@ -38,11 +38,10 @@ describe('InputDatasource', () => {
     expect(describeDataFrame(null)).toEqual('');
     expect(
       describeDataFrame([
-        {
+        new DataFrameHelper({
           name: 'x',
           fields: [{ name: 'a' }],
-          rows: [],
-        },
+        }),
       ])
     ).toEqual('1 Fields, 0 Rows');
   });
@@ -6,17 +6,19 @@ import {
   DataSourceInstanceSettings,
   MetricFindValue,
 } from '@grafana/ui';
-import { DataFrame } from '@grafana/data';
+import { DataFrame, DataFrameDTO, toDataFrame } from '@grafana/data';

 import { InputQuery, InputOptions } from './types';

 export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
-  data: DataFrame[];
+  data: DataFrame[] = [];

   constructor(instanceSettings: DataSourceInstanceSettings<InputOptions>) {
     super(instanceSettings);

-    this.data = instanceSettings.jsonData.data ? instanceSettings.jsonData.data : [];
+    if (instanceSettings.jsonData.data) {
+      this.data = instanceSettings.jsonData.data.map(v => toDataFrame(v));
+    }
   }

   /**
@@ -47,14 +49,14 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
   query(options: DataQueryRequest<InputQuery>): Promise<DataQueryResponse> {
     const results: DataFrame[] = [];
     for (const query of options.targets) {
       if (query.hide) {
         continue;
       }
-      const data = query.data ? query.data : this.data;
-      for (const series of data) {
+      let data = this.data;
+      if (query.data) {
+        data = query.data.map(v => toDataFrame(v));
+      }
+      for (let i = 0; i < data.length; i++) {
         results.push({
+          ...data[i],
           refId: query.refId,
-          ...series,
         });
       }
     }
@@ -66,8 +68,9 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
     let rowCount = 0;
     let info = `${this.data.length} Series:`;
     for (const series of this.data) {
-      info += ` [${series.fields.length} Fields, ${series.rows.length} Rows]`;
-      rowCount += series.rows.length;
+      const length = series.length;
+      info += ` [${series.fields.length} Fields, ${length} Rows]`;
+      rowCount += length;
     }

     if (rowCount > 0) {
@@ -84,13 +87,23 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
   }
 }

-export function describeDataFrame(data: DataFrame[]): string {
+function getLength(data?: DataFrameDTO | DataFrame) {
+  if (!data || !data.fields || !data.fields.length) {
+    return 0;
+  }
+  if (data.hasOwnProperty('length')) {
+    return (data as DataFrame).length;
+  }
+  return data.fields[0].values.length;
+}
+
+export function describeDataFrame(data: Array<DataFrameDTO | DataFrame>): string {
   if (!data || !data.length) {
     return '';
   }
   if (data.length > 1) {
     const count = data.reduce((acc, series) => {
-      return acc + series.rows.length;
+      return acc + getLength(series);
     }, 0);
     return `${data.length} Series, ${count} Rows`;
   }
@@ -98,7 +111,8 @@ export function describeDataFrame(data: DataFrame[]): string {
   if (!series.fields) {
     return 'Missing Fields';
   }
-  return `${series.fields.length} Fields, ${series.rows.length} Rows`;
+  const length = getLength(series);
+  return `${series.fields.length} Fields, ${length} Rows`;
 }

 export default InputDatasource;
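`describeDataFrame` now accepts either the persisted DTO shape (fields with plain `values` arrays) or a realized `DataFrame` (with a `length` property), and the new `getLength` helper handles both. A quick sketch of the two inputs it summarizes — the DTO literal is an assumed minimal shape based on the types in this commit:

```typescript
import { DataFrameDTO, toDataFrame } from '@grafana/data';
import { describeDataFrame } from './InputDatasource';

// DTO as saved in panel/datasource JSON: plain arrays, no length property.
const dto: DataFrameDTO = {
  name: 'x',
  fields: [{ name: 'a', values: [1, 2, 3] }],
};

console.log(describeDataFrame([dto]));              // "1 Fields, 3 Rows" via the DTO path
console.log(describeDataFrame([toDataFrame(dto)])); // same, via the DataFrame length path
```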
@@ -6,7 +6,9 @@ import { InputDatasource, describeDataFrame } from './InputDatasource';
 import { InputQuery, InputOptions } from './types';

 import { FormLabel, Select, QueryEditorProps, TableInputCSV } from '@grafana/ui';
-import { DataFrame, toCSV, SelectableValue } from '@grafana/data';
+import { DataFrame, toCSV, SelectableValue, DataFrameHelper } from '@grafana/data';
+
+import { dataFrameToCSV } from './utils';

 type Props = QueryEditorProps<InputDatasource, InputQuery, InputOptions>;

@@ -26,7 +28,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {

   onComponentDidMount() {
     const { query } = this.props;
-    const text = query.data ? toCSV(query.data) : '';
+    const text = dataFrameToCSV(query.data);
     this.setState({ text });
   }

@@ -39,12 +41,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
     }
     data = [...datasource.data];
     if (!data) {
-      data = [
-        {
-          fields: [],
-          rows: [],
-        },
-      ];
+      data = [new DataFrameHelper()];
     }
     this.setState({ text: toCSV(data) });
   }
@@ -56,12 +53,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
     const { query, onChange, onRunQuery } = this.props;
     this.setState({ text });
     if (!data) {
-      data = [
-        {
-          fields: [],
-          rows: [],
-        },
-      ];
+      data = [new DataFrameHelper()];
     }
     onChange({ ...query, data });
     onRunQuery();

@@ -1,12 +1,12 @@
 import { DataQuery, DataSourceJsonData } from '@grafana/ui';
-import { DataFrame } from '@grafana/data';
+import { DataFrameDTO } from '@grafana/data';

 export interface InputQuery extends DataQuery {
   // Data saved in the panel
-  data?: DataFrame[];
+  data?: DataFrameDTO[];
 }

 export interface InputOptions extends DataSourceJsonData {
   // Saved in the datasource and download with bootData
-  data?: DataFrame[];
+  data?: DataFrameDTO[];
 }
public/app/plugins/datasource/input/utils.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
+import { toDataFrame, DataFrameDTO, toCSV } from '@grafana/data';
+
+export function dataFrameToCSV(dto?: DataFrameDTO[]) {
+  if (!dto || !dto.length) {
+    return '';
+  }
+  return toCSV(dto.map(v => toDataFrame(dto)));
+}
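A usage sketch for the new helper, with arbitrary values: it is intended to turn the persisted DTOs back into frames and serialize them with `toCSV`, returning an empty string when nothing is stored. The exact CSV layout depends on `toCSV`'s defaults and is not shown here.

```typescript
import { DataFrameDTO } from '@grafana/data';
import { dataFrameToCSV } from './utils';

// No saved data -> empty string.
console.log(dataFrameToCSV(undefined)); // ''
console.log(dataFrameToCSV([]));        // ''

// Saved DTOs -> CSV text produced by toCSV.
const dto: DataFrameDTO[] = [
  { fields: [{ name: 'time', values: [100, 200] }, { name: 'value', values: [1, 2] }] },
];
const csv = dataFrameToCSV(dto);
```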
@@ -68,7 +68,7 @@ describe('LokiDatasource', () => {
       const res = await ds.query(options);

       const dataFrame = res.data[0] as DataFrame;
-      expect(dataFrame.rows[0][1]).toBe('hello');
+      expect(dataFrame.fields[1].values.get(0)).toBe('hello');
       expect(dataFrame.meta.limit).toBe(20);
       expect(dataFrame.meta.searchWords).toEqual(['(?i)foo']);
       done();
@@ -154,7 +154,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
     }

     if (!data.streams) {
-      return [{ ...logStreamToDataFrame(data), refId: target.refId }];
+      return [logStreamToDataFrame(data, target.refId)];
     }

     for (const stream of data.streams || []) {
@@ -330,18 +330,17 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
     const series: DataFrame[] = [];

     try {
+      const reverse = options && options.direction === 'FORWARD';
       const result = await this._request('/api/prom/query', target);
       if (result.data) {
         for (const stream of result.data.streams || []) {
           const dataFrame = logStreamToDataFrame(stream);
+          if (reverse) {
+            dataFrame.reverse();
+          }
           series.push(dataFrame);
         }
       }
-      if (options && options.direction === 'FORWARD') {
-        if (series[0] && series[0].rows) {
-          series[0].rows.reverse();
-        }
-      }

       return {
         data: series,
@@ -26,9 +26,9 @@ describe('convert loki response to DataFrame', () => {

     expect(data.length).toBe(2);
     expect(data[0].labels['foo']).toEqual('bar');
-    expect(data[0].rows[0][0]).toEqual(streams[0].entries[0].ts);
-    expect(data[0].rows[0][1]).toEqual(streams[0].entries[0].line);
-    expect(data[1].rows[0][0]).toEqual(streams[1].entries[0].ts);
-    expect(data[1].rows[0][1]).toEqual(streams[1].entries[0].line);
+    expect(data[0].fields[0].values.get(0)).toEqual(streams[0].entries[0].ts);
+    expect(data[0].fields[1].values.get(0)).toEqual(streams[0].entries[0].line);
+    expect(data[1].fields[0].values.get(0)).toEqual(streams[1].entries[0].ts);
+    expect(data[1].fields[1].values.get(0)).toEqual(streams[1].entries[0].line);
   });
 });
@@ -1,16 +1,25 @@
 import { LokiLogsStream } from './types';
-import { DataFrame, parseLabels, FieldType, Labels } from '@grafana/data';
+import { parseLabels, FieldType, Labels, DataFrameHelper } from '@grafana/data';

-export function logStreamToDataFrame(stream: LokiLogsStream): DataFrame {
+export function logStreamToDataFrame(stream: LokiLogsStream, refId?: string): DataFrameHelper {
   let labels: Labels = stream.parsedLabels;
   if (!labels && stream.labels) {
     labels = parseLabels(stream.labels);
   }
-  return {
+
+  const time: string[] = [];
+  const lines: string[] = [];
+
+  for (const entry of stream.entries) {
+    time.push(entry.ts || entry.timestamp);
+    lines.push(entry.line);
+  }
+
+  return new DataFrameHelper({
+    refId,
     labels,
-    fields: [{ name: 'ts', type: FieldType.time }, { name: 'line', type: FieldType.string }],
-    rows: stream.entries.map(entry => {
-      return [entry.ts || entry.timestamp, entry.line];
-    }),
-  };
+    fields: [
+      { name: 'ts', type: FieldType.time, values: time }, // Time
+      { name: 'line', type: FieldType.string, values: lines }, // Line
+    ],
+  });
 }
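A quick sketch of calling the rewritten transformer with a hand-made stream. The stream literal is illustrative only (`LokiLogsStream` has more optional properties than shown, hence the cast); the field positions follow the function body above.

```typescript
import { logStreamToDataFrame } from './result_transformer';

// Minimal stream shape for illustration only.
const stream = {
  labels: '{job="varlogs"}',
  entries: [
    { ts: '2019-08-06T12:00:00Z', line: 'level=info msg="hello"' },
    { ts: '2019-08-06T12:00:01Z', line: 'level=warn msg="bye"' },
  ],
} as any;

const frame = logStreamToDataFrame(stream, 'A');

// Columnar access: two fields ('ts' and 'line'), values read through the vectors.
console.log(frame.refId);                    // 'A'
console.log(frame.fields[0].values.get(0));  // '2019-08-06T12:00:00Z'
console.log(frame.fields[1].values.get(1));  // 'level=warn msg="bye"'
```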
@@ -1,7 +1,16 @@
 import defaults from 'lodash/defaults';
 import { DataQueryRequest, DataQueryResponse, DataQueryError, DataStreamObserver, DataStreamState } from '@grafana/ui';

-import { FieldType, DataFrame, LoadingState, LogLevel, CSVReader } from '@grafana/data';
+import {
+  FieldType,
+  Field,
+  LoadingState,
+  LogLevel,
+  CSVReader,
+  DataFrameHelper,
+  CircularVector,
+  DataFrame,
+} from '@grafana/data';
 import { TestDataQuery, StreamingQuery } from './types';

 export const defaultQuery: StreamingQuery = {
@@ -74,6 +83,10 @@ export class StreamWorker {
   last = -1;
   timeoutId = 0;

+  // The values within
+  values: CircularVector[] = [];
+  data: DataFrame = { fields: [], length: 0 };
+
   constructor(key: string, query: TestDataQuery, request: DataQueryRequest, observer: DataStreamObserver) {
     this.stream = {
       key,
@@ -103,26 +116,25 @@ export class StreamWorker {
     }
     this.query = query.stream;
     this.stream.request = request; // OK?
     console.log('Reuse Test Stream: ', this);
     return true;
   }

   appendRows(append: any[][]) {
-    // Trim the maximum row count
-    const { query, stream } = this;
-    const maxRows = query.buffer ? query.buffer : stream.request.maxDataPoints;
-
-    // Edit the first series
-    const series = stream.data[0];
-    let rows = series.rows.concat(append);
-    const extra = maxRows - rows.length;
-    if (extra < 0) {
-      rows = rows.slice(extra * -1);
-    }
-    series.rows = rows;
+    const { stream, values, data } = this;
+
+    // Append all rows
+    for (let i = 0; i < append.length; i++) {
+      const row = append[i];
+      for (let j = 0; j < values.length; j++) {
+        values[j].append(row[j]); // Circular buffer will kick out old entries
+      }
+    }

-    // Tell the event about only the rows that changed (it may want to process them)
-    stream.delta = [{ ...series, rows: append }];
+    // Clear any cached values
+    for (let j = 0; j < data.fields.length; j++) {
+      data.fields[j].calcs = undefined;
+    }
+    stream.data = [data];

     // Broadcast the changes
     if (this.observer) {
@@ -143,7 +155,7 @@ export class SignalWorker extends StreamWorker {
   constructor(key: string, query: TestDataQuery, request: DataQueryRequest, observer: DataStreamObserver) {
     super(key, query, request, observer);
     setTimeout(() => {
-      this.stream.data = [this.initBuffer(query.refId)];
+      this.initBuffer(query.refId);
       this.looper();
     }, 10);
@@ -162,33 +174,46 @@ export class SignalWorker extends StreamWorker {
     return row;
   };

-  initBuffer(refId: string): DataFrame {
+  initBuffer(refId: string) {
     const { speed, buffer } = this.query;
-    const data = {
-      fields: [{ name: 'Time', type: FieldType.time }, { name: 'Value', type: FieldType.number }],
-      rows: [],
+    const request = this.stream.request;
+    const maxRows = buffer ? buffer : request.maxDataPoints;
+    const times = new CircularVector(new Array<number>(maxRows));
+    const vals = new CircularVector(new Array<number>(maxRows));
+    this.values = [times, vals];
+
+    const data = new DataFrameHelper({
+      fields: [
+        { name: 'Time', type: FieldType.time, values: times }, // The time field
+        { name: 'Value', type: FieldType.number, values: vals },
+      ],
       refId,
       name: 'Signal ' + refId,
-    } as DataFrame;
+    });

     for (let i = 0; i < this.bands; i++) {
       const suffix = this.bands > 1 ? ` ${i + 1}` : '';
-      data.fields.push({ name: 'Min' + suffix, type: FieldType.number });
-      data.fields.push({ name: 'Max' + suffix, type: FieldType.number });
+      const min = new CircularVector(new Array<number>(maxRows));
+      const max = new CircularVector(new Array<number>(maxRows));
+      this.values.push(min);
+      this.values.push(max);
+
+      data.addField({ name: 'Min' + suffix, type: FieldType.number, values: min });
+      data.addField({ name: 'Max' + suffix, type: FieldType.number, values: max });
     }

     console.log('START', data);

-    const request = this.stream.request;
-
     this.value = Math.random() * 100;
-    const maxRows = buffer ? buffer : request.maxDataPoints;
     let time = Date.now() - maxRows * speed;
     for (let i = 0; i < maxRows; i++) {
-      data.rows.push(this.nextRow(time));
+      const row = this.nextRow(time);
+      for (let j = 0; j < this.values.length; j++) {
+        this.values[j].append(row[j]);
+      }
       time += speed;
     }
-    return data;
+    this.data = data;
   }

   looper = () => {
@@ -251,9 +276,10 @@ export class FetchWorker extends StreamWorker {
     return this.reader.read().then(this.processChunk);
   };

-  onHeader = (series: DataFrame) => {
-    series.refId = this.refId;
-    this.stream.data = [series];
+  onHeader = (fields: Field[]) => {
+    console.warn('TODO!!!', fields);
+    // series.refId = this.refId;
+    // this.stream.data = [series];
   };

   onRow = (row: any[]) => {
@@ -269,7 +295,7 @@ export class LogsWorker extends StreamWorker {
     super(key, query, request, observer);

     window.setTimeout(() => {
-      this.stream.data = [this.initBuffer(query.refId)];
+      this.initBuffer(query.refId);
       this.looper();
     }, 10);
   }
@@ -314,24 +340,34 @@ export class LogsWorker extends StreamWorker {
     return [time, '[' + this.getRandomLogLevel() + '] ' + this.getRandomLine()];
   };

-  initBuffer(refId: string): DataFrame {
+  initBuffer(refId: string) {
     const { speed, buffer } = this.query;
-    const data = {
-      fields: [{ name: 'Time', type: FieldType.time }, { name: 'Line', type: FieldType.string }],
-      rows: [],
-      refId,
-      name: 'Logs ' + refId,
-    } as DataFrame;

     const request = this.stream.request;

     const maxRows = buffer ? buffer : request.maxDataPoints;

+    const times = new CircularVector(new Array(maxRows));
+    const lines = new CircularVector(new Array(maxRows));
+
+    this.values = [times, lines];
+    this.data = new DataFrameHelper({
+      fields: [
+        { name: 'Time', type: FieldType.time, values: times },
+        { name: 'Line', type: FieldType.string, values: lines },
+      ],
+      refId,
+      name: 'Logs ' + refId,
+    });
+
+    // Fill up the buffer
     let time = Date.now() - maxRows * speed;
     for (let i = 0; i < maxRows; i++) {
-      data.rows.push(this.nextRow(time));
+      const row = this.nextRow(time);
+      times.append(row[0]);
+      lines.append(row[1]);
       time += speed;
     }
-    return data;
   }

   looper = () => {
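The streaming workers above now hold their data in `CircularVector` columns instead of growing a rows array. A small sketch of the ring-buffer behavior this relies on — fixed capacity, appends beyond it drop the oldest entries — using only the constructor and `append` calls visible in the hunks; the exact index order of surviving values is an assumption left unspecified here.

```typescript
import { CircularVector } from '@grafana/data';

// Capacity is fixed by the backing array; appends beyond it drop the oldest values.
const capacity = 3;
const values = new CircularVector(new Array<number>(capacity));

for (const v of [1, 2, 3, 4, 5]) {
  values.append(v); // once full, each append evicts the oldest entry
}

console.log(values.length); // 3 — never grows past the backing array
// The surviving entries are the most recent ones (3, 4, 5); their exact index
// order inside the vector is an implementation detail not relied on here.
```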
@@ -13,7 +13,7 @@ import {
   PanelEditorProps,
   Select,
 } from '@grafana/ui';
-import { Field } from '@grafana/data';
+import { FieldConfig } from '@grafana/data';

 import { Threshold, ValueMapping } from '@grafana/data';
 import { BarGaugeOptions, orientationOptions, displayModes } from './types';
@@ -41,7 +41,7 @@ export class BarGaugePanelEditor extends PureComponent<PanelEditorProps<BarGauge
       fieldOptions,
     });

-  onDefaultsChange = (field: Partial<Field>) => {
+  onDefaultsChange = (field: FieldConfig) => {
     this.onDisplayOptionsChanged({
       ...this.props.options.fieldOptions,
       defaults: field,
@@ -11,7 +11,7 @@ import {
   Switch,
   PanelOptionsGroup,
 } from '@grafana/ui';
-import { Field, Threshold, ValueMapping } from '@grafana/data';
+import { Threshold, ValueMapping, FieldConfig } from '@grafana/data';

 import { GaugeOptions } from './types';

@@ -49,7 +49,7 @@ export class GaugePanelEditor extends PureComponent<PanelEditorProps<GaugeOption
       fieldOptions,
     });

-  onDefaultsChange = (field: Partial<Field>) => {
+  onDefaultsChange = (field: FieldConfig) => {
     this.onDisplayOptionsChanged({
       ...this.props.options.fieldOptions,
       defaults: field,
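The panel editors in this commit now pass display defaults and overrides around as a `FieldConfig` rather than a `Partial<Field>`: configuration becomes its own object instead of a partial field. A sketch of such a defaults object — `title` and `unit` are the properties used elsewhere in this diff; anything beyond that is not listed here because it would be an assumption:

```typescript
import { FieldConfig } from '@grafana/data';

// Display configuration only — no name, type, or values; those stay on the Field itself.
const defaults: FieldConfig = {
  title: 'CPU usage',
  unit: 'percent',
};

// The editors pass this straight through, e.g. onDefaultsChange(defaults).
```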
@@ -1,6 +1,6 @@
 import _ from 'lodash';
 import { colors, getColorFromHexRgbOrName } from '@grafana/ui';
-import { TimeRange, FieldCache, FieldType, Field, DataFrame } from '@grafana/data';
+import { TimeRange, FieldType, Field, DataFrame, DataFrameHelper } from '@grafana/data';
 import TimeSeries from 'app/core/time_series2';
 import config from 'app/core/config';

@@ -21,35 +21,24 @@ export class DataProcessor {
     }

     for (const series of dataList) {
-      const { fields } = series;
-      const cache = new FieldCache(fields);
-      const time = cache.getFirstFieldOfType(FieldType.time);
+      const data = new DataFrameHelper(series);
+      const time = data.getFirstFieldOfType(FieldType.time);

       if (!time) {
         continue;
       }

       const seriesName = series.name ? series.name : series.refId;

-      for (let i = 0; i < fields.length; i++) {
-        if (fields[i].type !== FieldType.number) {
-          continue;
-        }
-
-        const field = fields[i];
-        let name = field.title;
-
-        if (!field.title) {
-          name = field.name;
-        }
+      for (const field of data.getFields(FieldType.number)) {
+        let name = field.config && field.config.title ? field.config.title : field.name;

         if (seriesName && dataList.length > 0 && name !== seriesName) {
           name = seriesName + ' ' + name;
         }

         const datapoints = [];
-        for (const row of series.rows) {
-          datapoints.push([row[i], row[time.index]]);
+        for (let r = 0; r < data.length; r++) {
+          datapoints.push([field.values.get(r), time.values.get(r)]);
         }

         list.push(this.toTimeSeries(field, name, datapoints, list.length, range));
@@ -76,7 +65,7 @@ export class DataProcessor {
       datapoints: datapoints || [],
       alias: alias,
       color: getColorFromHexRgbOrName(color, config.theme.type),
-      unit: field.unit,
+      unit: field.config ? field.config.unit : undefined,
     });

     if (datapoints && datapoints.length > 0 && range) {
@@ -34,12 +34,11 @@ describe('Graph DataProcessor', () => {
     {
       name: 'series',
       fields: [
-        { name: 'v1' }, // first
-        { name: 'v2' }, // second
-        { name: 'string' }, // skip
-        { name: 'time' }, // Time is last column
+        { name: 'v1', values: [0.1, 0.2, 0.3] }, // first
+        { name: 'v2', values: [1.1, 2.2, 3.3] }, // second
+        { name: 'string', values: ['a', 'b', 'c'] }, // skip
+        { name: 'time', values: [1001, 1002, 1003] }, // Time is last column
       ],
-      rows: [[0.1, 1.1, 'a', 1001], [0.2, 2.2, 'b', 1002], [0.3, 3.3, 'c', 1003]],
     },
   ]);

@@ -47,6 +46,7 @@ describe('Graph DataProcessor', () => {
     panel.xaxis.mode = 'series';
     const series = processor.getSeriesList({ dataList });
     expect(series.length).toEqual(5);
+
     expect(series).toMatchSnapshot();
   });
@@ -1,5 +1,5 @@
 import { colors, getFlotPairs, getColorFromHexRgbOrName, getDisplayProcessor, PanelData } from '@grafana/ui';
-import { NullValueMode, reduceField, FieldCache, FieldType, DisplayValue, GraphSeriesXY } from '@grafana/data';
+import { NullValueMode, reduceField, DataFrameHelper, FieldType, DisplayValue, GraphSeriesXY } from '@grafana/data';

 import { SeriesOptions, GraphOptions } from './types';
 import { GraphLegendEditorLegendOptions } from './GraphLegendEditor';
@@ -19,29 +19,22 @@ export const getGraphSeriesModel = (
   });

   for (const series of data.series) {
-    const fieldCache = new FieldCache(series.fields);
-    const timeColumn = fieldCache.getFirstFieldOfType(FieldType.time);
+    const data = new DataFrameHelper(series);
+    const timeColumn = data.getFirstFieldOfType(FieldType.time);
     if (!timeColumn) {
       continue;
     }

-    const numberFields = fieldCache.getFields(FieldType.number);
-    for (let i = 0; i < numberFields.length; i++) {
-      const field = numberFields[i];
+    for (const field of data.getFields(FieldType.number)) {
       // Use external calculator just to make sure it works :)
       const points = getFlotPairs({
-        rows: series.rows,
-        xIndex: timeColumn.index,
-        yIndex: field.index,
+        xField: timeColumn,
+        yField: field,
         nullValueMode: NullValueMode.Null,
       });

       if (points.length > 0) {
-        const seriesStats = reduceField({
-          series,
-          reducers: legendOptions.stats,
-          fieldIndex: field.index,
-        });
+        const seriesStats = reduceField({ field, reducers: legendOptions.stats });
         let statsDisplayValues: DisplayValue[];

         if (legendOptions.stats) {
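`getFlotPairs` now receives the field vectors directly instead of a rows array plus column indexes, and `reduceField` takes the field itself rather than the series and an index. A sketch of the new call shape, using exactly the option names visible in the hunk above (the frame contents are made up; the result is the usual array of [x, y] pairs):

```typescript
import { getFlotPairs } from '@grafana/ui';
import { DataFrameHelper, FieldType, NullValueMode } from '@grafana/data';

const frame = new DataFrameHelper({
  fields: [
    { name: 'time', type: FieldType.time, values: [1000, 2000, 3000] },
    { name: 'value', type: FieldType.number, values: [1, null, 3] },
  ],
});

const points = getFlotPairs({
  xField: frame.fields[0], // the time field
  yField: frame.fields[1], // the number field
  nullValueMode: NullValueMode.Null, // same option used in the hunk above
});
// points is an array of [x, y] pairs ready for the flot-based graph panel.
```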
@@ -8,7 +8,7 @@ import {
   FieldPropertiesEditor,
   PanelOptionsGroup,
 } from '@grafana/ui';
-import { ValueMapping, Field } from '@grafana/data';
+import { ValueMapping, FieldConfig } from '@grafana/data';

 import { PieChartOptionsBox } from './PieChartOptionsBox';
 import { PieChartOptions } from './types';
@@ -28,7 +28,7 @@ export class PieChartPanelEditor extends PureComponent<PanelEditorProps<PieChart
       fieldOptions,
     });

-  onDefaultsChange = (field: Partial<Field>) => {
+  onDefaultsChange = (field: FieldConfig) => {
     this.onDisplayOptionsChanged({
       ...this.props.options.fieldOptions,
       defaults: field,
@@ -10,7 +10,7 @@ import {
   FieldPropertiesEditor,
   PanelOptionsGroup,
 } from '@grafana/ui';
-import { Threshold, ValueMapping, Field } from '@grafana/data';
+import { Threshold, ValueMapping, FieldConfig } from '@grafana/data';

 import { SingleStatOptions, SparklineOptions } from './types';
 import { ColoringEditor } from './ColoringEditor';
@@ -46,7 +46,7 @@ export class SingleStatEditor extends PureComponent<PanelEditorProps<SingleStatO
       sparkline,
     });

-  onDefaultsChange = (field: Partial<Field>) => {
+  onDefaultsChange = (field: FieldConfig) => {
     this.onDisplayOptionsChanged({
       ...this.props.options.fieldOptions,
       override: field,