Refactor: rename SeriesData to DataFrame (#17854)

Ryan McKinley 2019-07-01 12:00:29 -07:00 committed by GitHub
parent c9f4cf8b53
commit e7c3d0ed97
49 changed files with 231 additions and 231 deletions

View File

@ -4,7 +4,7 @@ import { Table } from './Table';
import { getTheme } from '../../themes';
import { migratedTestTable, migratedTestStyles, simpleTable } from './examples';
import { ScopedVars, SeriesData, GrafanaThemeType } from '../../types/index';
import { ScopedVars, DataFrame, GrafanaThemeType } from '../../types/index';
import { withFullSizeStory } from '../../utils/storybook/withFullSizeStory';
import { number, boolean } from '@storybook/addon-knobs';
@ -29,7 +29,7 @@ export function columnIndexToLeter(column: number) {
return String.fromCharCode(A + c2);
}
export function makeDummyTable(columnCount: number, rowCount: number): SeriesData {
export function makeDummyTable(columnCount: number, rowCount: number): DataFrame {
return {
fields: Array.from(new Array(columnCount), (x, i) => {
return {

View File

@ -12,7 +12,7 @@ import {
} from 'react-virtualized';
import { Themeable } from '../../types/theme';
import { sortSeriesData } from '../../utils/processSeriesData';
import { sortDataFrame } from '../../utils/processDataFrame';
import {
TableCellBuilder,
@ -22,11 +22,11 @@ import {
simpleCellBuilder,
} from './TableCellBuilder';
import { stringToJsRegex } from '@grafana/data';
import { SeriesData } from '../../types/data';
import { DataFrame } from '../../types/data';
import { InterpolateFunction } from '../../types/panel';
export interface Props extends Themeable {
data: SeriesData;
data: DataFrame;
minColumnWidth: number;
showHeader: boolean;
@ -44,7 +44,7 @@ export interface Props extends Themeable {
interface State {
sortBy?: number;
sortDirection?: SortDirectionType;
data: SeriesData;
data: DataFrame;
}
interface ColumnRenderInfo {
@ -115,7 +115,7 @@ export class Table extends Component<Props, State> {
// Update the data when data or sort changes
if (dataChanged || sortBy !== prevState.sortBy || sortDirection !== prevState.sortDirection) {
this.scrollToTop = true;
this.setState({ data: sortSeriesData(data, sortBy, sortDirection === 'DESC') });
this.setState({ data: sortDataFrame(data, sortBy, sortDirection === 'DESC') });
}
}
@ -170,7 +170,7 @@ export class Table extends Component<Props, State> {
this.setState({ sortBy: sort, sortDirection: dir });
};
/** Converts the grid coordinates to SeriesData coordinates */
/** Converts the grid coordinates to DataFrame coordinates */
getCellRef = (rowIndex: number, columnIndex: number): DataIndex => {
const { showHeader, rotate } = this.props;
const rowOffset = showHeader ? -1 : 0;

View File

@ -3,7 +3,7 @@ import React from 'react';
import { storiesOf } from '@storybook/react';
import TableInputCSV from './TableInputCSV';
import { action } from '@storybook/addon-actions';
import { SeriesData } from '../../types/data';
import { DataFrame } from '../../types/data';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
const TableInputStories = storiesOf('UI/Table/Input', module);
@ -16,7 +16,7 @@ TableInputStories.add('default', () => {
width={400}
height={'90vh'}
text={'a,b,c\n1,2,3'}
onSeriesParsed={(data: SeriesData[], text: string) => {
onSeriesParsed={(data: DataFrame[], text: string) => {
console.log('Data', data, text);
action('Data')(data, text);
}}

View File

@ -2,7 +2,7 @@ import React from 'react';
import renderer from 'react-test-renderer';
import TableInputCSV from './TableInputCSV';
import { SeriesData } from '../../types/data';
import { DataFrame } from '../../types/data';
describe('TableInputCSV', () => {
it('renders correctly', () => {
@ -12,7 +12,7 @@ describe('TableInputCSV', () => {
width={'100%'}
height={200}
text={'a,b,c\n1,2,3'}
onSeriesParsed={(data: SeriesData[], text: string) => {
onSeriesParsed={(data: DataFrame[], text: string) => {
// console.log('Table:', table, 'from:', text);
}}
/>

View File

@ -1,6 +1,6 @@
import React from 'react';
import debounce from 'lodash/debounce';
import { SeriesData } from '../../types/data';
import { DataFrame } from '../../types/data';
import { CSVConfig, readCSV } from '../../utils/csv';
interface Props {
@ -8,12 +8,12 @@ interface Props {
text: string;
width: string | number;
height: string | number;
onSeriesParsed: (data: SeriesData[], text: string) => void;
onSeriesParsed: (data: DataFrame[], text: string) => void;
}
interface State {
text: string;
data: SeriesData[];
data: DataFrame[];
}
/**

View File

@ -1,4 +1,4 @@
import { SeriesData } from '../../types/data';
import { DataFrame } from '../../types/data';
import { ColumnStyle } from './TableCellBuilder';
import { getColorDefinitionByName } from '../../utils/namedColorsPalette';
@ -22,7 +22,7 @@ export const migratedTestTable = {
{ name: 'RangeMappingColored' },
],
rows: [[1388556366666, 1230, 40, undefined, '', '', 'my.host.com', 'host1', ['value1', 'value2'], 1, 2, 1, 2]],
} as SeriesData;
} as DataFrame;
export const migratedTestStyles: ColumnStyle[] = [
{

View File

@ -55,7 +55,7 @@ export interface Labels {
[key: string]: string;
}
export interface SeriesData extends QueryResultBase {
export interface DataFrame extends QueryResultBase {
name?: string;
fields: Field[];
rows: any[][];
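For reference, the renamed interface keeps the row-oriented shape it had as SeriesData: named fields plus a rows matrix. A minimal, illustrative value in that shape (field names and numbers are made up; the package-root import mirrors how other files in this commit import DataFrame and FieldType from '@grafana/ui'):

import { DataFrame, FieldType } from '@grafana/ui';

// Two typed fields and two rows, values listed in field order.
const example: DataFrame = {
  name: 'temperatures',
  refId: 'A',
  fields: [
    { name: 'Time', type: FieldType.time },
    { name: 'Value', type: FieldType.number },
  ],
  rows: [
    [1388556366666, 21.5],
    [1388556376666, 21.7],
  ],
};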

View File

@ -1,7 +1,7 @@
import { ComponentType, ComponentClass } from 'react';
import { TimeRange, RawTimeRange } from './time';
import { PluginMeta, GrafanaPlugin } from './plugin';
import { TableData, TimeSeries, SeriesData, LoadingState } from './data';
import { TableData, TimeSeries, DataFrame, LoadingState } from './data';
import { PanelData } from './panel';
import { LogRowModel } from './logs';
@ -284,11 +284,11 @@ export interface ExploreStartPageProps {
}
/**
* Starting in v6.2 SeriesData can represent both TimeSeries and TableData
* Starting in v6.2 DataFrame can represent both TimeSeries and TableData
*/
export type LegacyResponseData = TimeSeries | TableData | any;
export type DataQueryResponseData = SeriesData | LegacyResponseData;
export type DataQueryResponseData = DataFrame | LegacyResponseData;
export type DataStreamObserver = (event: DataStreamState) => void;
@ -313,7 +313,7 @@ export interface DataStreamState {
/**
* Series data may not be known yet
*/
series?: SeriesData[];
series?: DataFrame[];
/**
* Error in stream (but may still be running)
@ -323,7 +323,7 @@ export interface DataStreamState {
/**
* Optionally return only the rows that changed in this event
*/
delta?: SeriesData[];
delta?: DataFrame[];
/**
* Stop listening to this stream

View File

@ -1,5 +1,5 @@
import { ComponentClass, ComponentType } from 'react';
import { LoadingState, SeriesData } from './data';
import { LoadingState, DataFrame } from './data';
import { TimeRange } from './time';
import { ScopedVars, DataQueryRequest, DataQueryError, LegacyResponseData } from './datasource';
import { PluginMeta, GrafanaPlugin } from './plugin';
@ -14,7 +14,7 @@ export interface PanelPluginMeta extends PluginMeta {
export interface PanelData {
state: LoadingState;
series: SeriesData[];
series: DataFrame[];
request?: DataQueryRequest;
error?: DataQueryError;

View File

@ -4,8 +4,8 @@ import defaults from 'lodash/defaults';
import isNumber from 'lodash/isNumber';
// Types
import { SeriesData, Field, FieldType } from '../types/index';
import { guessFieldTypeFromValue } from './processSeriesData';
import { DataFrame, Field, FieldType } from '../types/index';
import { guessFieldTypeFromValue } from './processDataFrame';
export enum CSVHeaderStyle {
full,
@ -28,7 +28,7 @@ export interface CSVParseCallbacks {
* This can return a modified table to force any
* Column configurations
*/
onHeader: (table: SeriesData) => void;
onHeader: (table: DataFrame) => void;
// Called after each row is read and
onRow: (row: any[]) => void;
@ -39,7 +39,7 @@ export interface CSVOptions {
callback?: CSVParseCallbacks;
}
export function readCSV(csv: string, options?: CSVOptions): SeriesData[] {
export function readCSV(csv: string, options?: CSVOptions): DataFrame[] {
return new CSVReader(options).readCSV(csv);
}
@ -56,9 +56,9 @@ export class CSVReader {
callback?: CSVParseCallbacks;
field: FieldParser[];
series: SeriesData;
series: DataFrame;
state: ParseState;
data: SeriesData[];
data: DataFrame[];
constructor(options?: CSVOptions) {
if (!options) {
@ -193,7 +193,7 @@ export class CSVReader {
}
};
readCSV(text: string): SeriesData[] {
readCSV(text: string): DataFrame[] {
this.data = [this.series];
const papacfg = {
@ -315,7 +315,7 @@ function getHeaderLine(key: string, fields: Field[], config: CSVConfig): string
return '';
}
export function toCSV(data: SeriesData[], config?: CSVConfig): string {
export function toCSV(data: DataFrame[], config?: CSVConfig): string {
if (!data) {
return '';
}
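A short, illustrative round trip through the two renamed entry points; the package-root imports match the datasource files later in this diff, and exact header handling depends on the CSVOptions/CSVConfig defaults:

import { readCSV, toCSV } from '@grafana/ui';

// Parse CSV text into DataFrame[] (one frame per table in the text)...
const frames = readCSV('a,b,c\n1,2,3\n4,5,6');
// ...then serialize the frames back to CSV text.
const text = toCSV(frames);
console.log(frames.length, frames[0].rows.length, text.length);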

View File

@ -8,7 +8,7 @@ import {
FieldType,
NullValueMode,
GrafanaTheme,
SeriesData,
DataFrame,
InterpolateFunction,
Field,
ScopedVars,
@ -36,7 +36,7 @@ export const VAR_FIELD_NAME = '__field_name';
export const VAR_CALC = '__calc';
export const VAR_CELL_PREFIX = '__cell_'; // consistent with existing table templates
function getTitleTemplate(title: string | undefined, stats: string[], data?: SeriesData[]): string {
function getTitleTemplate(title: string | undefined, stats: string[], data?: DataFrame[]): string {
// If the title exists, use it as a template variable
if (title) {
return title;
@ -72,7 +72,7 @@ export interface FieldDisplay {
}
export interface GetFieldDisplayValuesOptions {
data?: SeriesData[];
data?: DataFrame[];
fieldOptions: FieldDisplayOptions;
replaceVariables: InterpolateFunction;
sparkline?: boolean; // Calculate the sparkline

View File

@ -1,7 +1,7 @@
// Libraries
import isNumber from 'lodash/isNumber';
import { SeriesData, NullValueMode } from '../types/index';
import { DataFrame, NullValueMode } from '../types/index';
export enum ReducerID {
sum = 'sum',
@ -29,7 +29,7 @@ export interface FieldCalcs {
}
// Internal function
type FieldReducer = (data: SeriesData, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
type FieldReducer = (data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
export interface FieldReducerInfo {
id: string;
@ -64,7 +64,7 @@ export function getFieldReducers(ids?: string[]): FieldReducerInfo[] {
}
interface ReduceFieldOptions {
series: SeriesData;
series: DataFrame;
fieldIndex: number;
reducers: string[]; // The stats to calculate
nullValueMode?: NullValueMode;
@ -222,7 +222,7 @@ function getById(id: string): FieldReducerInfo | undefined {
return index[id];
}
function doStandardCalcs(data: SeriesData, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
const calcs = {
sum: 0,
max: -Number.MAX_VALUE,
@ -340,16 +340,16 @@ function doStandardCalcs(data: SeriesData, fieldIndex: number, ignoreNulls: bool
return calcs;
}
function calculateFirst(data: SeriesData, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
function calculateFirst(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
return { first: data.rows[0][fieldIndex] };
}
function calculateLast(data: SeriesData, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
function calculateLast(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
return { last: data.rows[data.rows.length - 1][fieldIndex] };
}
function calculateChangeCount(
data: SeriesData,
data: DataFrame,
fieldIndex: number,
ignoreNulls: boolean,
nullAsZero: boolean
@ -378,7 +378,7 @@ function calculateChangeCount(
}
function calculateDistinctCount(
data: SeriesData,
data: DataFrame,
fieldIndex: number,
ignoreNulls: boolean,
nullAsZero: boolean

View File

@ -1,8 +1,8 @@
// Types
import { NullValueMode, GraphSeriesValue, SeriesData } from '../types/index';
import { NullValueMode, GraphSeriesValue, DataFrame } from '../types/index';
export interface FlotPairsOptions {
series: SeriesData;
series: DataFrame;
xIndex: number;
yIndex: number;
nullValueMode?: NullValueMode;

View File

@ -1,4 +1,4 @@
export * from './processSeriesData';
export * from './processDataFrame';
export * from './valueFormats/valueFormats';
export * from './colors';
export * from './namedColorsPalette';

View File

@ -1,5 +1,5 @@
import { LogLevel } from '../types/logs';
import { SeriesData, FieldType } from '../types/data';
import { DataFrame, FieldType } from '../types/data';
/**
* Returns the log level of a log line.
@ -32,7 +32,7 @@ export function getLogLevelFromKey(key: string): LogLevel {
return LogLevel.unknown;
}
export function addLogLevelToSeries(series: SeriesData, lineIndex: number): SeriesData {
export function addLogLevelToSeries(series: DataFrame, lineIndex: number): DataFrame {
return {
...series, // Keeps Tags, RefID etc
fields: [...series.fields, { name: 'LogLevel', type: FieldType.string }],

View File

@ -1,21 +1,21 @@
import {
isSeriesData,
isDataFrame,
toLegacyResponseData,
isTableData,
toSeriesData,
toDataFrame,
guessFieldTypes,
guessFieldTypeFromValue,
} from './processSeriesData';
import { FieldType, TimeSeries, SeriesData, TableData } from '../types/data';
} from './processDataFrame';
import { FieldType, TimeSeries, DataFrame, TableData } from '../types/data';
import { dateTime } from './moment_wrapper';
describe('toSeriesData', () => {
describe('toDataFrame', () => {
it('converts timeseries to series', () => {
const input1 = {
target: 'Field Name',
datapoints: [[100, 1], [200, 2]],
};
let series = toSeriesData(input1);
let series = toDataFrame(input1);
expect(series.fields[0].name).toBe(input1.target);
expect(series.rows).toBe(input1.datapoints);
@ -25,16 +25,16 @@ describe('toSeriesData', () => {
target: '',
datapoints: [[100, 1], [200, 2]],
};
series = toSeriesData(input2);
series = toDataFrame(input2);
expect(series.fields[0].name).toEqual('Value');
});
it('keeps seriesData unchanged', () => {
it('keeps dataFrame unchanged', () => {
const input = {
fields: [{ text: 'A' }, { text: 'B' }, { text: 'C' }],
rows: [[100, 'A', 1], [200, 'B', 2], [300, 'C', 3]],
};
const series = toSeriesData(input);
const series = toDataFrame(input);
expect(series).toBe(input);
});
@ -77,12 +77,12 @@ describe('SerisData backwards compatibility', () => {
target: 'Field Name',
datapoints: [[100, 1], [200, 2]],
};
const series = toSeriesData(timeseries);
expect(isSeriesData(timeseries)).toBeFalsy();
expect(isSeriesData(series)).toBeTruthy();
const series = toDataFrame(timeseries);
expect(isDataFrame(timeseries)).toBeFalsy();
expect(isDataFrame(series)).toBeTruthy();
const roundtrip = toLegacyResponseData(series) as TimeSeries;
expect(isSeriesData(roundtrip)).toBeFalsy();
expect(isDataFrame(roundtrip)).toBeFalsy();
expect(roundtrip.target).toBe(timeseries.target);
});
@ -91,17 +91,17 @@ describe('SerisData backwards compatibility', () => {
columns: [{ text: 'a', unit: 'ms' }, { text: 'b', unit: 'zz' }, { text: 'c', unit: 'yy' }],
rows: [[100, 1, 'a'], [200, 2, 'a']],
};
const series = toSeriesData(table);
const series = toDataFrame(table);
expect(isTableData(table)).toBeTruthy();
expect(isSeriesData(series)).toBeTruthy();
expect(isDataFrame(series)).toBeTruthy();
const roundtrip = toLegacyResponseData(series) as TimeSeries;
expect(isTableData(roundtrip)).toBeTruthy();
expect(roundtrip).toMatchObject(table);
});
it('converts SeriesData to TableData to series and back again', () => {
const series: SeriesData = {
it('converts DataFrame to TableData to series and back again', () => {
const series: DataFrame = {
refId: 'Z',
meta: {
somethign: 8,

View File

@ -4,10 +4,10 @@ import isString from 'lodash/isString';
import isBoolean from 'lodash/isBoolean';
// Types
import { SeriesData, Field, TimeSeries, FieldType, TableData, Column } from '../types/index';
import { DataFrame, Field, TimeSeries, FieldType, TableData, Column } from '../types/index';
import { isDateTime } from './moment_wrapper';
function convertTableToSeriesData(table: TableData): SeriesData {
function convertTableToDataFrame(table: TableData): DataFrame {
return {
// rename the 'text' to 'name' field
fields: table.columns.map(c => {
@ -23,7 +23,7 @@ function convertTableToSeriesData(table: TableData): SeriesData {
};
}
function convertTimeSeriesToSeriesData(timeSeries: TimeSeries): SeriesData {
function convertTimeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
return {
name: timeSeries.target,
fields: [
@ -84,7 +84,7 @@ export function guessFieldTypeFromValue(v: any): FieldType {
/**
* Looks at the data to guess the column type. This ignores any existing setting
*/
export function guessFieldTypeFromSeries(series: SeriesData, index: number): FieldType | undefined {
export function guessFieldTypeFromSeries(series: DataFrame, index: number): FieldType | undefined {
const column = series.fields[index];
// 1. Use the column name to guess
@ -111,7 +111,7 @@ export function guessFieldTypeFromSeries(series: SeriesData, index: number): Fie
* @returns a copy of the series with the best guess for each field type
* If the series already has field types defined, they will be used
*/
export const guessFieldTypes = (series: SeriesData): SeriesData => {
export const guessFieldTypes = (series: DataFrame): DataFrame => {
for (let i = 0; i < series.fields.length; i++) {
if (!series.fields[i].type) {
// Somethign is missing a type return a modified copy
@ -134,26 +134,26 @@ export const guessFieldTypes = (series: SeriesData): SeriesData => {
return series;
};
export const isTableData = (data: any): data is SeriesData => data && data.hasOwnProperty('columns');
export const isTableData = (data: any): data is DataFrame => data && data.hasOwnProperty('columns');
export const isSeriesData = (data: any): data is SeriesData => data && data.hasOwnProperty('fields');
export const isDataFrame = (data: any): data is DataFrame => data && data.hasOwnProperty('fields');
export const toSeriesData = (data: any): SeriesData => {
export const toDataFrame = (data: any): DataFrame => {
if (data.hasOwnProperty('fields')) {
return data as SeriesData;
return data as DataFrame;
}
if (data.hasOwnProperty('datapoints')) {
return convertTimeSeriesToSeriesData(data);
return convertTimeSeriesToDataFrame(data);
}
if (data.hasOwnProperty('columns')) {
return convertTableToSeriesData(data);
return convertTableToDataFrame(data);
}
// TODO, try to convert JSON/Array to seriesta?
console.warn('Can not convert', data);
throw new Error('Unsupported data format');
};
export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData => {
export const toLegacyResponseData = (series: DataFrame): TimeSeries | TableData => {
const { fields, rows } = series;
if (fields.length === 2) {
@ -182,7 +182,7 @@ export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData
};
};
export function sortSeriesData(data: SeriesData, sortIndex?: number, reverse = false): SeriesData {
export function sortDataFrame(data: DataFrame, sortIndex?: number, reverse = false): DataFrame {
if (isNumber(sortIndex)) {
const copy = {
...data,
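Taken together, the renamed helpers round-trip legacy responses, as the processDataFrame tests earlier in this diff exercise. An illustrative sketch (importing from '@grafana/ui' as other files in this commit do):

import { toDataFrame, isDataFrame, toLegacyResponseData } from '@grafana/ui';

// Legacy time series: datapoints are [value, time] pairs.
const timeseries = { target: 'CPU', datapoints: [[100, 1], [200, 2]] };

const frame = toDataFrame(timeseries);      // fields[0].name === 'CPU', rows === datapoints
console.log(isDataFrame(timeseries));       // false: no 'fields' property
console.log(isDataFrame(frame));            // true

const legacy = toLegacyResponseData(frame); // back to a TimeSeries-shaped object
console.log(legacy);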

View File

@ -6,7 +6,7 @@ import {
TimeSeries,
Labels,
LogLevel,
SeriesData,
DataFrame,
findCommonLabels,
findUniqueLabels,
getLogLevel,
@ -250,15 +250,15 @@ export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): Time
});
}
function isLogsData(series: SeriesData) {
function isLogsData(series: DataFrame) {
return series.fields.some(f => f.type === FieldType.time) && series.fields.some(f => f.type === FieldType.string);
}
export function seriesDataToLogsModel(seriesData: SeriesData[], intervalMs: number): LogsModel {
const metricSeries: SeriesData[] = [];
const logSeries: SeriesData[] = [];
export function dataFrameToLogsModel(dataFrame: DataFrame[], intervalMs: number): LogsModel {
const metricSeries: DataFrame[] = [];
const logSeries: DataFrame[] = [];
for (const series of seriesData) {
for (const series of dataFrame) {
if (isLogsData(series)) {
logSeries.push(series);
continue;
@ -289,7 +289,7 @@ export function seriesDataToLogsModel(seriesData: SeriesData[], intervalMs: numb
};
}
export function logSeriesToLogsModel(logSeries: SeriesData[]): LogsModel {
export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
if (logSeries.length === 0) {
return undefined;
}
@ -355,7 +355,7 @@ export function logSeriesToLogsModel(logSeries: SeriesData[]): LogsModel {
}
export function processLogSeriesRow(
series: SeriesData,
series: DataFrame,
fieldCache: FieldCache,
rowIndex: number,
uniqueLabels: Labels

View File

@ -1,11 +1,11 @@
import { SeriesData, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/ui';
import { DataFrame, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/ui';
import {
dedupLogRows,
calculateFieldStats,
calculateLogsLabelStats,
getParser,
LogsParsers,
seriesDataToLogsModel,
dataFrameToLogsModel,
} from '../logs_model';
describe('dedupLogRows()', () => {
@ -337,23 +337,23 @@ const emptyLogsModel = {
series: [],
};
describe('seriesDataToLogsModel', () => {
describe('dataFrameToLogsModel', () => {
it('given empty series should return empty logs model', () => {
expect(seriesDataToLogsModel([] as SeriesData[], 0)).toMatchObject(emptyLogsModel);
expect(dataFrameToLogsModel([] as DataFrame[], 0)).toMatchObject(emptyLogsModel);
});
it('given series without correct series name should return empty logs model', () => {
const series: SeriesData[] = [
const series: DataFrame[] = [
{
fields: [],
rows: [],
},
];
expect(seriesDataToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given series without a time field should return empty logs model', () => {
const series: SeriesData[] = [
const series: DataFrame[] = [
{
fields: [
{
@ -364,11 +364,11 @@ describe('seriesDataToLogsModel', () => {
rows: [],
},
];
expect(seriesDataToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given series without a string field should return empty logs model', () => {
const series: SeriesData[] = [
const series: DataFrame[] = [
{
fields: [
{
@ -379,11 +379,11 @@ describe('seriesDataToLogsModel', () => {
rows: [],
},
];
expect(seriesDataToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});
it('given one series should return expected logs model', () => {
const series: SeriesData[] = [
const series: DataFrame[] = [
{
labels: {
filename: '/var/log/grafana/grafana.log',
@ -414,7 +414,7 @@ describe('seriesDataToLogsModel', () => {
},
},
];
const logsModel = seriesDataToLogsModel(series, 0);
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeFalsy();
expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([
@ -449,7 +449,7 @@ describe('seriesDataToLogsModel', () => {
});
it('given one series without labels should return expected logs model', () => {
const series: SeriesData[] = [
const series: DataFrame[] = [
{
fields: [
{
@ -468,7 +468,7 @@ describe('seriesDataToLogsModel', () => {
rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']],
},
];
const logsModel = seriesDataToLogsModel(series, 0);
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.rows).toHaveLength(1);
expect(logsModel.rows).toMatchObject([
{
@ -481,7 +481,7 @@ describe('seriesDataToLogsModel', () => {
});
it('given multiple series should return expected logs model', () => {
const series: SeriesData[] = [
const series: DataFrame[] = [
{
labels: {
foo: 'bar',
@ -520,7 +520,7 @@ describe('seriesDataToLogsModel', () => {
rows: [['1970-01-01T00:00:00Z', 'INFO 1'], ['1970-01-01T00:00:02Z', 'INFO 2']],
},
];
const logsModel = seriesDataToLogsModel(series, 0);
const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeTruthy();
expect(logsModel.rows).toHaveLength(3);
expect(logsModel.rows).toMatchObject([

View File

@ -11,7 +11,7 @@ import ErrorBoundary from 'app/core/components/ErrorBoundary/ErrorBoundary';
import { getTimeSrv, TimeSrv } from '../services/TimeSrv';
import { applyPanelTimeOverrides, calculateInnerPanelHeight } from 'app/features/dashboard/utils/panel';
import { profiler } from 'app/core/profiler';
import { getProcessedSeriesData } from '../state/PanelQueryState';
import { getProcessedDataFrame } from '../state/PanelQueryState';
import templateSrv from 'app/features/templating/template_srv';
import config from 'app/core/config';
@ -71,7 +71,7 @@ export class PanelChrome extends PureComponent<Props, State> {
this.setState({
data: {
state: LoadingState.Done,
series: getProcessedSeriesData(panel.snapshotData),
series: getProcessedDataFrame(panel.snapshotData),
},
isFirstLoad: false,
});

View File

@ -1,4 +1,4 @@
import { toDataQueryError, PanelQueryState, getProcessedSeriesData } from './PanelQueryState';
import { toDataQueryError, PanelQueryState, getProcessedDataFrame } from './PanelQueryState';
import { MockDataSourceApi } from 'test/mocks/datasource_srv';
import { DataQueryResponse, LoadingState } from '@grafana/ui';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
@ -53,7 +53,7 @@ describe('PanelQueryState', () => {
});
});
describe('getProcessedSeriesData', () => {
describe('getProcessedDataFrame', () => {
it('converts timeseries to table skipping nulls', () => {
const input1 = {
target: 'Field Name',
@ -64,7 +64,7 @@ describe('getProcessedSeriesData', () => {
target: '',
datapoints: [[100, 1], [200, 2]],
};
const data = getProcessedSeriesData([null, input1, input2, null, null]);
const data = getProcessedDataFrame([null, input1, input2, null, null]);
expect(data.length).toBe(2);
expect(data[0].fields[0].name).toBe(input1.target);
expect(data[0].rows).toBe(input1.datapoints);
@ -82,10 +82,10 @@ describe('getProcessedSeriesData', () => {
});
it('supports null values from query OK', () => {
expect(getProcessedSeriesData([null, null, null, null])).toEqual([]);
expect(getProcessedSeriesData(undefined)).toEqual([]);
expect(getProcessedSeriesData((null as unknown) as any[])).toEqual([]);
expect(getProcessedSeriesData([])).toEqual([]);
expect(getProcessedDataFrame([null, null, null, null])).toEqual([]);
expect(getProcessedDataFrame(undefined)).toEqual([]);
expect(getProcessedDataFrame((null as unknown) as any[])).toEqual([]);
expect(getProcessedDataFrame([])).toEqual([]);
});
});

View File

@ -5,7 +5,7 @@ import isEqual from 'lodash/isEqual';
// Utils & Services
import { getBackendSrv } from 'app/core/services/backend_srv';
import * as dateMath from '@grafana/ui/src/utils/datemath';
import { guessFieldTypes, toSeriesData, isSeriesData } from '@grafana/ui/src/utils';
import { guessFieldTypes, toDataFrame, isDataFrame } from '@grafana/ui/src/utils';
// Types
import {
@ -17,7 +17,7 @@ import {
DataQueryError,
DataStreamObserver,
DataStreamState,
SeriesData,
DataFrame,
DataQueryResponseData,
} from '@grafana/ui';
@ -131,7 +131,7 @@ export class PanelQueryState {
this.response = {
state: LoadingState.Done,
request: this.request,
series: this.sendSeries ? getProcessedSeriesData(resp.data) : [],
series: this.sendSeries ? getProcessedDataFrame(resp.data) : [],
legacy: this.sendLegacy ? translateToLegacyData(resp.data) : undefined,
};
resolve(this.validateStreamsAndGetPanelData());
@ -182,7 +182,7 @@ export class PanelQueryState {
return;
}
const series: SeriesData[] = [];
const series: DataFrame[] = [];
for (const stream of this.streams) {
if (stream.series) {
@ -278,7 +278,7 @@ export class PanelQueryState {
response.legacy = response.series.map(v => toLegacyResponseData(v));
}
if (sendSeries && !response.series.length && response.legacy) {
response.series = response.legacy.map(v => toSeriesData(v));
response.series = response.legacy.map(v => toDataFrame(v));
}
return this.validateStreamsAndGetPanelData();
}
@ -333,7 +333,7 @@ export function toDataQueryError(err: any): DataQueryError {
function translateToLegacyData(data: DataQueryResponseData) {
return data.map(v => {
if (isSeriesData(v)) {
if (isDataFrame(v)) {
return toLegacyResponseData(v);
}
return v;
@ -345,15 +345,15 @@ function translateToLegacyData(data: DataQueryResponseData) {
*
* This is also used by PanelChrome for snapshot support
*/
export function getProcessedSeriesData(results?: any[]): SeriesData[] {
export function getProcessedDataFrame(results?: any[]): DataFrame[] {
if (!results) {
return [];
}
const series: SeriesData[] = [];
const series: DataFrame[] = [];
for (const r of results) {
if (r) {
series.push(guessFieldTypes(toSeriesData(r)));
series.push(guessFieldTypes(toDataFrame(r)));
}
}
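The helper above is exercised by the PanelQueryState tests earlier in this diff: null entries are skipped and legacy shapes are converted with guessed field types. An illustrative call (the import path is the one used elsewhere in this commit):

import { getProcessedDataFrame } from 'app/features/dashboard/state/PanelQueryState';

// Mixed legacy results from a datasource; the nulls are dropped.
const frames = getProcessedDataFrame([
  null,
  { target: 'CPU', datapoints: [[1, 100], [2, 200]] },
  { columns: [{ text: 'time' }, { text: 'value' }], rows: [[100, 1]] },
]);
console.log(frames.length); // 2, each entry a DataFrame with guessed field types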

View File

@ -213,7 +213,7 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
// This implementation is more or less a copy of GraphPanel's logic.
// TODO: we need to use Graph's panel controller or split it into smaller
// controllers to remove code duplication. Right now we cant easily use that, since Explore
// is not using SeriesData for graph yet
// is not using DataFrame for graph yet
const exclusive = event.ctrlKey || event.metaKey || event.shiftKey;

View File

@ -197,7 +197,7 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps)
const query = queries[index];
const datasourceStatus = datasourceError ? DataSourceStatus.Disconnected : DataSourceStatus.Connected;
const error = queryErrors.filter(queryError => queryError.refId === query.refId)[0];
const series = graphResult ? graphResult : []; // TODO: use SeriesData
const series = graphResult ? graphResult : []; // TODO: use DataFrame
const queryResponse: PanelData = {
series,
state: loadingState,

View File

@ -9,7 +9,7 @@ import {
LogLevel,
TimeRange,
DataQueryError,
SeriesData,
DataFrame,
LogsModel,
TimeSeries,
DataQueryResponseData,
@ -239,7 +239,7 @@ export interface ProcessQueryResultsPayload {
datasourceId: string;
loadingState: LoadingState;
series?: DataQueryResponseData[];
delta?: SeriesData[];
delta?: DataFrame[];
}
export interface RunQueriesBatchPayload {
@ -248,7 +248,7 @@ export interface RunQueriesBatchPayload {
}
export interface LimitMessageRatePayload {
series: SeriesData[];
series: DataFrame[];
exploreId: ExploreId;
datasourceId: string;
}

View File

@ -8,17 +8,17 @@ import {
updateTimeRangeAction,
runQueriesAction,
} from '../actionTypes';
import { SeriesData, LoadingState } from '@grafana/ui';
import { DataFrame, LoadingState } from '@grafana/ui';
import { processQueryResultsEpic } from './processQueryResultsEpic';
import TableModel from 'app/core/table_model';
const testContext = () => {
const serieA: SeriesData = {
const serieA: DataFrame = {
fields: [],
refId: 'A',
rows: [],
};
const serieB: SeriesData = {
const serieB: DataFrame = {
fields: [],
refId: 'B',
rows: [],

View File

@ -14,10 +14,10 @@ import {
clearQueriesAction,
stateSaveAction,
} from '../actionTypes';
import { LoadingState, DataQueryRequest, SeriesData, FieldType } from '@grafana/ui';
import { LoadingState, DataQueryRequest, DataFrame, FieldType } from '@grafana/ui';
const testContext = () => {
const series: SeriesData[] = [
const series: DataFrame[] = [
{
fields: [
{

View File

@ -7,7 +7,7 @@ import {
DataStreamState,
LoadingState,
DataQueryResponse,
SeriesData,
DataFrame,
DataQueryResponseData,
AbsoluteTimeRange,
} from '@grafana/ui';
@ -46,7 +46,7 @@ interface ProcessResponseConfig {
now: number;
loadingState: LoadingState;
series?: DataQueryResponseData[];
delta?: SeriesData[];
delta?: DataFrame[];
}
const processResponse = (config: ProcessResponseConfig) => {

View File

@ -3,17 +3,17 @@ import {
TableData,
isTableData,
LogsModel,
toSeriesData,
toDataFrame,
guessFieldTypes,
DataQueryResponseData,
TimeSeries,
} from '@grafana/ui';
import { ExploreItemState, ExploreMode } from 'app/types/explore';
import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState';
import { getProcessedDataFrame } from 'app/features/dashboard/state/PanelQueryState';
import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
import { sortLogsResult } from 'app/core/utils/explore';
import { seriesDataToLogsModel } from 'app/core/logs_model';
import { dataFrameToLogsModel } from 'app/core/logs_model';
import { default as TimeSeries2 } from 'app/core/time_series2';
import { DataProcessor } from 'app/plugins/panel/graph/data_processor';
@ -77,8 +77,8 @@ export class ResultProcessor {
return null;
}
const graphInterval = this.state.queryIntervals.intervalMs;
const seriesData = this.rawData.map(result => guessFieldTypes(toSeriesData(result)));
const newResults = this.rawData ? seriesDataToLogsModel(seriesData, graphInterval) : null;
const dataFrame = this.rawData.map(result => guessFieldTypes(toDataFrame(result)));
const newResults = this.rawData ? dataFrameToLogsModel(dataFrame, graphInterval) : null;
if (this.replacePreviousResults) {
return newResults;
@ -107,7 +107,7 @@ export class ResultProcessor {
};
private makeTimeSeriesList = (rawData: any[]) => {
const dataList = getProcessedSeriesData(rawData);
const dataList = getProcessedDataFrame(rawData);
const dataProcessor = new DataProcessor({ xaxis: {}, aliasColors: [] }); // Hack before we use GraphSeriesXY instead
const timeSeries = dataProcessor.getSeriesList({ dataList });

View File

@ -8,14 +8,14 @@ import { applyPanelTimeOverrides, getResolution } from 'app/features/dashboard/u
import { ContextSrv } from 'app/core/services/context_srv';
import {
toLegacyResponseData,
isSeriesData,
isDataFrame,
LegacyResponseData,
TimeRange,
DataSourceApi,
PanelData,
LoadingState,
DataQueryResponse,
SeriesData,
DataFrame,
} from '@grafana/ui';
import { Unsubscribable } from 'rxjs';
import { PanelModel } from 'app/features/dashboard/state';
@ -150,7 +150,7 @@ class MetricsPanelCtrl extends PanelCtrl {
// The result should already be processed, but just in case
if (!data.legacy) {
data.legacy = data.series.map(v => {
if (isSeriesData(v)) {
if (isDataFrame(v)) {
return toLegacyResponseData(v);
}
return v;
@ -163,7 +163,7 @@ class MetricsPanelCtrl extends PanelCtrl {
data: data.legacy,
});
} else {
this.handleSeriesData(data.series);
this.handleDataFrame(data.series);
}
},
};
@ -222,14 +222,14 @@ class MetricsPanelCtrl extends PanelCtrl {
});
}
handleSeriesData(data: SeriesData[]) {
handleDataFrame(data: DataFrame[]) {
this.loading = false;
if (this.dashboard && this.dashboard.snapshot) {
this.panel.snapshotData = data;
}
// Subclasses that asked for SeriesData will override
// Subclasses that asked for DataFrame will override
}
handleQueryResult(result: DataQueryResponse) {

View File

@ -2,7 +2,7 @@ import _ from 'lodash';
import flatten from 'app/core/utils/flatten';
import * as queryDef from './query_def';
import TableModel from 'app/core/table_model';
import { SeriesData, DataQueryResponse, toSeriesData, FieldType } from '@grafana/ui';
import { DataFrame, DataQueryResponse, toDataFrame, FieldType } from '@grafana/ui';
export class ElasticResponse {
constructor(private targets, private response) {
@ -414,7 +414,7 @@ export class ElasticResponse {
}
getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
const seriesData: SeriesData[] = [];
const dataFrame: DataFrame[] = [];
const docs: any[] = [];
for (let n = 0; n < this.response.responses.length; n++) {
@ -462,7 +462,7 @@ export class ElasticResponse {
if (docs.length > 0) {
propNames = propNames.sort();
const series: SeriesData = {
const series: DataFrame = {
fields: [
{
name: this.targets[0].timeField,
@ -527,7 +527,7 @@ export class ElasticResponse {
series.rows.push(row);
}
seriesData.push(series);
dataFrame.push(series);
}
if (response.aggregations) {
@ -541,13 +541,13 @@ export class ElasticResponse {
this.nameSeries(tmpSeriesList, target);
for (let y = 0; y < tmpSeriesList.length; y++) {
const series = toSeriesData(tmpSeriesList[y]);
const series = toDataFrame(tmpSeriesList[y]);
series.labels = {};
seriesData.push(series);
dataFrame.push(series);
}
}
}
return { data: seriesData };
return { data: dataFrame };
}
}

View File

@ -80,15 +80,15 @@ export default class ResponseParser {
return id.substring(startIndex, endIndex);
}
static convertDataToPoints(timeSeriesData) {
static convertDataToPoints(timeDataFrame) {
const dataPoints: any[] = [];
for (let k = 0; k < timeSeriesData.length; k++) {
const epoch = ResponseParser.dateTimeToEpoch(timeSeriesData[k].timeStamp);
const aggKey = ResponseParser.getKeyForAggregationField(timeSeriesData[k]);
for (let k = 0; k < timeDataFrame.length; k++) {
const epoch = ResponseParser.dateTimeToEpoch(timeDataFrame[k].timeStamp);
const aggKey = ResponseParser.getKeyForAggregationField(timeDataFrame[k]);
if (aggKey) {
dataPoints.push([timeSeriesData[k][aggKey], epoch]);
dataPoints.push([timeDataFrame[k][aggKey], epoch]);
}
}

View File

@ -6,7 +6,7 @@ import './editor/editor_component';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { auto } from 'angular';
import { SeriesData } from '@grafana/ui';
import { DataFrame } from '@grafana/ui';
export interface ResultFormat {
text: string;
@ -125,7 +125,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
}
}
onDataReceived(dataList: SeriesData[]) {
onDataReceived(dataList: DataFrame[]) {
this.lastQueryError = undefined;
this.lastQuery = '';

View File

@ -4,7 +4,7 @@ import React, { PureComponent } from 'react';
// Types
import { InputOptions } from './types';
import { DataSourcePluginOptionsEditorProps, DataSourceSettings, SeriesData, TableInputCSV, toCSV } from '@grafana/ui';
import { DataSourcePluginOptionsEditorProps, DataSourceSettings, DataFrame, TableInputCSV, toCSV } from '@grafana/ui';
type InputSettings = DataSourceSettings<InputOptions>;
@ -27,7 +27,7 @@ export class InputConfigEditor extends PureComponent<Props, State> {
}
}
onSeriesParsed = (data: SeriesData[], text: string) => {
onSeriesParsed = (data: DataFrame[], text: string) => {
const { options, onOptionsChange } = this.props;
if (!data) {
data = [

View File

@ -1,4 +1,4 @@
import InputDatasource, { describeSeriesData } from './InputDatasource';
import InputDatasource, { describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';
import { readCSV, DataSourceInstanceSettings, PluginMeta } from '@grafana/ui';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
@ -32,11 +32,11 @@ describe('InputDatasource', () => {
});
});
test('SeriesData descriptions', () => {
expect(describeSeriesData([])).toEqual('');
expect(describeSeriesData(null)).toEqual('');
test('DataFrame descriptions', () => {
expect(describeDataFrame([])).toEqual('');
expect(describeDataFrame(null)).toEqual('');
expect(
describeSeriesData([
describeDataFrame([
{
name: 'x',
fields: [{ name: 'a' }],

View File

@ -1,7 +1,7 @@
// Types
import {
DataQueryRequest,
SeriesData,
DataFrame,
DataQueryResponse,
DataSourceApi,
DataSourceInstanceSettings,
@ -10,7 +10,7 @@ import {
import { InputQuery, InputOptions } from './types';
export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
data: SeriesData[];
data: DataFrame[];
constructor(instanceSettings: DataSourceInstanceSettings<InputOptions>) {
super(instanceSettings);
@ -23,9 +23,9 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
*/
getQueryDisplayText(query: InputQuery): string {
if (query.data) {
return 'Panel Data: ' + describeSeriesData(query.data);
return 'Panel Data: ' + describeDataFrame(query.data);
}
return `Shared Data From: ${this.name} (${describeSeriesData(this.data)})`;
return `Shared Data From: ${this.name} (${describeDataFrame(this.data)})`;
}
metricFindQuery(query: string, options?: any): Promise<MetricFindValue[]> {
@ -44,7 +44,7 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
}
query(options: DataQueryRequest<InputQuery>): Promise<DataQueryResponse> {
const results: SeriesData[] = [];
const results: DataFrame[] = [];
for (const query of options.targets) {
if (query.hide) {
continue;
@ -83,7 +83,7 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
}
}
export function describeSeriesData(data: SeriesData[]): string {
export function describeDataFrame(data: DataFrame[]): string {
if (!data || !data.length) {
return '';
}

View File

@ -2,10 +2,10 @@
import React, { PureComponent } from 'react';
// Types
import { InputDatasource, describeSeriesData } from './InputDatasource';
import { InputDatasource, describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';
import { FormLabel, Select, QueryEditorProps, SelectOptionItem, SeriesData, TableInputCSV, toCSV } from '@grafana/ui';
import { FormLabel, Select, QueryEditorProps, SelectOptionItem, DataFrame, TableInputCSV, toCSV } from '@grafana/ui';
type Props = QueryEditorProps<InputDatasource, InputQuery, InputOptions>;
@ -31,7 +31,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
onSourceChange = (item: SelectOptionItem<string>) => {
const { datasource, query, onChange, onRunQuery } = this.props;
let data: SeriesData[] | undefined = undefined;
let data: DataFrame[] | undefined = undefined;
if (item.value === 'panel') {
if (query.data) {
return;
@ -51,7 +51,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
onRunQuery();
};
onSeriesParsed = (data: SeriesData[], text: string) => {
onSeriesParsed = (data: DataFrame[], text: string) => {
const { query, onChange, onRunQuery } = this.props;
this.setState({ text });
if (!data) {
@ -80,10 +80,10 @@ export class InputQueryEditor extends PureComponent<Props, State> {
<div className="btn btn-link">
{query.data ? (
describeSeriesData(query.data)
describeDataFrame(query.data)
) : (
<a href={`datasources/edit/${id}/`}>
{name}: {describeSeriesData(datasource.data)} &nbsp;&nbsp;
{name}: {describeDataFrame(datasource.data)} &nbsp;&nbsp;
<i className="fa fa-pencil-square-o" />
</a>
)}

View File

@ -1,11 +1,11 @@
import { DataQuery, SeriesData, DataSourceJsonData } from '@grafana/ui/src/types';
import { DataQuery, DataFrame, DataSourceJsonData } from '@grafana/ui/src/types';
export interface InputQuery extends DataQuery {
// Data saved in the panel
data?: SeriesData[];
data?: DataFrame[];
}
export interface InputOptions extends DataSourceJsonData {
// Saved in the datasource and download with bootData
data?: SeriesData[];
data?: DataFrame[];
}

View File

@ -1,7 +1,7 @@
import LokiDatasource from './datasource';
import { LokiQuery } from './types';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
import { SeriesData, DataSourceApi } from '@grafana/ui';
import { DataFrame, DataSourceApi } from '@grafana/ui';
import { BackendSrv } from 'app/core/services/backend_srv';
import { TemplateSrv } from 'app/features/templating/template_srv';
@ -66,10 +66,10 @@ describe('LokiDatasource', () => {
const res = await ds.query(options);
const seriesData = res.data[0] as SeriesData;
expect(seriesData.rows[0][1]).toBe('hello');
expect(seriesData.meta.limit).toBe(20);
expect(seriesData.meta.searchWords).toEqual(['(?i)foo']);
const dataFrame = res.data[0] as DataFrame;
expect(dataFrame.rows[0][1]).toBe('hello');
expect(dataFrame.meta.limit).toBe(20);
expect(dataFrame.meta.searchWords).toEqual(['(?i)foo']);
done();
});
});

View File

@ -8,14 +8,14 @@ import { catchError, map } from 'rxjs/operators';
import * as dateMath from '@grafana/ui/src/utils/datemath';
import { addLabelToSelector } from 'app/plugins/datasource/prometheus/add_label_to_query';
import LanguageProvider from './language_provider';
import { logStreamToSeriesData } from './result_transformer';
import { logStreamToDataFrame } from './result_transformer';
import { formatQuery, parseQuery, getHighlighterExpressionsFromQuery } from './query_utils';
// Types
import {
PluginMeta,
DataQueryRequest,
SeriesData,
DataFrame,
DataSourceApi,
DataSourceInstanceSettings,
DataQueryError,
@ -147,25 +147,25 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return error;
};
processResult = (data: any, target: any): SeriesData[] => {
const series: SeriesData[] = [];
processResult = (data: any, target: any): DataFrame[] => {
const series: DataFrame[] = [];
if (Object.keys(data).length === 0) {
return series;
}
if (!data.streams) {
return [{ ...logStreamToSeriesData(data), refId: target.refId }];
return [{ ...logStreamToDataFrame(data), refId: target.refId }];
}
for (const stream of data.streams || []) {
const seriesData = logStreamToSeriesData(stream);
seriesData.refId = target.refId;
seriesData.meta = {
const dataFrame = logStreamToDataFrame(stream);
dataFrame.refId = target.refId;
dataFrame.meta = {
searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)),
limit: this.maxLines,
};
series.push(seriesData);
series.push(dataFrame);
}
return series;
@ -233,7 +233,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
);
return Promise.all(queries).then((results: any[]) => {
let series: SeriesData[] = [];
let series: DataFrame[] = [];
for (let i = 0; i < results.length; i++) {
const result = results[i];
@ -328,14 +328,14 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
(options && options.limit) || 10,
(options && options.direction) || 'BACKWARD'
);
const series: SeriesData[] = [];
const series: DataFrame[] = [];
try {
const result = await this._request('/api/prom/query', target);
if (result.data) {
for (const stream of result.data.streams || []) {
const seriesData = logStreamToSeriesData(stream);
series.push(seriesData);
const dataFrame = logStreamToDataFrame(stream);
series.push(dataFrame);
}
}
if (options && options.direction === 'FORWARD') {

View File

@ -1,6 +1,6 @@
import { logStreamToSeriesData } from './result_transformer';
import { logStreamToDataFrame } from './result_transformer';
describe('convert loki response to SeriesData', () => {
describe('convert loki response to DataFrame', () => {
const streams = [
{
labels: '{foo="bar"}',
@ -22,7 +22,7 @@ describe('convert loki response to SeriesData', () => {
},
];
it('converts streams to series', () => {
const data = streams.map(stream => logStreamToSeriesData(stream));
const data = streams.map(stream => logStreamToDataFrame(stream));
expect(data.length).toBe(2);
expect(data[0].labels['foo']).toEqual('bar');

View File

@ -1,7 +1,7 @@
import { LokiLogsStream } from './types';
import { SeriesData, parseLabels, FieldType, Labels } from '@grafana/ui';
import { DataFrame, parseLabels, FieldType, Labels } from '@grafana/ui';
export function logStreamToSeriesData(stream: LokiLogsStream): SeriesData {
export function logStreamToDataFrame(stream: LokiLogsStream): DataFrame {
let labels: Labels = stream.parsedLabels;
if (!labels && stream.labels) {
labels = parseLabels(stream.labels);
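For context, the renamed transformer turns one Loki stream into one DataFrame, as the test file just above shows. An illustrative call (the entry shape assumes LokiLogsStream entries carry ts and line; the values are made up):

import { logStreamToDataFrame } from './result_transformer';

// One stream becomes one DataFrame; the label string is parsed into frame.labels.
const frame = logStreamToDataFrame({
  labels: '{foo="bar"}',
  entries: [{ ts: '1970-01-01T00:00:01Z', line: 'WARN boooo' }],
});
console.log(frame.labels['foo']); // 'bar', per the test above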

View File

@ -15,7 +15,7 @@ import BracesPlugin from 'app/features/explore/slate-plugins/braces';
import QueryField, { TypeaheadInput, QueryFieldState } from 'app/features/explore/QueryField';
import { PromQuery, PromContext, PromOptions } from '../types';
import { CancelablePromise, makePromiseCancelable } from 'app/core/utils/CancelablePromise';
import { ExploreQueryFieldProps, DataSourceStatus, QueryHint, isSeriesData, toLegacyResponseData } from '@grafana/ui';
import { ExploreQueryFieldProps, DataSourceStatus, QueryHint, isDataFrame, toLegacyResponseData } from '@grafana/ui';
import { PrometheusDatasource } from '../datasource';
const HISTOGRAM_GROUP = '__histograms__';
@ -181,7 +181,7 @@ class PromQueryField extends React.PureComponent<PromQueryFieldProps, PromQueryF
return;
}
const result = isSeriesData(queryResponse.series[0])
const result = isDataFrame(queryResponse.series[0])
? queryResponse.series.map(toLegacyResponseData)
: queryResponse.series;
const hints = datasource.getQueryHints(query, result);

View File

@ -2,7 +2,7 @@ import defaults from 'lodash/defaults';
import {
DataQueryRequest,
FieldType,
SeriesData,
DataFrame,
DataQueryResponse,
DataQueryError,
DataStreamObserver,
@ -171,14 +171,14 @@ export class SignalWorker extends StreamWorker {
return row;
};
initBuffer(refId: string): SeriesData {
initBuffer(refId: string): DataFrame {
const { speed, buffer } = this.query;
const data = {
fields: [{ name: 'Time', type: FieldType.time }, { name: 'Value', type: FieldType.number }],
rows: [],
refId,
name: 'Signal ' + refId,
} as SeriesData;
} as DataFrame;
for (let i = 0; i < this.bands; i++) {
const suffix = this.bands > 1 ? ` ${i + 1}` : '';
@ -260,7 +260,7 @@ export class FetchWorker extends StreamWorker {
return this.reader.read().then(this.processChunk);
};
onHeader = (series: SeriesData) => {
onHeader = (series: DataFrame) => {
series.refId = this.refId;
this.stream.series = [series];
};
@ -323,14 +323,14 @@ export class LogsWorker extends StreamWorker {
return [time, '[' + this.getRandomLogLevel() + '] ' + this.getRandomLine()];
};
initBuffer(refId: string): SeriesData {
initBuffer(refId: string): DataFrame {
const { speed, buffer } = this.query;
const data = {
fields: [{ name: 'Time', type: FieldType.time }, { name: 'Line', type: FieldType.string }],
rows: [],
refId,
name: 'Logs ' + refId,
} as SeriesData;
} as DataFrame;
const request = this.stream.request;

View File

@ -1,10 +1,10 @@
import _ from 'lodash';
import { TimeRange, colors, getColorFromHexRgbOrName, FieldCache, FieldType, Field, SeriesData } from '@grafana/ui';
import { TimeRange, colors, getColorFromHexRgbOrName, FieldCache, FieldType, Field, DataFrame } from '@grafana/ui';
import TimeSeries from 'app/core/time_series2';
import config from 'app/core/config';
type Options = {
dataList: SeriesData[];
dataList: DataFrame[];
range?: TimeRange;
};

View File

@ -11,8 +11,8 @@ import { DataProcessor } from './data_processor';
import { axesEditorComponent } from './axes_editor';
import config from 'app/core/config';
import TimeSeries from 'app/core/time_series2';
import { getColorFromHexRgbOrName, LegacyResponseData, SeriesData, DataLink, VariableSuggestion } from '@grafana/ui';
import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState';
import { getColorFromHexRgbOrName, LegacyResponseData, DataFrame, DataLink, VariableSuggestion } from '@grafana/ui';
import { getProcessedDataFrame } from 'app/features/dashboard/state/PanelQueryState';
import { PanelQueryRunnerFormat } from 'app/features/dashboard/state/PanelQueryRunner';
import { GraphContextMenuCtrl } from './GraphContextMenuCtrl';
import { getDataLinksVariableSuggestions } from 'app/features/panel/panellinks/link_srv';
@ -26,7 +26,7 @@ class GraphCtrl extends MetricsPanelCtrl {
renderError: boolean;
hiddenSeries: any = {};
seriesList: TimeSeries[] = [];
dataList: SeriesData[] = [];
dataList: DataFrame[] = [];
annotations: any = [];
alertState: any;
@ -209,12 +209,12 @@ class GraphCtrl extends MetricsPanelCtrl {
// This should only be called from the snapshot callback
onDataReceived(dataList: LegacyResponseData[]) {
this.handleSeriesData(getProcessedSeriesData(dataList));
this.handleDataFrame(getProcessedDataFrame(dataList));
}
// Directly support SeriesData skipping event callbacks
handleSeriesData(data: SeriesData[]) {
super.handleSeriesData(data);
// Directly support DataFrame skipping event callbacks
handleDataFrame(data: DataFrame[]) {
super.handleDataFrame(data);
this.dataList = data;
this.seriesList = this.processor.getSeriesList({

View File

@ -1,5 +1,5 @@
import { DataProcessor } from '../data_processor';
import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState';
import { getProcessedDataFrame } from 'app/features/dashboard/state/PanelQueryState';
describe('Graph DataProcessor', () => {
const panel: any = {
@ -11,7 +11,7 @@ describe('Graph DataProcessor', () => {
describe('getTimeSeries from LegacyResponseData', () => {
// Try each type of data
const dataList = getProcessedSeriesData([
const dataList = getProcessedDataFrame([
{
alias: 'First (time_series)',
datapoints: [[1, 1001], [2, 1002], [3, 1003]],

View File

@ -319,19 +319,19 @@ export class HeatmapCtrl extends MetricsPanelCtrl {
this.render();
}
seriesHandler(seriesData: any) {
if (seriesData.datapoints === undefined) {
seriesHandler(dataFrame: any) {
if (dataFrame.datapoints === undefined) {
throw new Error('Heatmap error: data should be a time series');
}
const series = new TimeSeries({
datapoints: seriesData.datapoints,
alias: seriesData.target,
datapoints: dataFrame.datapoints,
alias: dataFrame.target,
});
series.flotpairs = series.getFlotPairs(this.panel.nullPointMode);
const datapoints = seriesData.datapoints || [];
const datapoints = dataFrame.datapoints || [];
if (datapoints && datapoints.length > 0) {
const last = datapoints[datapoints.length - 1][1];
const from = this.range.from;

View File

@ -140,10 +140,10 @@ class SingleStatCtrl extends MetricsPanelCtrl {
this.render();
}
seriesHandler(seriesData: any) {
seriesHandler(dataFrame: any) {
const series = new TimeSeries({
datapoints: seriesData.datapoints || [],
alias: seriesData.target,
datapoints: dataFrame.datapoints || [],
alias: dataFrame.target,
});
series.flotpairs = series.getFlotPairs(this.panel.nullPointMode);