Refactor: rename SeriesData to DataFrame (#17854)
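The change is a type and function rename only. For reference, a minimal sketch of the row-based shape the renamed DataFrame keeps at this point in the refactor (fields + rows, as used throughout the diff below); the example values are made up for illustration:

import { DataFrame, FieldType } from '@grafana/ui';

// Formerly typed as SeriesData; only the name changes in this commit.
const frame: DataFrame = {
  refId: 'A',
  name: 'example',
  fields: [{ name: 'Time', type: FieldType.time }, { name: 'Value', type: FieldType.number }],
  rows: [[1560000000000, 1], [1560000001000, 2]],
};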
@@ -6,7 +6,7 @@ import {
TimeSeries,
Labels,
LogLevel,
- SeriesData,
+ DataFrame,
findCommonLabels,
findUniqueLabels,
getLogLevel,

@@ -250,15 +250,15 @@ export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): Time
});
}

- function isLogsData(series: SeriesData) {
+ function isLogsData(series: DataFrame) {
return series.fields.some(f => f.type === FieldType.time) && series.fields.some(f => f.type === FieldType.string);
}

- export function seriesDataToLogsModel(seriesData: SeriesData[], intervalMs: number): LogsModel {
- const metricSeries: SeriesData[] = [];
- const logSeries: SeriesData[] = [];
+ export function dataFrameToLogsModel(dataFrame: DataFrame[], intervalMs: number): LogsModel {
+ const metricSeries: DataFrame[] = [];
+ const logSeries: DataFrame[] = [];

- for (const series of seriesData) {
+ for (const series of dataFrame) {
if (isLogsData(series)) {
logSeries.push(series);
continue;

@@ -289,7 +289,7 @@ export function seriesDataToLogsModel(seriesData: SeriesData[], intervalMs: numb
};
}

- export function logSeriesToLogsModel(logSeries: SeriesData[]): LogsModel {
+ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
if (logSeries.length === 0) {
return undefined;
}

@@ -355,7 +355,7 @@ export function logSeriesToLogsModel(logSeries: SeriesData[]): LogsModel {
}

export function processLogSeriesRow(
- series: SeriesData,
+ series: DataFrame,
fieldCache: FieldCache,
rowIndex: number,
uniqueLabels: Labels

@@ -1,11 +1,11 @@
- import { SeriesData, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/ui';
+ import { DataFrame, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/ui';
import {
dedupLogRows,
calculateFieldStats,
calculateLogsLabelStats,
getParser,
LogsParsers,
- seriesDataToLogsModel,
+ dataFrameToLogsModel,
} from '../logs_model';

describe('dedupLogRows()', () => {

@@ -337,23 +337,23 @@ const emptyLogsModel = {
series: [],
};

- describe('seriesDataToLogsModel', () => {
+ describe('dataFrameToLogsModel', () => {
it('given empty series should return empty logs model', () => {
- expect(seriesDataToLogsModel([] as SeriesData[], 0)).toMatchObject(emptyLogsModel);
+ expect(dataFrameToLogsModel([] as DataFrame[], 0)).toMatchObject(emptyLogsModel);
});

it('given series without correct series name should return empty logs model', () => {
- const series: SeriesData[] = [
+ const series: DataFrame[] = [
{
fields: [],
rows: [],
},
];
- expect(seriesDataToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
+ expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});

it('given series without a time field should return empty logs model', () => {
- const series: SeriesData[] = [
+ const series: DataFrame[] = [
{
fields: [
{

@@ -364,11 +364,11 @@ describe('seriesDataToLogsModel', () => {
rows: [],
},
];
- expect(seriesDataToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
+ expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});

it('given series without a string field should return empty logs model', () => {
- const series: SeriesData[] = [
+ const series: DataFrame[] = [
{
fields: [
{

@@ -379,11 +379,11 @@ describe('seriesDataToLogsModel', () => {
rows: [],
},
];
- expect(seriesDataToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
+ expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
});

it('given one series should return expected logs model', () => {
- const series: SeriesData[] = [
+ const series: DataFrame[] = [
{
labels: {
filename: '/var/log/grafana/grafana.log',

@@ -414,7 +414,7 @@ describe('seriesDataToLogsModel', () => {
},
},
];
- const logsModel = seriesDataToLogsModel(series, 0);
+ const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeFalsy();
expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([

@@ -449,7 +449,7 @@ describe('seriesDataToLogsModel', () => {
});

it('given one series without labels should return expected logs model', () => {
- const series: SeriesData[] = [
+ const series: DataFrame[] = [
{
fields: [
{

@@ -468,7 +468,7 @@ describe('seriesDataToLogsModel', () => {
rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']],
},
];
- const logsModel = seriesDataToLogsModel(series, 0);
+ const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.rows).toHaveLength(1);
expect(logsModel.rows).toMatchObject([
{

@@ -481,7 +481,7 @@ describe('seriesDataToLogsModel', () => {
});

it('given multiple series should return expected logs model', () => {
- const series: SeriesData[] = [
+ const series: DataFrame[] = [
{
labels: {
foo: 'bar',

@@ -520,7 +520,7 @@ describe('seriesDataToLogsModel', () => {
rows: [['1970-01-01T00:00:00Z', 'INFO 1'], ['1970-01-01T00:00:02Z', 'INFO 2']],
},
];
- const logsModel = seriesDataToLogsModel(series, 0);
+ const logsModel = dataFrameToLogsModel(series, 0);
expect(logsModel.hasUniqueLabels).toBeTruthy();
expect(logsModel.rows).toHaveLength(3);
expect(logsModel.rows).toMatchObject([

@@ -11,7 +11,7 @@ import ErrorBoundary from 'app/core/components/ErrorBoundary/ErrorBoundary';
import { getTimeSrv, TimeSrv } from '../services/TimeSrv';
import { applyPanelTimeOverrides, calculateInnerPanelHeight } from 'app/features/dashboard/utils/panel';
import { profiler } from 'app/core/profiler';
- import { getProcessedSeriesData } from '../state/PanelQueryState';
+ import { getProcessedDataFrame } from '../state/PanelQueryState';
import templateSrv from 'app/features/templating/template_srv';
import config from 'app/core/config';

@@ -71,7 +71,7 @@ export class PanelChrome extends PureComponent<Props, State> {
this.setState({
data: {
state: LoadingState.Done,
- series: getProcessedSeriesData(panel.snapshotData),
+ series: getProcessedDataFrame(panel.snapshotData),
},
isFirstLoad: false,
});

@@ -1,4 +1,4 @@
- import { toDataQueryError, PanelQueryState, getProcessedSeriesData } from './PanelQueryState';
+ import { toDataQueryError, PanelQueryState, getProcessedDataFrame } from './PanelQueryState';
import { MockDataSourceApi } from 'test/mocks/datasource_srv';
import { DataQueryResponse, LoadingState } from '@grafana/ui';
import { getQueryOptions } from 'test/helpers/getQueryOptions';

@@ -53,7 +53,7 @@ describe('PanelQueryState', () => {
});
});

- describe('getProcessedSeriesData', () => {
+ describe('getProcessedDataFrame', () => {
it('converts timeseries to table skipping nulls', () => {
const input1 = {
target: 'Field Name',

@@ -64,7 +64,7 @@ describe('getProcessedSeriesData', () => {
target: '',
datapoints: [[100, 1], [200, 2]],
};
- const data = getProcessedSeriesData([null, input1, input2, null, null]);
+ const data = getProcessedDataFrame([null, input1, input2, null, null]);
expect(data.length).toBe(2);
expect(data[0].fields[0].name).toBe(input1.target);
expect(data[0].rows).toBe(input1.datapoints);

@@ -82,10 +82,10 @@ describe('getProcessedSeriesData', () => {
});

it('supports null values from query OK', () => {
- expect(getProcessedSeriesData([null, null, null, null])).toEqual([]);
- expect(getProcessedSeriesData(undefined)).toEqual([]);
- expect(getProcessedSeriesData((null as unknown) as any[])).toEqual([]);
- expect(getProcessedSeriesData([])).toEqual([]);
+ expect(getProcessedDataFrame([null, null, null, null])).toEqual([]);
+ expect(getProcessedDataFrame(undefined)).toEqual([]);
+ expect(getProcessedDataFrame((null as unknown) as any[])).toEqual([]);
+ expect(getProcessedDataFrame([])).toEqual([]);
});
});

@@ -5,7 +5,7 @@ import isEqual from 'lodash/isEqual';
// Utils & Services
import { getBackendSrv } from 'app/core/services/backend_srv';
import * as dateMath from '@grafana/ui/src/utils/datemath';
- import { guessFieldTypes, toSeriesData, isSeriesData } from '@grafana/ui/src/utils';
+ import { guessFieldTypes, toDataFrame, isDataFrame } from '@grafana/ui/src/utils';

// Types
import {

@@ -17,7 +17,7 @@ import {
DataQueryError,
DataStreamObserver,
DataStreamState,
- SeriesData,
+ DataFrame,
DataQueryResponseData,
} from '@grafana/ui';

@@ -131,7 +131,7 @@ export class PanelQueryState {
this.response = {
state: LoadingState.Done,
request: this.request,
- series: this.sendSeries ? getProcessedSeriesData(resp.data) : [],
+ series: this.sendSeries ? getProcessedDataFrame(resp.data) : [],
legacy: this.sendLegacy ? translateToLegacyData(resp.data) : undefined,
};
resolve(this.validateStreamsAndGetPanelData());

@@ -182,7 +182,7 @@ export class PanelQueryState {
return;
}

- const series: SeriesData[] = [];
+ const series: DataFrame[] = [];

for (const stream of this.streams) {
if (stream.series) {

@@ -278,7 +278,7 @@ export class PanelQueryState {
response.legacy = response.series.map(v => toLegacyResponseData(v));
}
if (sendSeries && !response.series.length && response.legacy) {
- response.series = response.legacy.map(v => toSeriesData(v));
+ response.series = response.legacy.map(v => toDataFrame(v));
}
return this.validateStreamsAndGetPanelData();
}

@@ -333,7 +333,7 @@ export function toDataQueryError(err: any): DataQueryError {

function translateToLegacyData(data: DataQueryResponseData) {
return data.map(v => {
- if (isSeriesData(v)) {
+ if (isDataFrame(v)) {
return toLegacyResponseData(v);
}
return v;

@@ -345,15 +345,15 @@ function translateToLegacyData(data: DataQueryResponseData) {
*
* This is also used by PanelChrome for snapshot support
*/
- export function getProcessedSeriesData(results?: any[]): SeriesData[] {
+ export function getProcessedDataFrame(results?: any[]): DataFrame[] {
if (!results) {
return [];
}

- const series: SeriesData[] = [];
+ const series: DataFrame[] = [];
for (const r of results) {
if (r) {
- series.push(guessFieldTypes(toSeriesData(r)));
+ series.push(guessFieldTypes(toDataFrame(r)));
}
}

@@ -213,7 +213,7 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
// This implementation is more or less a copy of GraphPanel's logic.
// TODO: we need to use Graph's panel controller or split it into smaller
// controllers to remove code duplication. Right now we cant easily use that, since Explore
- // is not using SeriesData for graph yet
+ // is not using DataFrame for graph yet

const exclusive = event.ctrlKey || event.metaKey || event.shiftKey;

@@ -197,7 +197,7 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps)
const query = queries[index];
const datasourceStatus = datasourceError ? DataSourceStatus.Disconnected : DataSourceStatus.Connected;
const error = queryErrors.filter(queryError => queryError.refId === query.refId)[0];
- const series = graphResult ? graphResult : []; // TODO: use SeriesData
+ const series = graphResult ? graphResult : []; // TODO: use DataFrame
const queryResponse: PanelData = {
series,
state: loadingState,

@@ -9,7 +9,7 @@ import {
LogLevel,
TimeRange,
DataQueryError,
- SeriesData,
+ DataFrame,
LogsModel,
TimeSeries,
DataQueryResponseData,

@@ -239,7 +239,7 @@ export interface ProcessQueryResultsPayload {
datasourceId: string;
loadingState: LoadingState;
series?: DataQueryResponseData[];
- delta?: SeriesData[];
+ delta?: DataFrame[];
}

export interface RunQueriesBatchPayload {

@@ -248,7 +248,7 @@ export interface RunQueriesBatchPayload {
}

export interface LimitMessageRatePayload {
- series: SeriesData[];
+ series: DataFrame[];
exploreId: ExploreId;
datasourceId: string;
}

@@ -8,17 +8,17 @@ import {
updateTimeRangeAction,
runQueriesAction,
} from '../actionTypes';
- import { SeriesData, LoadingState } from '@grafana/ui';
+ import { DataFrame, LoadingState } from '@grafana/ui';
import { processQueryResultsEpic } from './processQueryResultsEpic';
import TableModel from 'app/core/table_model';

const testContext = () => {
- const serieA: SeriesData = {
+ const serieA: DataFrame = {
fields: [],
refId: 'A',
rows: [],
};
- const serieB: SeriesData = {
+ const serieB: DataFrame = {
fields: [],
refId: 'B',
rows: [],

@@ -14,10 +14,10 @@ import {
clearQueriesAction,
stateSaveAction,
} from '../actionTypes';
- import { LoadingState, DataQueryRequest, SeriesData, FieldType } from '@grafana/ui';
+ import { LoadingState, DataQueryRequest, DataFrame, FieldType } from '@grafana/ui';

const testContext = () => {
- const series: SeriesData[] = [
+ const series: DataFrame[] = [
{
fields: [
{

@@ -7,7 +7,7 @@ import {
DataStreamState,
LoadingState,
DataQueryResponse,
- SeriesData,
+ DataFrame,
DataQueryResponseData,
AbsoluteTimeRange,
} from '@grafana/ui';

@@ -46,7 +46,7 @@ interface ProcessResponseConfig {
now: number;
loadingState: LoadingState;
series?: DataQueryResponseData[];
- delta?: SeriesData[];
+ delta?: DataFrame[];
}

const processResponse = (config: ProcessResponseConfig) => {

@@ -3,17 +3,17 @@ import {
TableData,
isTableData,
LogsModel,
- toSeriesData,
+ toDataFrame,
guessFieldTypes,
DataQueryResponseData,
TimeSeries,
} from '@grafana/ui';

import { ExploreItemState, ExploreMode } from 'app/types/explore';
- import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState';
+ import { getProcessedDataFrame } from 'app/features/dashboard/state/PanelQueryState';
import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
import { sortLogsResult } from 'app/core/utils/explore';
- import { seriesDataToLogsModel } from 'app/core/logs_model';
+ import { dataFrameToLogsModel } from 'app/core/logs_model';
import { default as TimeSeries2 } from 'app/core/time_series2';
import { DataProcessor } from 'app/plugins/panel/graph/data_processor';

@@ -77,8 +77,8 @@ export class ResultProcessor {
return null;
}
const graphInterval = this.state.queryIntervals.intervalMs;
- const seriesData = this.rawData.map(result => guessFieldTypes(toSeriesData(result)));
- const newResults = this.rawData ? seriesDataToLogsModel(seriesData, graphInterval) : null;
+ const dataFrame = this.rawData.map(result => guessFieldTypes(toDataFrame(result)));
+ const newResults = this.rawData ? dataFrameToLogsModel(dataFrame, graphInterval) : null;

if (this.replacePreviousResults) {
return newResults;

@@ -107,7 +107,7 @@ export class ResultProcessor {
};

private makeTimeSeriesList = (rawData: any[]) => {
- const dataList = getProcessedSeriesData(rawData);
+ const dataList = getProcessedDataFrame(rawData);
const dataProcessor = new DataProcessor({ xaxis: {}, aliasColors: [] }); // Hack before we use GraphSeriesXY instead
const timeSeries = dataProcessor.getSeriesList({ dataList });

@@ -8,14 +8,14 @@ import { applyPanelTimeOverrides, getResolution } from 'app/features/dashboard/u
import { ContextSrv } from 'app/core/services/context_srv';
import {
toLegacyResponseData,
- isSeriesData,
+ isDataFrame,
LegacyResponseData,
TimeRange,
DataSourceApi,
PanelData,
LoadingState,
DataQueryResponse,
- SeriesData,
+ DataFrame,
} from '@grafana/ui';
import { Unsubscribable } from 'rxjs';
import { PanelModel } from 'app/features/dashboard/state';

@@ -150,7 +150,7 @@ class MetricsPanelCtrl extends PanelCtrl {
// The result should already be processed, but just in case
if (!data.legacy) {
data.legacy = data.series.map(v => {
- if (isSeriesData(v)) {
+ if (isDataFrame(v)) {
return toLegacyResponseData(v);
}
return v;

@@ -163,7 +163,7 @@ class MetricsPanelCtrl extends PanelCtrl {
data: data.legacy,
});
} else {
- this.handleSeriesData(data.series);
+ this.handleDataFrame(data.series);
}
},
};

@@ -222,14 +222,14 @@ class MetricsPanelCtrl extends PanelCtrl {
});
}

- handleSeriesData(data: SeriesData[]) {
+ handleDataFrame(data: DataFrame[]) {
this.loading = false;

if (this.dashboard && this.dashboard.snapshot) {
this.panel.snapshotData = data;
}

- // Subclasses that asked for SeriesData will override
+ // Subclasses that asked for DataFrame will override
}

handleQueryResult(result: DataQueryResponse) {

@@ -2,7 +2,7 @@ import _ from 'lodash';
import flatten from 'app/core/utils/flatten';
import * as queryDef from './query_def';
import TableModel from 'app/core/table_model';
- import { SeriesData, DataQueryResponse, toSeriesData, FieldType } from '@grafana/ui';
+ import { DataFrame, DataQueryResponse, toDataFrame, FieldType } from '@grafana/ui';

export class ElasticResponse {
constructor(private targets, private response) {

@@ -414,7 +414,7 @@ export class ElasticResponse {
}

getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
- const seriesData: SeriesData[] = [];
+ const dataFrame: DataFrame[] = [];
const docs: any[] = [];

for (let n = 0; n < this.response.responses.length; n++) {

@@ -462,7 +462,7 @@ export class ElasticResponse {

if (docs.length > 0) {
propNames = propNames.sort();
- const series: SeriesData = {
+ const series: DataFrame = {
fields: [
{
name: this.targets[0].timeField,

@@ -527,7 +527,7 @@ export class ElasticResponse {
series.rows.push(row);
}

- seriesData.push(series);
+ dataFrame.push(series);
}

if (response.aggregations) {

@@ -541,13 +541,13 @@ export class ElasticResponse {
this.nameSeries(tmpSeriesList, target);

for (let y = 0; y < tmpSeriesList.length; y++) {
- const series = toSeriesData(tmpSeriesList[y]);
+ const series = toDataFrame(tmpSeriesList[y]);
series.labels = {};
- seriesData.push(series);
+ dataFrame.push(series);
}
}
}

- return { data: seriesData };
+ return { data: dataFrame };
}
}

@@ -80,15 +80,15 @@ export default class ResponseParser {
return id.substring(startIndex, endIndex);
}

- static convertDataToPoints(timeSeriesData) {
+ static convertDataToPoints(timeDataFrame) {
const dataPoints: any[] = [];

- for (let k = 0; k < timeSeriesData.length; k++) {
- const epoch = ResponseParser.dateTimeToEpoch(timeSeriesData[k].timeStamp);
- const aggKey = ResponseParser.getKeyForAggregationField(timeSeriesData[k]);
+ for (let k = 0; k < timeDataFrame.length; k++) {
+ const epoch = ResponseParser.dateTimeToEpoch(timeDataFrame[k].timeStamp);
+ const aggKey = ResponseParser.getKeyForAggregationField(timeDataFrame[k]);

if (aggKey) {
- dataPoints.push([timeSeriesData[k][aggKey], epoch]);
+ dataPoints.push([timeDataFrame[k][aggKey], epoch]);
}
}

@@ -6,7 +6,7 @@ import './editor/editor_component';

import { TemplateSrv } from 'app/features/templating/template_srv';
import { auto } from 'angular';
- import { SeriesData } from '@grafana/ui';
+ import { DataFrame } from '@grafana/ui';

export interface ResultFormat {
text: string;

@@ -125,7 +125,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
}
}

- onDataReceived(dataList: SeriesData[]) {
+ onDataReceived(dataList: DataFrame[]) {
this.lastQueryError = undefined;
this.lastQuery = '';

@@ -4,7 +4,7 @@ import React, { PureComponent } from 'react';
// Types
import { InputOptions } from './types';

- import { DataSourcePluginOptionsEditorProps, DataSourceSettings, SeriesData, TableInputCSV, toCSV } from '@grafana/ui';
+ import { DataSourcePluginOptionsEditorProps, DataSourceSettings, DataFrame, TableInputCSV, toCSV } from '@grafana/ui';

type InputSettings = DataSourceSettings<InputOptions>;

@@ -27,7 +27,7 @@ export class InputConfigEditor extends PureComponent<Props, State> {
}
}

- onSeriesParsed = (data: SeriesData[], text: string) => {
+ onSeriesParsed = (data: DataFrame[], text: string) => {
const { options, onOptionsChange } = this.props;
if (!data) {
data = [

@@ -1,4 +1,4 @@
- import InputDatasource, { describeSeriesData } from './InputDatasource';
+ import InputDatasource, { describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';
import { readCSV, DataSourceInstanceSettings, PluginMeta } from '@grafana/ui';
import { getQueryOptions } from 'test/helpers/getQueryOptions';

@@ -32,11 +32,11 @@ describe('InputDatasource', () => {
});
});

- test('SeriesData descriptions', () => {
- expect(describeSeriesData([])).toEqual('');
- expect(describeSeriesData(null)).toEqual('');
+ test('DataFrame descriptions', () => {
+ expect(describeDataFrame([])).toEqual('');
+ expect(describeDataFrame(null)).toEqual('');
expect(
- describeSeriesData([
+ describeDataFrame([
{
name: 'x',
fields: [{ name: 'a' }],

@@ -1,7 +1,7 @@
// Types
import {
DataQueryRequest,
- SeriesData,
+ DataFrame,
DataQueryResponse,
DataSourceApi,
DataSourceInstanceSettings,

@@ -10,7 +10,7 @@ import {
import { InputQuery, InputOptions } from './types';

export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
- data: SeriesData[];
+ data: DataFrame[];

constructor(instanceSettings: DataSourceInstanceSettings<InputOptions>) {
super(instanceSettings);

@@ -23,9 +23,9 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
*/
getQueryDisplayText(query: InputQuery): string {
if (query.data) {
- return 'Panel Data: ' + describeSeriesData(query.data);
+ return 'Panel Data: ' + describeDataFrame(query.data);
}
- return `Shared Data From: ${this.name} (${describeSeriesData(this.data)})`;
+ return `Shared Data From: ${this.name} (${describeDataFrame(this.data)})`;
}

metricFindQuery(query: string, options?: any): Promise<MetricFindValue[]> {

@@ -44,7 +44,7 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
}

query(options: DataQueryRequest<InputQuery>): Promise<DataQueryResponse> {
- const results: SeriesData[] = [];
+ const results: DataFrame[] = [];
for (const query of options.targets) {
if (query.hide) {
continue;

@@ -83,7 +83,7 @@ export class InputDatasource extends DataSourceApi<InputQuery, InputOptions> {
}
}

- export function describeSeriesData(data: SeriesData[]): string {
+ export function describeDataFrame(data: DataFrame[]): string {
if (!data || !data.length) {
return '';
}

@@ -2,10 +2,10 @@
import React, { PureComponent } from 'react';

// Types
- import { InputDatasource, describeSeriesData } from './InputDatasource';
+ import { InputDatasource, describeDataFrame } from './InputDatasource';
import { InputQuery, InputOptions } from './types';

- import { FormLabel, Select, QueryEditorProps, SelectOptionItem, SeriesData, TableInputCSV, toCSV } from '@grafana/ui';
+ import { FormLabel, Select, QueryEditorProps, SelectOptionItem, DataFrame, TableInputCSV, toCSV } from '@grafana/ui';

type Props = QueryEditorProps<InputDatasource, InputQuery, InputOptions>;

@@ -31,7 +31,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {

onSourceChange = (item: SelectOptionItem<string>) => {
const { datasource, query, onChange, onRunQuery } = this.props;
- let data: SeriesData[] | undefined = undefined;
+ let data: DataFrame[] | undefined = undefined;
if (item.value === 'panel') {
if (query.data) {
return;

@@ -51,7 +51,7 @@ export class InputQueryEditor extends PureComponent<Props, State> {
onRunQuery();
};

- onSeriesParsed = (data: SeriesData[], text: string) => {
+ onSeriesParsed = (data: DataFrame[], text: string) => {
const { query, onChange, onRunQuery } = this.props;
this.setState({ text });
if (!data) {

@@ -80,10 +80,10 @@ export class InputQueryEditor extends PureComponent<Props, State> {

<div className="btn btn-link">
{query.data ? (
- describeSeriesData(query.data)
+ describeDataFrame(query.data)
) : (
<a href={`datasources/edit/${id}/`}>
- {name}: {describeSeriesData(datasource.data)}
+ {name}: {describeDataFrame(datasource.data)}
<i className="fa fa-pencil-square-o" />
</a>
)}

@@ -1,11 +1,11 @@
- import { DataQuery, SeriesData, DataSourceJsonData } from '@grafana/ui/src/types';
+ import { DataQuery, DataFrame, DataSourceJsonData } from '@grafana/ui/src/types';

export interface InputQuery extends DataQuery {
// Data saved in the panel
- data?: SeriesData[];
+ data?: DataFrame[];
}

export interface InputOptions extends DataSourceJsonData {
// Saved in the datasource and download with bootData
- data?: SeriesData[];
+ data?: DataFrame[];
}

@@ -1,7 +1,7 @@
import LokiDatasource from './datasource';
import { LokiQuery } from './types';
import { getQueryOptions } from 'test/helpers/getQueryOptions';
- import { SeriesData, DataSourceApi } from '@grafana/ui';
+ import { DataFrame, DataSourceApi } from '@grafana/ui';
import { BackendSrv } from 'app/core/services/backend_srv';
import { TemplateSrv } from 'app/features/templating/template_srv';

@@ -66,10 +66,10 @@ describe('LokiDatasource', () => {

const res = await ds.query(options);

- const seriesData = res.data[0] as SeriesData;
- expect(seriesData.rows[0][1]).toBe('hello');
- expect(seriesData.meta.limit).toBe(20);
- expect(seriesData.meta.searchWords).toEqual(['(?i)foo']);
+ const dataFrame = res.data[0] as DataFrame;
+ expect(dataFrame.rows[0][1]).toBe('hello');
+ expect(dataFrame.meta.limit).toBe(20);
+ expect(dataFrame.meta.searchWords).toEqual(['(?i)foo']);
done();
});
});

@@ -8,14 +8,14 @@ import { catchError, map } from 'rxjs/operators';
import * as dateMath from '@grafana/ui/src/utils/datemath';
import { addLabelToSelector } from 'app/plugins/datasource/prometheus/add_label_to_query';
import LanguageProvider from './language_provider';
- import { logStreamToSeriesData } from './result_transformer';
+ import { logStreamToDataFrame } from './result_transformer';
import { formatQuery, parseQuery, getHighlighterExpressionsFromQuery } from './query_utils';

// Types
import {
PluginMeta,
DataQueryRequest,
- SeriesData,
+ DataFrame,
DataSourceApi,
DataSourceInstanceSettings,
DataQueryError,

@@ -147,25 +147,25 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return error;
};

- processResult = (data: any, target: any): SeriesData[] => {
- const series: SeriesData[] = [];
+ processResult = (data: any, target: any): DataFrame[] => {
+ const series: DataFrame[] = [];

if (Object.keys(data).length === 0) {
return series;
}

if (!data.streams) {
- return [{ ...logStreamToSeriesData(data), refId: target.refId }];
+ return [{ ...logStreamToDataFrame(data), refId: target.refId }];
}

for (const stream of data.streams || []) {
- const seriesData = logStreamToSeriesData(stream);
- seriesData.refId = target.refId;
- seriesData.meta = {
+ const dataFrame = logStreamToDataFrame(stream);
+ dataFrame.refId = target.refId;
+ dataFrame.meta = {
searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)),
limit: this.maxLines,
};
- series.push(seriesData);
+ series.push(dataFrame);
}

return series;

@@ -233,7 +233,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
);

return Promise.all(queries).then((results: any[]) => {
- let series: SeriesData[] = [];
+ let series: DataFrame[] = [];

for (let i = 0; i < results.length; i++) {
const result = results[i];

@@ -328,14 +328,14 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
(options && options.limit) || 10,
(options && options.direction) || 'BACKWARD'
);
- const series: SeriesData[] = [];
+ const series: DataFrame[] = [];

try {
const result = await this._request('/api/prom/query', target);
if (result.data) {
for (const stream of result.data.streams || []) {
- const seriesData = logStreamToSeriesData(stream);
- series.push(seriesData);
+ const dataFrame = logStreamToDataFrame(stream);
+ series.push(dataFrame);
}
}
if (options && options.direction === 'FORWARD') {

@@ -1,6 +1,6 @@
- import { logStreamToSeriesData } from './result_transformer';
+ import { logStreamToDataFrame } from './result_transformer';

- describe('convert loki response to SeriesData', () => {
+ describe('convert loki response to DataFrame', () => {
const streams = [
{
labels: '{foo="bar"}',

@@ -22,7 +22,7 @@ describe('convert loki response to SeriesData', () => {
},
];
it('converts streams to series', () => {
- const data = streams.map(stream => logStreamToSeriesData(stream));
+ const data = streams.map(stream => logStreamToDataFrame(stream));

expect(data.length).toBe(2);
expect(data[0].labels['foo']).toEqual('bar');

@@ -1,7 +1,7 @@
import { LokiLogsStream } from './types';
- import { SeriesData, parseLabels, FieldType, Labels } from '@grafana/ui';
+ import { DataFrame, parseLabels, FieldType, Labels } from '@grafana/ui';

- export function logStreamToSeriesData(stream: LokiLogsStream): SeriesData {
+ export function logStreamToDataFrame(stream: LokiLogsStream): DataFrame {
let labels: Labels = stream.parsedLabels;
if (!labels && stream.labels) {
labels = parseLabels(stream.labels);

@@ -15,7 +15,7 @@ import BracesPlugin from 'app/features/explore/slate-plugins/braces';
import QueryField, { TypeaheadInput, QueryFieldState } from 'app/features/explore/QueryField';
import { PromQuery, PromContext, PromOptions } from '../types';
import { CancelablePromise, makePromiseCancelable } from 'app/core/utils/CancelablePromise';
- import { ExploreQueryFieldProps, DataSourceStatus, QueryHint, isSeriesData, toLegacyResponseData } from '@grafana/ui';
+ import { ExploreQueryFieldProps, DataSourceStatus, QueryHint, isDataFrame, toLegacyResponseData } from '@grafana/ui';
import { PrometheusDatasource } from '../datasource';

const HISTOGRAM_GROUP = '__histograms__';

@@ -181,7 +181,7 @@ class PromQueryField extends React.PureComponent<PromQueryFieldProps, PromQueryF
return;
}

- const result = isSeriesData(queryResponse.series[0])
+ const result = isDataFrame(queryResponse.series[0])
? queryResponse.series.map(toLegacyResponseData)
: queryResponse.series;
const hints = datasource.getQueryHints(query, result);

@@ -2,7 +2,7 @@ import defaults from 'lodash/defaults';
import {
DataQueryRequest,
FieldType,
- SeriesData,
+ DataFrame,
DataQueryResponse,
DataQueryError,
DataStreamObserver,

@@ -171,14 +171,14 @@ export class SignalWorker extends StreamWorker {
return row;
};

- initBuffer(refId: string): SeriesData {
+ initBuffer(refId: string): DataFrame {
const { speed, buffer } = this.query;
const data = {
fields: [{ name: 'Time', type: FieldType.time }, { name: 'Value', type: FieldType.number }],
rows: [],
refId,
name: 'Signal ' + refId,
- } as SeriesData;
+ } as DataFrame;

for (let i = 0; i < this.bands; i++) {
const suffix = this.bands > 1 ? ` ${i + 1}` : '';

@@ -260,7 +260,7 @@ export class FetchWorker extends StreamWorker {
return this.reader.read().then(this.processChunk);
};

- onHeader = (series: SeriesData) => {
+ onHeader = (series: DataFrame) => {
series.refId = this.refId;
this.stream.series = [series];
};

@@ -323,14 +323,14 @@ export class LogsWorker extends StreamWorker {
return [time, '[' + this.getRandomLogLevel() + '] ' + this.getRandomLine()];
};

- initBuffer(refId: string): SeriesData {
+ initBuffer(refId: string): DataFrame {
const { speed, buffer } = this.query;
const data = {
fields: [{ name: 'Time', type: FieldType.time }, { name: 'Line', type: FieldType.string }],
rows: [],
refId,
name: 'Logs ' + refId,
- } as SeriesData;
+ } as DataFrame;

const request = this.stream.request;

@@ -1,10 +1,10 @@
import _ from 'lodash';
- import { TimeRange, colors, getColorFromHexRgbOrName, FieldCache, FieldType, Field, SeriesData } from '@grafana/ui';
+ import { TimeRange, colors, getColorFromHexRgbOrName, FieldCache, FieldType, Field, DataFrame } from '@grafana/ui';
import TimeSeries from 'app/core/time_series2';
import config from 'app/core/config';

type Options = {
- dataList: SeriesData[];
+ dataList: DataFrame[];
range?: TimeRange;
};

@@ -11,8 +11,8 @@ import { DataProcessor } from './data_processor';
import { axesEditorComponent } from './axes_editor';
import config from 'app/core/config';
import TimeSeries from 'app/core/time_series2';
- import { getColorFromHexRgbOrName, LegacyResponseData, SeriesData, DataLink, VariableSuggestion } from '@grafana/ui';
- import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState';
+ import { getColorFromHexRgbOrName, LegacyResponseData, DataFrame, DataLink, VariableSuggestion } from '@grafana/ui';
+ import { getProcessedDataFrame } from 'app/features/dashboard/state/PanelQueryState';
import { PanelQueryRunnerFormat } from 'app/features/dashboard/state/PanelQueryRunner';
import { GraphContextMenuCtrl } from './GraphContextMenuCtrl';
import { getDataLinksVariableSuggestions } from 'app/features/panel/panellinks/link_srv';

@@ -26,7 +26,7 @@ class GraphCtrl extends MetricsPanelCtrl {
renderError: boolean;
hiddenSeries: any = {};
seriesList: TimeSeries[] = [];
- dataList: SeriesData[] = [];
+ dataList: DataFrame[] = [];
annotations: any = [];
alertState: any;

@@ -209,12 +209,12 @@ class GraphCtrl extends MetricsPanelCtrl {

// This should only be called from the snapshot callback
onDataReceived(dataList: LegacyResponseData[]) {
- this.handleSeriesData(getProcessedSeriesData(dataList));
+ this.handleDataFrame(getProcessedDataFrame(dataList));
}

- // Directly support SeriesData skipping event callbacks
- handleSeriesData(data: SeriesData[]) {
- super.handleSeriesData(data);
+ // Directly support DataFrame skipping event callbacks
+ handleDataFrame(data: DataFrame[]) {
+ super.handleDataFrame(data);

this.dataList = data;
this.seriesList = this.processor.getSeriesList({

@@ -1,5 +1,5 @@
import { DataProcessor } from '../data_processor';
- import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState';
+ import { getProcessedDataFrame } from 'app/features/dashboard/state/PanelQueryState';

describe('Graph DataProcessor', () => {
const panel: any = {

@@ -11,7 +11,7 @@ describe('Graph DataProcessor', () => {

describe('getTimeSeries from LegacyResponseData', () => {
// Try each type of data
- const dataList = getProcessedSeriesData([
+ const dataList = getProcessedDataFrame([
{
alias: 'First (time_series)',
datapoints: [[1, 1001], [2, 1002], [3, 1003]],

@@ -319,19 +319,19 @@ export class HeatmapCtrl extends MetricsPanelCtrl {
this.render();
}

- seriesHandler(seriesData: any) {
- if (seriesData.datapoints === undefined) {
+ seriesHandler(dataFrame: any) {
+ if (dataFrame.datapoints === undefined) {
throw new Error('Heatmap error: data should be a time series');
}

const series = new TimeSeries({
- datapoints: seriesData.datapoints,
- alias: seriesData.target,
+ datapoints: dataFrame.datapoints,
+ alias: dataFrame.target,
});

series.flotpairs = series.getFlotPairs(this.panel.nullPointMode);

- const datapoints = seriesData.datapoints || [];
+ const datapoints = dataFrame.datapoints || [];
if (datapoints && datapoints.length > 0) {
const last = datapoints[datapoints.length - 1][1];
const from = this.range.from;

@@ -140,10 +140,10 @@ class SingleStatCtrl extends MetricsPanelCtrl {
this.render();
}

- seriesHandler(seriesData: any) {
+ seriesHandler(dataFrame: any) {
const series = new TimeSeries({
- datapoints: seriesData.datapoints || [],
- alias: seriesData.target,
+ datapoints: dataFrame.datapoints || [],
+ alias: dataFrame.target,
});

series.flotpairs = series.getFlotPairs(this.panel.nullPointMode);
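
Taken together, the datasource-facing effect of the rename is that query() implementations now build and return DataFrame[] (still the row-based shape) instead of SeriesData[]. A minimal sketch under that assumption, with a hypothetical helper name; compare ElasticResponse.getLogs and LokiDatasource.processResult above:

import { DataFrame, DataQueryResponse, FieldType } from '@grafana/ui';

// Hypothetical helper for illustration only: wraps raw points in the renamed type.
function toExampleFrame(refId: string, points: Array<[number, number]>): DataFrame {
  return {
    refId,
    name: 'Example ' + refId,
    fields: [{ name: 'Time', type: FieldType.time }, { name: 'Value', type: FieldType.number }],
    rows: points,
  };
}

// A query result is then simply { data: DataFrame[] }, as in the elastic and loki changes above.
const response: DataQueryResponse = { data: [toExampleFrame('A', [[1560000000000, 1]])] };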