Prometheus: refactor to DataFrame (#27737)

* Add typing to prometheus response

* Refactor result_transformer to return DataFrame

* Refactor + test fixes

* Fix Prometheus data source test

* Modify heatmap function + add back tests

* Update performInstantQuery return type

* Remove duplicate code from result_transformer

* Address review comments

* Update metric labels retrieval logic to be safer
Zoltán Bedi 2020-10-01 12:58:06 +02:00 committed by GitHub
parent f97f12f69d
commit a230aa1031
8 changed files with 569 additions and 579 deletions
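
For orientation, a minimal TypeScript sketch of the shape change this refactor makes (illustrative values, not code from the commit): the old transformer emitted TimeSeries objects built from [value, timestamp] datapoints, while the new one emits DataFrames whose Time and Value fields are aligned by index, following the @grafana/data conventions used in the diffs below.

import { ArrayVector, DataFrame, FieldType, TimeSeries } from '@grafana/data';

// Legacy shape: one array of [value, timestamp] tuples per series.
const legacy: TimeSeries = {
  target: 'test{job="testjob"}',
  datapoints: [
    [3846, 1443454528000],
    [3848, 1443454530000],
  ],
};

// DataFrame shape: one field (column) per dimension, rows aligned by index.
const frame: DataFrame = {
  name: 'test{job="testjob"}',
  refId: 'A',
  length: 2,
  fields: [
    { name: 'Time', type: FieldType.time, config: {}, values: new ArrayVector([1443454528000, 1443454530000]) },
    { name: 'Value', type: FieldType.number, config: {}, values: new ArrayVector([3846, 3848]) },
  ],
};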

View File

@ -108,7 +108,7 @@ export interface FetchErrorDataProps {
export interface FetchError<T extends FetchErrorDataProps = any> { export interface FetchError<T extends FetchErrorDataProps = any> {
status: number; status: number;
statusText?: string; statusText?: string;
data: T | string; data: T;
cancelled?: boolean; cancelled?: boolean;
isHandled?: boolean; isHandled?: boolean;
config: BackendSrvRequest; config: BackendSrvRequest;
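
A hedged illustration of what the narrowed data: T typing buys callers, assuming the snippet lives next to the plugin's types.ts introduced later in this commit: the structured error fields can be read without first checking for a plain string.

import { FetchError } from '@grafana/runtime';
import { PromDataErrorResponse } from './types';

// With data typed as T rather than T | string, the Prometheus error payload is
// available as a structured object.
function describeError(err: FetchError<PromDataErrorResponse>): string {
  return `${err.status} ${err.statusText ?? ''}: ${err.data.error}`;
}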

View File

@ -681,32 +681,32 @@ describe('PrometheusDatasource', () => {
it('should be same length', () => { it('should be same length', () => {
expect(results.data.length).toBe(2); expect(results.data.length).toBe(2);
expect(results.data[0].datapoints.length).toBe((end - start) / step + 1); expect(results.data[0].length).toBe((end - start) / step + 1);
expect(results.data[1].datapoints.length).toBe((end - start) / step + 1); expect(results.data[1].length).toBe((end - start) / step + 1);
}); });
it('should fill null until first datapoint in response', () => { it('should fill null until first datapoint in response', () => {
expect(results.data[0].datapoints[0][1]).toBe(start * 1000); expect(results.data[0].fields[0].values.get(0)).toBe(start * 1000);
expect(results.data[0].datapoints[0][0]).toBe(null); expect(results.data[0].fields[1].values.get(0)).toBe(null);
expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000); expect(results.data[0].fields[0].values.get(1)).toBe((start + step * 1) * 1000);
expect(results.data[0].datapoints[1][0]).toBe(3846); expect(results.data[0].fields[1].values.get(1)).toBe(3846);
}); });
it('should fill null after last datapoint in response', () => { it('should fill null after last datapoint in response', () => {
const length = (end - start) / step + 1; const length = (end - start) / step + 1;
expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000); expect(results.data[0].fields[0].values.get(length - 2)).toBe((end - step * 1) * 1000);
expect(results.data[0].datapoints[length - 2][0]).toBe(3848); expect(results.data[0].fields[1].values.get(length - 2)).toBe(3848);
expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000); expect(results.data[0].fields[0].values.get(length - 1)).toBe(end * 1000);
expect(results.data[0].datapoints[length - 1][0]).toBe(null); expect(results.data[0].fields[1].values.get(length - 1)).toBe(null);
}); });
it('should fill null at gap between series', () => { it('should fill null at gap between series', () => {
expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000); expect(results.data[0].fields[0].values.get(2)).toBe((start + step * 2) * 1000);
expect(results.data[0].datapoints[2][0]).toBe(null); expect(results.data[0].fields[1].values.get(2)).toBe(null);
expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000); expect(results.data[1].fields[0].values.get(1)).toBe((start + step * 1) * 1000);
expect(results.data[1].datapoints[1][0]).toBe(null); expect(results.data[1].fields[1].values.get(1)).toBe(null);
expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000); expect(results.data[1].fields[0].values.get(3)).toBe((start + step * 3) * 1000);
expect(results.data[1].datapoints[3][0]).toBe(null); expect(results.data[1].fields[1].values.get(3)).toBe(null);
}); });
}); });
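
A small sketch of the frame layout these assertions rely on (hypothetical helper, not part of the commit): field 0 is the time column, field 1 is the value column, and each column's vector exposes get(index).

import { DataFrame } from '@grafana/data';

// Reads the [time, value] pair at a given row, mirroring the inline assertions above.
function pointAt(frame: DataFrame, index: number): [number, number | null] {
  return [frame.fields[0].values.get(index), frame.fields[1].values.get(index)];
}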

View File

@ -1,64 +1,47 @@
// Libraries
import cloneDeep from 'lodash/cloneDeep';
import LRU from 'lru-cache';
// Services & Utils
import { import {
AnnotationEvent, AnnotationEvent,
CoreApp, CoreApp,
DataQueryError, DataQueryError,
DataQueryRequest, DataQueryRequest,
DataQueryResponse, DataQueryResponse,
DataQueryResponseData,
DataSourceApi, DataSourceApi,
DataSourceInstanceSettings, DataSourceInstanceSettings,
dateMath, dateMath,
DateTime, DateTime,
LoadingState, LoadingState,
rangeUtil,
ScopedVars, ScopedVars,
TimeRange, TimeRange,
TimeSeries,
rangeUtil,
} from '@grafana/data'; } from '@grafana/data';
import { forkJoin, merge, Observable, of, throwError } from 'rxjs'; import { BackendSrvRequest, FetchError, getBackendSrv } from '@grafana/runtime';
import { catchError, filter, map, tap } from 'rxjs/operators';
import PrometheusMetricFindQuery from './metric_find_query';
import { ResultTransformer } from './result_transformer';
import PrometheusLanguageProvider from './language_provider';
import { BackendSrvRequest, getBackendSrv } from '@grafana/runtime';
import addLabelToQuery from './add_label_to_query';
import { getQueryHints } from './query_hints';
import { expandRecordingRules } from './language_utils';
// Types
import { PromOptions, PromQuery, PromQueryRequest } from './types';
import { safeStringifyValue } from 'app/core/utils/explore'; import { safeStringifyValue } from 'app/core/utils/explore';
import templateSrv from 'app/features/templating/template_srv';
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
import TableModel from 'app/core/table_model'; import templateSrv from 'app/features/templating/template_srv';
import { defaults } from 'lodash'; import cloneDeep from 'lodash/cloneDeep';
import defaults from 'lodash/defaults';
import LRU from 'lru-cache';
import { forkJoin, merge, Observable, of, pipe, throwError } from 'rxjs';
import { catchError, filter, map, tap } from 'rxjs/operators';
import addLabelToQuery from './add_label_to_query';
import PrometheusLanguageProvider from './language_provider';
import { expandRecordingRules } from './language_utils';
import PrometheusMetricFindQuery from './metric_find_query';
import { getQueryHints } from './query_hints';
import { getOriginalMetricName, renderTemplate, transform } from './result_transformer';
import {
isFetchErrorResponse,
PromDataErrorResponse,
PromDataSuccessResponse,
PromMatrixData,
PromOptions,
PromQuery,
PromQueryRequest,
PromScalarData,
PromVectorData,
} from './types';
export const ANNOTATION_QUERY_STEP_DEFAULT = '60s'; export const ANNOTATION_QUERY_STEP_DEFAULT = '60s';
export interface PromDataQueryResponse {
data: {
status: string;
data: {
resultType: string;
results?: DataQueryResponseData[];
result?: DataQueryResponseData[];
};
};
cancelled?: boolean;
}
export interface PromLabelQueryResponse {
data: {
status: string;
data: string[];
};
cancelled?: boolean;
}
export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions> { export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions> {
type: string; type: string;
editorSrc: string; editorSrc: string;
@ -73,7 +56,6 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
httpMethod: string; httpMethod: string;
languageProvider: PrometheusLanguageProvider; languageProvider: PrometheusLanguageProvider;
lookupsDisabled: boolean; lookupsDisabled: boolean;
resultTransformer: ResultTransformer;
customQueryParameters: any; customQueryParameters: any;
constructor(instanceSettings: DataSourceInstanceSettings<PromOptions>) { constructor(instanceSettings: DataSourceInstanceSettings<PromOptions>) {
@ -88,7 +70,6 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
this.queryTimeout = instanceSettings.jsonData.queryTimeout; this.queryTimeout = instanceSettings.jsonData.queryTimeout;
this.httpMethod = instanceSettings.jsonData.httpMethod || 'GET'; this.httpMethod = instanceSettings.jsonData.httpMethod || 'GET';
this.directUrl = instanceSettings.jsonData.directUrl; this.directUrl = instanceSettings.jsonData.directUrl;
this.resultTransformer = new ResultTransformer(templateSrv);
this.ruleMappings = {}; this.ruleMappings = {};
this.languageProvider = new PrometheusLanguageProvider(this); this.languageProvider = new PrometheusLanguageProvider(this);
this.lookupsDisabled = instanceSettings.jsonData.disableMetricsLookup ?? false; this.lookupsDisabled = instanceSettings.jsonData.disableMetricsLookup ?? false;
@ -172,38 +153,6 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
return templateSrv.variableExists(target.expr); return templateSrv.variableExists(target.expr);
} }
processResult = (
response: any,
query: PromQueryRequest,
target: PromQuery,
responseListLength: number,
scopedVars?: ScopedVars,
mixedQueries?: boolean
) => {
// Keeping original start/end for transformers
const transformerOptions = {
format: target.format,
step: query.step,
legendFormat: target.legendFormat,
start: query.start,
end: query.end,
query: query.expr,
responseListLength,
scopedVars,
refId: target.refId,
valueWithRefId: target.valueWithRefId,
meta: {
/** Fix for showing of Prometheus results in Explore table.
* We want to show result of instant query always in table and result of range query based on target.runAll;
*/
preferredVisualisationType: target.instant ? 'table' : mixedQueries ? 'graph' : undefined,
},
};
const series = this.resultTransformer.transform(response, transformerOptions);
return series;
};
prepareTargets = (options: DataQueryRequest<PromQuery>, start: number, end: number) => { prepareTargets = (options: DataQueryRequest<PromQuery>, start: number, end: number) => {
const queries: PromQueryRequest[] = []; const queries: PromQueryRequest[] = [];
const activeTargets: PromQuery[] = []; const activeTargets: PromQuery[] = [];
@ -283,17 +232,13 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
const subQueries = queries.map((query, index) => { const subQueries = queries.map((query, index) => {
const target = activeTargets[index]; const target = activeTargets[index];
let observable = query.instant const filterAndMapResponse = pipe(
? this.performInstantQuery(query, end)
: this.performTimeSeriesQuery(query, query.start, query.end);
return observable.pipe(
// Decrease the counter here. We assume that each request returns only single value and then completes // Decrease the counter here. We assume that each request returns only single value and then completes
// (should hold until there is some streaming requests involved). // (should hold until there is some streaming requests involved).
tap(() => runningQueriesCount--), tap(() => runningQueriesCount--),
filter((response: any) => (response.cancelled ? false : true)), filter((response: any) => (response.cancelled ? false : true)),
map((response: any) => { map((response: any) => {
const data = this.processResult(response, query, target, queries.length, undefined, mixedQueries); const data = transform(response, { query, target, responseListLength: queries.length, mixedQueries });
return { return {
data, data,
key: query.requestId, key: query.requestId,
@ -301,6 +246,12 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
} as DataQueryResponse; } as DataQueryResponse;
}) })
); );
if (query.instant) {
return this.performInstantQuery(query, end).pipe(filterAndMapResponse);
}
return this.performTimeSeriesQuery(query, query.start, query.end).pipe(filterAndMapResponse);
}); });
return merge(...subQueries); return merge(...subQueries);
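
The hunk above replaces the observable = query.instant ? ... : ... branching with a single operator chain built via the standalone rxjs pipe(), applied to whichever source observable is chosen; a stripped-down sketch of the same pattern with simplified types:

import { Observable, of, pipe } from 'rxjs';
import { filter, map } from 'rxjs/operators';

interface MaybeCancelled {
  cancelled?: boolean;
  data: number[];
}

// Defined once, reused for both the instant-query and the range-query observable.
const filterAndMap = pipe(
  filter((response: MaybeCancelled) => !response.cancelled),
  map((response: MaybeCancelled) => response.data)
);

const instant$: Observable<MaybeCancelled> = of({ data: [1, 2, 3] });
const values$ = instant$.pipe(filterAndMap); // emits [1, 2, 3]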
@ -313,24 +264,26 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
requestId: string, requestId: string,
scopedVars: ScopedVars scopedVars: ScopedVars
) { ) {
const observables: Array<Observable<Array<TableModel | TimeSeries>>> = queries.map((query, index) => { const observables = queries.map((query, index) => {
const target = activeTargets[index]; const target = activeTargets[index];
let observable = query.instant const filterAndMapResponse = pipe(
? this.performInstantQuery(query, end)
: this.performTimeSeriesQuery(query, query.start, query.end);
return observable.pipe(
filter((response: any) => (response.cancelled ? false : true)), filter((response: any) => (response.cancelled ? false : true)),
map((response: any) => { map((response: any) => {
const data = this.processResult(response, query, target, queries.length, scopedVars); const data = transform(response, { query, target, responseListLength: queries.length, scopedVars });
return data; return data;
}) })
); );
if (query.instant) {
return this.performInstantQuery(query, end).pipe(filterAndMapResponse);
}
return this.performTimeSeriesQuery(query, query.start, query.end).pipe(filterAndMapResponse);
}); });
return forkJoin(observables).pipe( return forkJoin(observables).pipe(
map((results: Array<Array<TableModel | TimeSeries>>) => { map(results => {
const data = results.reduce((result, current) => { const data = results.reduce((result, current) => {
return [...result, ...current]; return [...result, ...current];
}, []); }, []);
@ -465,8 +418,11 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
} }
} }
return this._request(url, data, { requestId: query.requestId, headers: query.headers }).pipe( return this._request<PromDataSuccessResponse<PromMatrixData>>(url, data, {
catchError(err => { requestId: query.requestId,
headers: query.headers,
}).pipe(
catchError((err: FetchError<PromDataErrorResponse<PromMatrixData>>) => {
if (err.cancelled) { if (err.cancelled) {
return of(err); return of(err);
} }
@ -493,8 +449,11 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
} }
} }
return this._request(url, data, { requestId: query.requestId, headers: query.headers }).pipe( return this._request<PromDataSuccessResponse<PromVectorData | PromScalarData>>(url, data, {
catchError(err => { requestId: query.requestId,
headers: query.headers,
}).pipe(
catchError((err: FetchError<PromDataErrorResponse<PromVectorData | PromScalarData>>) => {
if (err.cancelled) { if (err.cancelled) {
return of(err); return of(err);
} }
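
The two hunks above thread the Prometheus response and error types through the request pipeline; a hedged sketch of the same idea expressed directly against getBackendSrv().fetch<T>() (the URL and options are illustrative, not taken from the commit):

import { FetchError, getBackendSrv } from '@grafana/runtime';
import { of } from 'rxjs';
import { catchError } from 'rxjs/operators';
import { PromDataErrorResponse, PromDataSuccessResponse, PromMatrixData } from './types';

const rangeQuery$ = getBackendSrv()
  .fetch<PromDataSuccessResponse<PromMatrixData>>({
    url: '/api/datasources/proxy/1/api/v1/query_range', // illustrative proxy URL
    method: 'GET',
  })
  .pipe(
    // Cancelled requests are passed through, everything else is re-thrown,
    // mirroring the catchError handlers above.
    catchError((err: FetchError<PromDataErrorResponse<PromMatrixData>>) => {
      if (err.cancelled) {
        return of(err);
      }
      throw err;
    })
  );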
@ -587,17 +546,11 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
}; };
const query = this.createQuery(queryModel, queryOptions, start, end); const query = this.createQuery(queryModel, queryOptions, start, end);
const response = await this.performTimeSeriesQuery(query, query.start, query.end).toPromise();
const self = this;
const response: PromDataQueryResponse = await this.performTimeSeriesQuery(
query,
query.start,
query.end
).toPromise();
const eventList: AnnotationEvent[] = []; const eventList: AnnotationEvent[] = [];
const splitKeys = tagKeys.split(','); const splitKeys = tagKeys.split(',');
if (response.cancelled) { if (isFetchErrorResponse(response) && response.cancelled) {
return []; return [];
} }
@ -620,8 +573,8 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
value[0] = timestampValue; value[0] = timestampValue;
}); });
const activeValues = series.values.filter((value: Record<number, string>) => parseFloat(value[1]) >= 1); const activeValues = series.values.filter(value => parseFloat(value[1]) >= 1);
const activeValuesTimestamps: number[] = activeValues.map((value: number[]) => value[0]); const activeValuesTimestamps = activeValues.map(value => value[0]);
// Instead of creating singular annotation for each active event we group events into region if they are less // Instead of creating singular annotation for each active event we group events into region if they are less
// then `step` apart. // then `step` apart.
@ -644,9 +597,9 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
time: timestamp, time: timestamp,
timeEnd: timestamp, timeEnd: timestamp,
annotation, annotation,
title: self.resultTransformer.renderTemplate(titleFormat, series.metric), title: renderTemplate(titleFormat, series.metric),
tags, tags,
text: self.resultTransformer.renderTemplate(textFormat, series.metric), text: renderTemplate(textFormat, series.metric),
}; };
} }
@ -676,7 +629,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
const response = await this.performInstantQuery(query, now / 1000).toPromise(); const response = await this.performInstantQuery(query, now / 1000).toPromise();
return response.data.status === 'success' return response.data.status === 'success'
? { status: 'success', message: 'Data source is working' } ? { status: 'success', message: 'Data source is working' }
: { status: 'error', message: response.error }; : { status: 'error', message: response.data.error };
} }
interpolateVariablesInQueries(queries: PromQuery[], scopedVars: ScopedVars): PromQuery[] { interpolateVariablesInQueries(queries: PromQuery[], scopedVars: ScopedVars): PromQuery[] {
@ -764,7 +717,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
} }
getOriginalMetricName(labelData: { [key: string]: string }) { getOriginalMetricName(labelData: { [key: string]: string }) {
return this.resultTransformer.getOriginalMetricName(labelData); return getOriginalMetricName(labelData);
} }
} }
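
renderTemplate and getOriginalMetricName are now plain functions exported from result_transformer rather than ResultTransformer methods; a short usage sketch with made-up label data:

import { getOriginalMetricName, renderTemplate } from './result_transformer';

const labels = { __name__: 'http_requests_total', job: 'api', instance: 'localhost:9090' };

// '{{job}} on {{instance}}' -> 'api on localhost:9090'
const legend = renderTemplate('{{job}} on {{instance}}', labels);

// Rebuilds the selector, e.g. 'http_requests_total{job="api",instance="localhost:9090"}'.
// Note that getOriginalMetricName deletes __name__ from its argument, hence the copy.
const metricName = getOriginalMetricName({ ...labels });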

View File

@ -1,2 +0,0 @@
declare var test: any;
export default test;

View File

@ -1,7 +1,7 @@
import _ from 'lodash'; import _ from 'lodash';
import { map } from 'rxjs/operators'; import { map } from 'rxjs/operators';
import { MetricFindValue, TimeRange } from '@grafana/data'; import { MetricFindValue, TimeRange } from '@grafana/data';
import { PromDataQueryResponse, PrometheusDatasource } from './datasource'; import { PrometheusDatasource } from './datasource';
import { PromQueryRequest } from './types'; import { PromQueryRequest } from './types';
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
@ -137,7 +137,7 @@ export default class PrometheusMetricFindQuery {
const end = this.datasource.getPrometheusTime(this.range.to, true); const end = this.datasource.getPrometheusTime(this.range.to, true);
const instantQuery: PromQueryRequest = { expr: query } as PromQueryRequest; const instantQuery: PromQueryRequest = { expr: query } as PromQueryRequest;
return this.datasource.performInstantQuery(instantQuery, end).pipe( return this.datasource.performInstantQuery(instantQuery, end).pipe(
map((result: PromDataQueryResponse) => { map(result => {
return _.map(result.data.data.result, metricData => { return _.map(result.data.data.result, metricData => {
let text = metricData.metric.__name__ || ''; let text = metricData.metric.__name__ || '';
delete metricData.metric.__name__; delete metricData.metric.__name__;

View File

@ -1,38 +1,30 @@
import { ResultTransformer } from './result_transformer'; import { DataFrame } from '@grafana/data';
import { DataQueryResponseData } from '@grafana/data'; import { transform } from './result_transformer';
describe('Prometheus Result Transformer', () => { describe('Prometheus Result Transformer', () => {
const ctx: any = {}; const options: any = { target: {}, query: {} };
beforeEach(() => {
ctx.templateSrv = {
replace: (str: string) => str,
};
ctx.resultTransformer = new ResultTransformer(ctx.templateSrv);
});
describe('When nothing is returned', () => { describe('When nothing is returned', () => {
test('should return empty series', () => { it('should return empty array', () => {
const response = { const response = {
status: 'success', status: 'success',
data: { data: {
resultType: '', resultType: '',
result: (null as unknown) as DataQueryResponseData[], result: null,
}, },
}; };
const series = ctx.resultTransformer.transform({ data: response }, {}); const series = transform({ data: response } as any, options);
expect(series).toEqual([]); expect(series).toEqual([]);
}); });
test('should return empty table', () => { it('should return empty array', () => {
const response = { const response = {
status: 'success', status: 'success',
data: { data: {
resultType: '', resultType: '',
result: (null as unknown) as DataQueryResponseData[], result: null,
}, },
}; };
const table = ctx.resultTransformer.transform({ data: response }, { format: 'table' }); const result = transform({ data: response } as any, { ...options, target: { format: 'table' } });
expect(table).toMatchObject([{ type: 'table', rows: [] }]); expect(result).toHaveLength(0);
}); });
}); });
@ -44,48 +36,65 @@ describe('Prometheus Result Transformer', () => {
result: [ result: [
{ {
metric: { __name__: 'test', job: 'testjob' }, metric: { __name__: 'test', job: 'testjob' },
values: [[1443454528, '3846']], values: [
[1443454528, '3846'],
[1443454530, '3848'],
],
}, },
{ {
metric: { metric: {
__name__: 'test', __name__: 'test2',
instance: 'localhost:8080', instance: 'localhost:8080',
job: 'otherjob', job: 'otherjob',
}, },
values: [[1443454529, '3847']], values: [
[1443454529, '3847'],
[1443454531, '3849'],
],
}, },
], ],
}, },
}; };
it('should return table model', () => { it('should return data frame', () => {
const table = ctx.resultTransformer.transformMetricDataToTable(response.data.result, 0, 'A'); const result = transform({ data: response } as any, {
expect(table.type).toBe('table'); ...options,
expect(table.rows).toEqual([ target: {
[1443454528000, 'test', '', 'testjob', 3846], responseListLength: 0,
[1443454529000, 'test', 'localhost:8080', 'otherjob', 3847], refId: 'A',
format: 'table',
},
});
expect(result[0].fields[0].values.toArray()).toEqual([
1443454528000,
1443454530000,
1443454529000,
1443454531000,
]); ]);
expect(table.columns).toMatchObject([ expect(result[0].fields[0].name).toBe('Time');
{ text: 'Time', type: 'time' }, expect(result[0].fields[1].values.toArray()).toEqual(['test', 'test', 'test2', 'test2']);
{ text: '__name__', filterable: true }, expect(result[0].fields[1].name).toBe('__name__');
{ text: 'instance', filterable: true }, expect(result[0].fields[1].config.filterable).toBe(true);
{ text: 'job' }, expect(result[0].fields[2].values.toArray()).toEqual(['', '', 'localhost:8080', 'localhost:8080']);
{ text: 'Value' }, expect(result[0].fields[2].name).toBe('instance');
]); expect(result[0].fields[3].values.toArray()).toEqual(['testjob', 'testjob', 'otherjob', 'otherjob']);
expect(table.columns[4].filterable).toBeUndefined(); expect(result[0].fields[3].name).toBe('job');
expect(table.refId).toBe('A'); expect(result[0].fields[4].values.toArray()).toEqual([3846, 3848, 3847, 3849]);
expect(result[0].fields[4].name).toEqual('Value');
expect(result[0].refId).toBe('A');
}); });
it('should column title include refId if response count is more than 2', () => { it('should include refId if response count is more than 2', () => {
const table = ctx.resultTransformer.transformMetricDataToTable(response.data.result, 2, 'B'); const result = transform({ data: response } as any, {
expect(table.type).toBe('table'); ...options,
expect(table.columns).toMatchObject([ target: {
{ text: 'Time', type: 'time' }, refId: 'B',
{ text: '__name__' }, format: 'table',
{ text: 'instance' }, },
{ text: 'job' }, responseListLength: 2,
{ text: 'Value #B' }, });
]);
expect(result[0].fields[4].name).toEqual('Value #B');
}); });
}); });
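
A condensed sketch of the table layout these tests exercise, using a minimal matrix response shaped like the fixture above: for format: 'table' the transformer returns a single frame whose columns are Time, each label key seen across the series (sorted alphabetically), and finally Value (or Value #<refId> when several responses are combined).

import { transform } from './result_transformer';

const response: any = {
  data: {
    status: 'success',
    data: {
      resultType: 'matrix',
      result: [{ metric: { __name__: 'test', job: 'testjob' }, values: [[1443454528, '3846']] }],
    },
  },
};

const [tableFrame] = transform(response, {
  query: {} as any,
  target: { format: 'table', refId: 'A' } as any,
  responseListLength: 1,
});
// tableFrame.fields.map(f => f.name) -> ['Time', '__name__', 'job', 'Value']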
@ -103,31 +112,37 @@ describe('Prometheus Result Transformer', () => {
}, },
}; };
it('should return table model', () => { it('should return data frame', () => {
const table = ctx.resultTransformer.transformMetricDataToTable(response.data.result); const result = transform({ data: response } as any, { ...options, target: { format: 'table' } });
expect(table.type).toBe('table'); expect(result[0].fields[0].values.toArray()).toEqual([1443454528000]);
expect(table.rows).toEqual([[1443454528000, 'test', 'testjob', 3846]]); expect(result[0].fields[0].name).toBe('Time');
expect(table.columns).toMatchObject([ expect(result[0].fields[1].values.toArray()).toEqual(['test']);
{ text: 'Time', type: 'time' }, expect(result[0].fields[1].name).toBe('__name__');
{ text: '__name__' }, expect(result[0].fields[2].values.toArray()).toEqual(['testjob']);
{ text: 'job' }, expect(result[0].fields[2].name).toBe('job');
{ text: 'Value' }, expect(result[0].fields[3].values.toArray()).toEqual([3846]);
]); expect(result[0].fields[3].name).toEqual('Value');
}); });
it('should return table model with le label values parsed as numbers', () => { it('should return le label values parsed as numbers', () => {
const table = ctx.resultTransformer.transformMetricDataToTable([ const response = {
{ status: 'success',
metric: { le: '102' }, data: {
value: [1594908838, '0'], resultType: 'vector',
result: [
{
metric: { le: '102' },
value: [1594908838, '0'],
},
],
}, },
]); };
expect(table.type).toBe('table'); const result = transform({ data: response } as any, { ...options, target: { format: 'table' } });
expect(table.rows).toEqual([[1594908838000, 102, 0]]); expect(result[0].fields[1].values.toArray()).toEqual([102]);
}); });
}); });
describe('When resultFormat is time series and instant = true', () => { describe('When instant = true', () => {
const response = { const response = {
status: 'success', status: 'success',
data: { data: {
@ -141,158 +156,99 @@ describe('Prometheus Result Transformer', () => {
}, },
}; };
it('should return time series', () => { it('should return data frame', () => {
const timeSeries = ctx.resultTransformer.transform({ data: response }, {}); const result: DataFrame[] = transform({ data: response } as any, { ...options, query: { instant: true } });
expect(timeSeries[0].target).toBe('test{job="testjob"}'); expect(result[0].name).toBe('test{job="testjob"}');
expect(timeSeries[0].title).toBe('test{job="testjob"}');
}); });
}); });
describe('When resultFormat is heatmap', () => { describe('When resultFormat is heatmap', () => {
const response = { const getResponse = (result: any) => ({
status: 'success', status: 'success',
data: { data: {
resultType: 'matrix', resultType: 'matrix',
result: [ result,
{
metric: { __name__: 'test', job: 'testjob', le: '1' },
values: [
[1445000010, '10'],
[1445000020, '10'],
[1445000030, '0'],
],
},
{
metric: { __name__: 'test', job: 'testjob', le: '2' },
values: [
[1445000010, '20'],
[1445000020, '10'],
[1445000030, '30'],
],
},
{
metric: { __name__: 'test', job: 'testjob', le: '3' },
values: [
[1445000010, '30'],
[1445000020, '10'],
[1445000030, '40'],
],
},
],
}, },
});
const options = {
format: 'heatmap',
start: 1445000010,
end: 1445000030,
legendFormat: '{{le}}',
}; };
it('should convert cumulative histogram to regular', () => { it('should convert cumulative histogram to regular', () => {
const options = { const response = getResponse([
format: 'heatmap',
start: 1445000010,
end: 1445000030,
legendFormat: '{{le}}',
};
const result = ctx.resultTransformer.transform({ data: response }, options);
expect(result).toEqual([
{ {
target: '1', metric: { __name__: 'test', job: 'testjob', le: '1' },
title: '1', values: [
query: undefined, [1445000010, '10'],
datapoints: [ [1445000020, '10'],
[10, 1445000010000], [1445000030, '0'],
[10, 1445000020000],
[0, 1445000030000],
], ],
tags: { __name__: 'test', job: 'testjob', le: '1' },
}, },
{ {
target: '2', metric: { __name__: 'test', job: 'testjob', le: '2' },
title: '2', values: [
query: undefined, [1445000010, '20'],
datapoints: [ [1445000020, '10'],
[10, 1445000010000], [1445000030, '30'],
[0, 1445000020000],
[30, 1445000030000],
], ],
tags: { __name__: 'test', job: 'testjob', le: '2' },
}, },
{ {
target: '3', metric: { __name__: 'test', job: 'testjob', le: '3' },
title: '3', values: [
query: undefined, [1445000010, '30'],
datapoints: [ [1445000020, '10'],
[10, 1445000010000], [1445000030, '40'],
[0, 1445000020000],
[10, 1445000030000],
], ],
tags: { __name__: 'test', job: 'testjob', le: '3' },
}, },
]); ]);
const result = transform({ data: response } as any, { query: options, target: options } as any);
expect(result[0].fields[0].values.toArray()).toEqual([1445000010000, 1445000020000, 1445000030000]);
expect(result[0].fields[1].values.toArray()).toEqual([10, 10, 0]);
expect(result[1].fields[0].values.toArray()).toEqual([1445000010000, 1445000020000, 1445000030000]);
expect(result[1].fields[1].values.toArray()).toEqual([10, 0, 30]);
expect(result[2].fields[0].values.toArray()).toEqual([1445000010000, 1445000020000, 1445000030000]);
expect(result[2].fields[1].values.toArray()).toEqual([10, 0, 10]);
}); });
it('should handle missing datapoints', () => { it('should handle missing datapoints', () => {
const seriesList = [ const response = getResponse([
{ {
datapoints: [ metric: { __name__: 'test', job: 'testjob', le: '1' },
[1, 1000], values: [
[2, 2000], [1445000010, '1'],
[1445000020, '2'],
], ],
}, },
{ {
datapoints: [ metric: { __name__: 'test', job: 'testjob', le: '2' },
[2, 1000], values: [
[5, 2000], [1445000010, '2'],
[1, 3000], [1445000020, '5'],
[1445000030, '1'],
], ],
}, },
{ {
datapoints: [ metric: { __name__: 'test', job: 'testjob', le: '3' },
[3, 1000], values: [
[7, 2000], [1445000010, '3'],
[1445000020, '7'],
], ],
}, },
]; ]);
const expected = [ const result = transform({ data: response } as any, { query: options, target: options } as any);
{ expect(result[0].fields[1].values.toArray()).toEqual([1, 2]);
datapoints: [ expect(result[1].fields[1].values.toArray()).toEqual([1, 3, 1]);
[1, 1000], expect(result[2].fields[1].values.toArray()).toEqual([1, 2]);
[2, 2000],
],
},
{
datapoints: [
[1, 1000],
[3, 2000],
[1, 3000],
],
},
{
datapoints: [
[1, 1000],
[2, 2000],
],
},
];
const result = ctx.resultTransformer.transformToHistogramOverTime(seriesList);
expect(result).toEqual(expected);
});
it('should throw error when data in wrong format', () => {
const seriesList = [{ rows: [] as any[] }, { datapoints: [] as any[] }];
expect(() => {
ctx.resultTransformer.transformToHistogramOverTime(seriesList);
}).toThrow();
});
it('should throw error when prometheus returned non-timeseries', () => {
// should be { metric: {}, values: [] } for timeseries
const metricData = { metric: {}, value: [] as any[] };
expect(() => {
ctx.resultTransformer.transformMetricData(metricData, { step: 1 }, 1000, 2000);
}).toThrow();
}); });
}); });
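
The expected values above come from subtracting, at each timestamp, the next-lower le bucket from each bucket, which turns Prometheus' cumulative histogram buckets into per-bucket counts; a worked sketch of that arithmetic for the first timestamp of the fixture:

// Cumulative counts at t = 1445000010 for le="1", le="2", le="3".
const cumulative = [10, 20, 30];

// Per-bucket counts: each bucket minus the one below it; the lowest bucket stays as-is.
const perBucket = cumulative.map((value, i) => (i === 0 ? value : value - cumulative[i - 1]));
// perBucket -> [10, 10, 10], matching the first element asserted for each frame above.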
describe('When resultFormat is time series', () => { describe('When the response is a matrix', () => {
it('should transform matrix into timeseries', () => { it('should transform into a data frame', () => {
const response = { const response = {
status: 'success', status: 'success',
data: { data: {
@ -309,31 +265,20 @@ describe('Prometheus Result Transformer', () => {
], ],
}, },
}; };
const options = {
format: 'timeseries',
start: 0,
end: 2,
refId: 'B',
};
const result = ctx.resultTransformer.transform({ data: response }, options); const result: DataFrame[] = transform({ data: response } as any, {
expect(result).toEqual([ ...options,
{ query: {
target: 'test{job="testjob"}', start: 0,
title: 'test{job="testjob"}', end: 2,
query: undefined,
datapoints: [
[10, 0],
[10, 1000],
[0, 2000],
],
tags: { job: 'testjob' },
refId: 'B',
}, },
]); });
expect(result[0].fields[0].values.toArray()).toEqual([0, 1000, 2000]);
expect(result[0].fields[1].values.toArray()).toEqual([10, 10, 0]);
expect(result[0].name).toBe('test{job="testjob"}');
}); });
it('should fill timeseries with null values', () => { it('should fill null values', () => {
const response = { const response = {
status: 'success', status: 'success',
data: { data: {
@ -349,27 +294,11 @@ describe('Prometheus Result Transformer', () => {
], ],
}, },
}; };
const options = {
format: 'timeseries',
step: 1,
start: 0,
end: 2,
};
const result = ctx.resultTransformer.transform({ data: response }, options); const result = transform({ data: response } as any, { ...options, query: { step: 1, start: 0, end: 2 } });
expect(result).toEqual([
{ expect(result[0].fields[0].values.toArray()).toEqual([0, 1000, 2000]);
target: 'test{job="testjob"}', expect(result[0].fields[1].values.toArray()).toEqual([null, 10, 0]);
title: 'test{job="testjob"}',
query: undefined,
datapoints: [
[null, 0],
[10, 1000],
[0, 2000],
],
tags: { job: 'testjob' },
},
]);
}); });
it('should use __name__ label as series name', () => { it('should use __name__ label as series name', () => {
@ -389,15 +318,15 @@ describe('Prometheus Result Transformer', () => {
}, },
}; };
const options = { const result = transform({ data: response } as any, {
format: 'timeseries', ...options,
step: 1, query: {
start: 0, step: 1,
end: 2, start: 0,
}; end: 2,
},
const result = ctx.resultTransformer.transform({ data: response }, options); });
expect(result[0].target).toEqual('test{job="testjob"}'); expect(result[0].name).toEqual('test{job="testjob"}');
}); });
it('should set frame name to undefined if no __name__ label but there are other labels', () => { it('should set frame name to undefined if no __name__ label but there are other labels', () => {
@ -417,17 +346,15 @@ describe('Prometheus Result Transformer', () => {
}, },
}; };
const options = { const result = transform({ data: response } as any, {
format: 'timeseries', ...options,
step: 1, query: {
query: 'Some query', step: 1,
start: 0, start: 0,
end: 2, end: 2,
}; },
});
const result = ctx.resultTransformer.transform({ data: response }, options); expect(result[0].name).toBe('{job="testjob"}');
expect(result[0].target).toBe('{job="testjob"}');
expect(result[0].tags.job).toEqual('testjob');
}); });
it('should align null values with step', () => { it('should align null values with step', () => {
@ -446,35 +373,10 @@ describe('Prometheus Result Transformer', () => {
], ],
}, },
}; };
const options = {
format: 'timeseries',
step: 2,
start: 0,
end: 8,
refId: 'A',
meta: { custom: { hello: '1' } },
};
const result = ctx.resultTransformer.transform({ data: response }, options); const result = transform({ data: response } as any, { ...options, query: { step: 2, start: 0, end: 8 } });
expect(result).toEqual([ expect(result[0].fields[0].values.toArray()).toEqual([0, 2000, 4000, 6000, 8000]);
{ expect(result[0].fields[1].values.toArray()).toEqual([null, null, 10, null, 10]);
target: 'test{job="testjob"}',
title: 'test{job="testjob"}',
meta: {
custom: { hello: '1' },
},
query: undefined,
refId: 'A',
datapoints: [
[null, 0],
[null, 2000],
[10, 4000],
[null, 6000],
[10, 8000],
],
tags: { job: 'testjob' },
},
]);
}); });
}); });
}); });
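
The last two tests pin down the gap-filling behaviour: the transformer walks from start to end in increments of step, emitting a timestamp for every slot and null wherever the response has no sample. A standalone, simplified sketch of that logic (not the transformer's actual implementation), using values consistent with the assertions above:

// Samples keyed by timestamp in seconds; only t = 4 and t = 8 have data.
const samples = new Map<number, number>([
  [4, 10],
  [8, 10],
]);

const start = 0;
const end = 8;
const step = 2;

const times: number[] = [];
const values: Array<number | null> = [];
for (let t = start; t <= end; t += step) {
  times.push(t * 1000);
  values.push(samples.has(t) ? samples.get(t)! : null);
}
// times  -> [0, 2000, 4000, 6000, 8000]
// values -> [null, null, 10, null, 10]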

View File

@ -1,234 +1,296 @@
import _ from 'lodash'; import {
import TableModel from 'app/core/table_model'; ArrayVector,
import { TimeSeries, FieldType, Labels, formatLabels, QueryResultMeta } from '@grafana/data'; DataFrame,
import { TemplateSrv } from 'app/features/templating/template_srv'; Field,
FieldType,
formatLabels,
MutableField,
ScopedVars,
TIME_SERIES_TIME_FIELD_NAME,
TIME_SERIES_VALUE_FIELD_NAME,
} from '@grafana/data';
import { FetchResponse } from '@grafana/runtime';
import templateSrv from 'app/features/templating/template_srv';
import {
isMatrixData,
MatrixOrVectorResult,
PromDataSuccessResponse,
PromMetric,
PromQuery,
PromQueryRequest,
PromValue,
TransformOptions,
} from './types';
export class ResultTransformer { export function transform(
constructor(private templateSrv: TemplateSrv) {} response: FetchResponse<PromDataSuccessResponse>,
transformOptions: {
query: PromQueryRequest;
target: PromQuery;
responseListLength: number;
scopedVars?: ScopedVars;
mixedQueries?: boolean;
}
) {
// Create options object from transformOptions
const options: TransformOptions = {
format: transformOptions.target.format,
step: transformOptions.query.step,
legendFormat: transformOptions.target.legendFormat,
start: transformOptions.query.start,
end: transformOptions.query.end,
query: transformOptions.query.expr,
responseListLength: transformOptions.responseListLength,
scopedVars: transformOptions.scopedVars,
refId: transformOptions.target.refId,
valueWithRefId: transformOptions.target.valueWithRefId,
meta: {
/**
* Fix for showing of Prometheus results in Explore table.
* We want to show result of instant query always in table and result of range query based on target.runAll;
*/
preferredVisualisationType: getPreferredVisualisationType(
transformOptions.query.instant,
transformOptions.mixedQueries
),
},
};
const prometheusResult = response.data.data;
transform(response: any, options: any): Array<TableModel | TimeSeries> { if (!prometheusResult.result) {
const prometheusResult = response.data.data.result;
if (options.format === 'table') {
return [
this.transformMetricDataToTable(
prometheusResult,
options.responseListLength,
options.refId,
options.meta,
options.valueWithRefId
),
];
} else if (prometheusResult && options.format === 'heatmap') {
let seriesList: TimeSeries[] = [];
for (const metricData of prometheusResult) {
seriesList.push(this.transformMetricData(metricData, options, options.start, options.end));
}
seriesList.sort(sortSeriesByLabel);
seriesList = this.transformToHistogramOverTime(seriesList);
return seriesList;
} else if (prometheusResult) {
const seriesList: TimeSeries[] = [];
for (const metricData of prometheusResult) {
if (response.data.data.resultType === 'matrix') {
seriesList.push(this.transformMetricData(metricData, options, options.start, options.end));
} else if (response.data.data.resultType === 'vector') {
seriesList.push(this.transformInstantMetricData(metricData, options));
}
}
return seriesList;
}
return []; return [];
} }
transformMetricData(metricData: any, options: any, start: number, end: number): TimeSeries { // Return early if result type is scalar
const dps = []; if (prometheusResult.resultType === 'scalar') {
const { name, labels, title } = this.createLabelInfo(metricData.metric, options); return [
{
meta: options.meta,
refId: options.refId,
length: 1,
fields: [getTimeField([prometheusResult.result]), getValueField([prometheusResult.result])],
},
];
}
const stepMs = parseFloat(options.step) * 1000; // Return early again if the format is table, this needs special transformation.
let baseTimestamp = start * 1000; if (options.format === 'table') {
const tableData = transformMetricDataToTable(prometheusResult.result, options);
return [tableData];
}
if (metricData.values === undefined) { // Process matrix and vector results to DataFrame
throw new Error('Prometheus heatmap error: data should be a time series'); const dataFrame: DataFrame[] = [];
} prometheusResult.result.forEach((data: MatrixOrVectorResult) => dataFrame.push(transformToDataFrame(data, options)));
for (const value of metricData.values) { // When format is heatmap use the already created data frames and transform it more
if (options.format === 'heatmap') {
dataFrame.sort(sortSeriesByLabel);
const seriesList = transformToHistogramOverTime(dataFrame);
return seriesList;
}
// Return matrix or vector result as DataFrame[]
return dataFrame;
}
function getPreferredVisualisationType(isInstantQuery?: boolean, mixedQueries?: boolean) {
if (isInstantQuery) {
return 'table';
}
return mixedQueries ? 'graph' : undefined;
}
/**
* Transforms matrix and vector result from Prometheus result to DataFrame
*/
function transformToDataFrame(data: MatrixOrVectorResult, options: TransformOptions): DataFrame {
const { name } = createLabelInfo(data.metric, options);
const fields: Field[] = [];
if (isMatrixData(data)) {
const stepMs = options.step ? options.step * 1000 : NaN;
let baseTimestamp = options.start * 1000;
const dps: PromValue[] = [];
for (const value of data.values) {
let dpValue: number | null = parseFloat(value[1]); let dpValue: number | null = parseFloat(value[1]);
if (_.isNaN(dpValue)) { if (isNaN(dpValue)) {
dpValue = null; dpValue = null;
} }
const timestamp = parseFloat(value[0]) * 1000; const timestamp = value[0] * 1000;
for (let t = baseTimestamp; t < timestamp; t += stepMs) { for (let t = baseTimestamp; t < timestamp; t += stepMs) {
dps.push([null, t]); dps.push([t, null]);
} }
baseTimestamp = timestamp + stepMs; baseTimestamp = timestamp + stepMs;
dps.push([dpValue, timestamp]); dps.push([timestamp, dpValue]);
} }
const endTimestamp = end * 1000; const endTimestamp = options.end * 1000;
for (let t = baseTimestamp; t <= endTimestamp; t += stepMs) { for (let t = baseTimestamp; t <= endTimestamp; t += stepMs) {
dps.push([null, t]); dps.push([t, null]);
} }
fields.push(getTimeField(dps, true));
fields.push(getValueField(dps, undefined, false));
} else {
fields.push(getTimeField([data.value]));
fields.push(getValueField([data.value]));
}
return {
meta: options.meta,
refId: options.refId,
length: fields[0].values.length,
fields,
name,
};
}
function transformMetricDataToTable(md: MatrixOrVectorResult[], options: TransformOptions): DataFrame {
if (!md || md.length === 0) {
return { return {
datapoints: dps,
refId: options.refId,
target: name ?? '',
tags: labels,
title,
meta: options.meta, meta: options.meta,
refId: options.refId,
length: 0,
fields: [],
}; };
} }
transformMetricDataToTable( const valueText = options.responseListLength > 1 || options.valueWithRefId ? `Value #${options.refId}` : 'Value';
md: any,
resultCount: number,
refId: string,
meta: QueryResultMeta,
valueWithRefId?: boolean
): TableModel {
const table = new TableModel();
table.refId = refId;
table.meta = meta;
let i: number, j: number; const timeField = getTimeField([]);
const metricLabels: { [key: string]: number } = {}; const metricFields = Object.keys(md.reduce((acc, series) => ({ ...acc, ...series.metric }), {}))
.sort()
.map(label => {
return {
name: label,
config: { filterable: true },
type: FieldType.other,
values: new ArrayVector(),
};
});
const valueField = getValueField([], valueText);
if (!md || md.length === 0) { md.forEach(d => {
return table; if (isMatrixData(d)) {
d.values.forEach(val => {
timeField.values.add(val[0] * 1000);
metricFields.forEach(metricField => metricField.values.add(getLabelValue(d.metric, metricField.name)));
valueField.values.add(parseFloat(val[1]));
});
} else {
timeField.values.add(d.value[0] * 1000);
metricFields.forEach(metricField => metricField.values.add(getLabelValue(d.metric, metricField.name)));
valueField.values.add(parseFloat(d.value[1]));
} }
});
// Collect all labels across all metrics return {
_.each(md, series => { meta: options.meta,
for (const label in series.metric) { refId: options.refId,
if (!metricLabels.hasOwnProperty(label)) { length: timeField.values.length,
metricLabels[label] = 1; fields: [timeField, ...metricFields, valueField],
} };
} }
});
// Sort metric labels, create columns for them and record their index function getLabelValue(metric: PromMetric, label: string): string | number {
const sortedLabels = _.keys(metricLabels).sort(); if (metric.hasOwnProperty(label)) {
table.columns.push({ text: 'Time', type: FieldType.time }); if (label === 'le') {
_.each(sortedLabels, (label, labelIndex) => { return parseHistogramLabel(metric[label]);
metricLabels[label] = labelIndex + 1;
table.columns.push({ text: label, filterable: true });
});
const valueText = resultCount > 1 || valueWithRefId ? `Value #${refId}` : 'Value';
table.columns.push({ text: valueText });
// Populate rows, set value to empty string when label not present.
_.each(md, series => {
if (series.value) {
series.values = [series.value];
}
if (series.values) {
for (i = 0; i < series.values.length; i++) {
const values = series.values[i];
const reordered: any = [values[0] * 1000];
if (series.metric) {
for (j = 0; j < sortedLabels.length; j++) {
const label = sortedLabels[j];
if (series.metric.hasOwnProperty(label)) {
if (label === 'le') {
reordered.push(parseHistogramLabel(series.metric[label]));
} else {
reordered.push(series.metric[label]);
}
} else {
reordered.push('');
}
}
}
reordered.push(parseFloat(values[1]));
table.rows.push(reordered);
}
}
});
return table;
}
transformInstantMetricData(md: any, options: any): TimeSeries {
const dps = [];
const { name, labels } = this.createLabelInfo(md.metric, options);
dps.push([parseFloat(md.value[1]), md.value[0] * 1000]);
return { target: name ?? '', title: name, datapoints: dps, tags: labels, refId: options.refId, meta: options.meta };
}
createLabelInfo(labels: { [key: string]: string }, options: any): { name?: string; labels: Labels; title?: string } {
if (options?.legendFormat) {
const title = this.renderTemplate(this.templateSrv.replace(options.legendFormat, options?.scopedVars), labels);
return { name: title, title, labels };
} }
return metric[label];
}
return '';
}
let { __name__, ...labelsWithoutName } = labels; function getTimeField(data: PromValue[], isMs = false): MutableField {
return {
name: TIME_SERIES_TIME_FIELD_NAME,
type: FieldType.time,
config: {},
values: new ArrayVector<number>(data.map(val => (isMs ? val[0] : val[0] * 1000))),
};
}
let title = __name__ || ''; function getValueField(
data: PromValue[],
valueName: string = TIME_SERIES_VALUE_FIELD_NAME,
parseValue = true
): MutableField {
return {
name: valueName,
type: FieldType.number,
config: {},
values: new ArrayVector<number | null>(data.map(val => (parseValue ? parseFloat(val[1]) : val[1]))),
};
}
const labelPart = formatLabels(labelsWithoutName); function createLabelInfo(labels: { [key: string]: string }, options: TransformOptions) {
if (options?.legendFormat) {
const title = renderTemplate(templateSrv.replace(options.legendFormat, options?.scopedVars), labels);
return { name: title, labels };
}
if (!title && !labelPart) { const { __name__, ...labelsWithoutName } = labels;
title = options.query; const labelPart = formatLabels(labelsWithoutName);
const title = `${__name__ ?? ''}${labelPart}`;
return { name: title, labels: labelsWithoutName };
}
export function getOriginalMetricName(labelData: { [key: string]: string }) {
const metricName = labelData.__name__ || '';
delete labelData.__name__;
const labelPart = Object.entries(labelData)
.map(label => `${label[0]}="${label[1]}"`)
.join(',');
return `${metricName}{${labelPart}}`;
}
export function renderTemplate(aliasPattern: string, aliasData: { [key: string]: string }) {
const aliasRegex = /\{\{\s*(.+?)\s*\}\}/g;
return aliasPattern.replace(aliasRegex, (_match, g1) => {
if (aliasData[g1]) {
return aliasData[g1];
} }
return '';
});
}
title = `${__name__ ?? ''}${labelPart}`; function transformToHistogramOverTime(seriesList: DataFrame[]) {
/* t1 = timestamp1, t2 = timestamp2 etc.
return { name: title, title, labels: labelsWithoutName };
}
getOriginalMetricName(labelData: { [key: string]: string }) {
const metricName = labelData.__name__ || '';
delete labelData.__name__;
const labelPart = Object.entries(labelData)
.map(label => `${label[0]}="${label[1]}"`)
.join(',');
return `${metricName}{${labelPart}}`;
}
renderTemplate(aliasPattern: string, aliasData: { [key: string]: string }) {
const aliasRegex = /\{\{\s*(.+?)\s*\}\}/g;
return aliasPattern.replace(aliasRegex, (match, g1) => {
if (aliasData[g1]) {
return aliasData[g1];
}
return '';
});
}
transformToHistogramOverTime(seriesList: TimeSeries[]) {
/* t1 = timestamp1, t2 = timestamp2 etc.
t1 t2 t3 t1 t2 t3 t1 t2 t3 t1 t2 t3
le10 10 10 0 => 10 10 0 le10 10 10 0 => 10 10 0
le20 20 10 30 => 10 0 30 le20 20 10 30 => 10 0 30
le30 30 10 35 => 10 0 5 le30 30 10 35 => 10 0 5
*/ */
for (let i = seriesList.length - 1; i > 0; i--) { for (let i = seriesList.length - 1; i > 0; i--) {
const topSeries = seriesList[i].datapoints; const topSeries = seriesList[i].fields.find(s => s.name === TIME_SERIES_VALUE_FIELD_NAME);
const bottomSeries = seriesList[i - 1].datapoints; const bottomSeries = seriesList[i - 1].fields.find(s => s.name === TIME_SERIES_VALUE_FIELD_NAME);
if (!topSeries || !bottomSeries) { if (!topSeries || !bottomSeries) {
throw new Error('Prometheus heatmap transform error: data should be a time series'); throw new Error('Prometheus heatmap transform error: data should be a time series');
}
for (let j = 0; j < topSeries.length; j++) {
const bottomPoint = bottomSeries[j] || [0];
topSeries[j][0]! -= bottomPoint[0]!;
}
} }
return seriesList; for (let j = 0; j < topSeries.values.length; j++) {
const bottomPoint = bottomSeries.values.get(j) || [0];
topSeries.values.toArray()[j] -= bottomPoint;
}
} }
return seriesList;
} }
function sortSeriesByLabel(s1: TimeSeries, s2: TimeSeries): number { function sortSeriesByLabel(s1: DataFrame, s2: DataFrame): number {
let le1, le2; let le1, le2;
try { try {
// fail if not integer. might happen with bad queries // fail if not integer. might happen with bad queries
le1 = parseHistogramLabel(s1.target); le1 = parseHistogramLabel(s1.name ?? '');
le2 = parseHistogramLabel(s2.target); le2 = parseHistogramLabel(s2.name ?? '');
} catch (err) { } catch (err) {
console.error(err); console.error(err);
return 0; return 0;
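
A condensed sketch of the label handling visible in getLabelValue above: in table mode the le bucket label is emitted as a number so the column sorts numerically, other labels stay strings, and a missing label becomes an empty string (the real parseHistogramLabel, not shown in this excerpt, appears to also reject non-numeric buckets, as the comment in sortSeriesByLabel suggests).

function labelValueSketch(metric: Record<string, string>, label: string): string | number {
  if (!(label in metric)) {
    return '';
  }
  return label === 'le' ? parseFloat(metric[label]) : metric[label];
}

// labelValueSketch({ le: '102' }, 'le')   -> 102
// labelValueSketch({ job: 'api' }, 'job') -> 'api'
// labelValueSketch({ job: 'api' }, 'le')  -> ''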

View File

@ -1,4 +1,5 @@
import { DataQuery, DataSourceJsonData } from '@grafana/data'; import { DataQuery, DataSourceJsonData, QueryResultMeta, ScopedVars } from '@grafana/data';
import { FetchError } from '@grafana/runtime';
export interface PromQuery extends DataQuery { export interface PromQuery extends DataQuery {
expr: string; expr: string;
@ -41,3 +42,77 @@ export interface PromMetricsMetadataItem {
export interface PromMetricsMetadata { export interface PromMetricsMetadata {
[metric: string]: PromMetricsMetadataItem[]; [metric: string]: PromMetricsMetadataItem[];
} }
export interface PromDataSuccessResponse<T = PromData> {
status: 'success';
data: T;
}
export interface PromDataErrorResponse<T = PromData> {
status: 'error';
errorType: string;
error: string;
data: T;
}
export type PromData = PromMatrixData | PromVectorData | PromScalarData;
export interface PromVectorData {
resultType: 'vector';
result: Array<{
metric: PromMetric;
value: PromValue;
}>;
}
export interface PromMatrixData {
resultType: 'matrix';
result: Array<{
metric: PromMetric;
values: PromValue[];
}>;
}
export interface PromScalarData {
resultType: 'scalar';
result: PromValue;
}
export type PromValue = [number, any];
export interface PromMetric {
__name__?: string;
[index: string]: any;
}
export function isFetchErrorResponse(response: any): response is FetchError {
return 'cancelled' in response;
}
export function isMatrixData(result: MatrixOrVectorResult): result is PromMatrixData['result'][0] {
return 'values' in result;
}
export type MatrixOrVectorResult = PromMatrixData['result'][0] | PromVectorData['result'][0];
export interface TransformOptions {
format?: string;
step?: number;
legendFormat?: string;
start: number;
end: number;
query: string;
responseListLength: number;
scopedVars?: ScopedVars;
refId: string;
valueWithRefId?: boolean;
meta: QueryResultMeta;
}
export interface PromLabelQueryResponse {
data: {
status: string;
data: string[];
};
cancelled?: boolean;
}
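
Finally, a short sketch of how the new response types are meant to be consumed: resultType is the discriminant of the PromData union, and isMatrixData tells matrix entries (values) apart from vector entries (value):

import { isMatrixData, PromData } from './types';

// Counts the samples in a query result, whatever its result type.
function countSamples(data: PromData): number {
  if (data.resultType === 'scalar') {
    return 1;
  }
  let total = 0;
  for (const entry of data.result) {
    total += isMatrixData(entry) ? entry.values.length : 1;
  }
  return total;
}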