Prometheus: Run Explore range queries through backend (#39133)

* Run Explore range queries through backend

* Remove trailing comma

* Add timeRange step alignment to backend

* Remove creation of instant query on backend as it is not supported ATM

* Remove non-related frontend changes

* Pass offset to calculate aligned range through prom query

* Update order in query error message

* tableRefIds shouldn't contain undefined refIds

* Remove cloning of dataframes when processing

* Don't mutate response

* Remove ordering of processed frames

* Remove df because not needed
This commit is contained in:
Ivana Huckova 2021-09-17 13:39:26 +02:00 committed by GitHub
parent 0606618d4a
commit 81756cd702
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 923 additions and 552 deletions

View File

@ -51,6 +51,7 @@ type QueryModel struct {
RangeQuery bool `json:"range"`
InstantQuery bool `json:"instant"`
IntervalFactor int64 `json:"intervalFactor"`
OffsetSec int64 `json:"offsetSec"`
}
type Service struct {
@ -162,13 +163,20 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
span.SetTag("stop_unixnano", query.End.UnixNano())
defer span.Finish()
value, _, err := client.QueryRange(ctx, query.Expr, timeRange)
var results model.Value
switch query.QueryType {
case Range:
results, _, err = client.QueryRange(ctx, query.Expr, timeRange)
if err != nil {
return &result, err
return &result, fmt.Errorf("query: %s failed with: %v", query.Expr, err)
}
frame, err := parseResponse(value, query)
default:
return &result, fmt.Errorf("unknown Query type detected %#v", query.QueryType)
}
frame, err := parseResponse(results, query)
if err != nil {
return &result, err
}
@ -283,13 +291,20 @@ func (s *Service) parseQuery(queryContext *backend.QueryDataRequest, dsInfo *Dat
expr = strings.ReplaceAll(expr, "$__range", strconv.FormatInt(rangeS, 10)+"s")
expr = strings.ReplaceAll(expr, "$__rate_interval", intervalv2.FormatDuration(calculateRateInterval(interval, dsInfo.TimeInterval, s.intervalCalculator)))
queryType := Range
// Align query range to step. It rounds start and end down to a multiple of step.
start := int64(math.Floor((float64(query.TimeRange.From.Unix()+model.OffsetSec)/interval.Seconds()))*interval.Seconds() - float64(model.OffsetSec))
end := int64(math.Floor((float64(query.TimeRange.To.Unix()+model.OffsetSec)/interval.Seconds()))*interval.Seconds() - float64(model.OffsetSec))
qs = append(qs, &PrometheusQuery{
Expr: expr,
Step: interval,
LegendFormat: model.LegendFormat,
Start: query.TimeRange.From,
End: query.TimeRange.To,
Start: time.Unix(start, 0),
End: time.Unix(end, 0),
RefId: query.RefID,
QueryType: queryType,
})
}

View File

@ -275,6 +275,45 @@ func TestPrometheus_parseQuery(t *testing.T) {
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [1m]})", models[0].Expr)
})
// A model with "range": true should be parsed as a Range query.
t.Run("parsing query model of range query", func(t *testing.T) {
timeRange := backend.TimeRange{
From: now,
To: now.Add(48 * time.Hour),
}
query := queryContext(`{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 1,
"refId": "A",
"range": true
}`, timeRange)
dsInfo := &DatasourceInfo{}
models, err := service.parseQuery(query, dsInfo)
require.NoError(t, err)
require.Equal(t, Range, models[0].QueryType)
})
// A model without an explicit query type should default to Range.
// Fixed garbled test name ("of with" -> "with").
t.Run("parsing query model with default query type", func(t *testing.T) {
timeRange := backend.TimeRange{
From: now,
To: now.Add(48 * time.Hour),
}
query := queryContext(`{
"expr": "go_goroutines",
"format": "time_series",
"intervalFactor": 1,
"refId": "A"
}`, timeRange)
dsInfo := &DatasourceInfo{}
models, err := service.parseQuery(query, dsInfo)
require.NoError(t, err)
require.Equal(t, Range, models[0].QueryType)
})
}
func queryContext(json string, timeRange backend.TimeRange) *backend.QueryDataRequest {

View File

@ -9,4 +9,13 @@ type PrometheusQuery struct {
Start time.Time
End time.Time
RefId string
QueryType PrometheusQueryType
}
// PrometheusQueryType identifies how a query should be executed against Prometheus.
type PrometheusQueryType string
// Supported query types.
const (
// Range is executed via the range-query API over the aligned time range.
Range PrometheusQueryType = "range"
// Instant is currently not used, but will be wired up in the next iteration.
Instant PrometheusQueryType = "instant"
)

View File

@ -8,7 +8,6 @@ import {
DataQueryError,
DataQueryRequest,
DataQueryResponse,
DataSourceApi,
DataSourceInstanceSettings,
dateMath,
DateTime,
@ -17,7 +16,7 @@ import {
ScopedVars,
TimeRange,
} from '@grafana/data';
import { BackendSrvRequest, FetchError, FetchResponse, getBackendSrv } from '@grafana/runtime';
import { BackendSrvRequest, FetchError, FetchResponse, getBackendSrv, DataSourceWithBackend } from '@grafana/runtime';
import { safeStringifyValue } from 'app/core/utils/explore';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
@ -26,7 +25,7 @@ import addLabelToQuery from './add_label_to_query';
import PrometheusLanguageProvider from './language_provider';
import { expandRecordingRules } from './language_utils';
import { getInitHints, getQueryHints } from './query_hints';
import { getOriginalMetricName, renderTemplate, transform } from './result_transformer';
import { getOriginalMetricName, renderTemplate, transform, transformV2 } from './result_transformer';
import {
ExemplarTraceIdDestination,
isFetchErrorResponse,
@ -47,12 +46,13 @@ export const ANNOTATION_QUERY_STEP_DEFAULT = '60s';
const EXEMPLARS_NOT_AVAILABLE = 'Exemplars for this query are not available.';
const GET_AND_POST_METADATA_ENDPOINTS = ['api/v1/query', 'api/v1/query_range', 'api/v1/series', 'api/v1/labels'];
export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions> {
export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromOptions> {
type: string;
editorSrc: string;
ruleMappings: { [index: string]: string };
url: string;
directUrl: string;
access: 'direct' | 'proxy';
basicAuth: any;
withCredentials: any;
metricsNameCache = new LRU<string, string[]>(10);
@ -75,6 +75,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
this.type = 'prometheus';
this.editorSrc = 'app/features/prometheus/partials/query.editor.html';
this.url = instanceSettings.url!;
this.access = instanceSettings.access;
this.basicAuth = instanceSettings.basicAuth;
this.withCredentials = instanceSettings.withCredentials;
this.interval = instanceSettings.jsonData.timeInterval || '15s';
@ -288,7 +289,34 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
};
};
prepareOptionsV2 = (options: DataQueryRequest<PromQuery>) => {
  // Explore range queries are always sent to the backend as time_series
  const targets = options.targets.map((target) => ({
    ...target,
    instant: false,
    range: true,
    format: 'time_series',
    // Timezone offset (in seconds) so the backend can align start/end to the step
    offsetSec: this.timeSrv.timeRange().to.utcOffset() * 60,
  }));

  return { ...options, targets };
};
query(options: DataQueryRequest<PromQuery>): Observable<DataQueryResponse> {
// WIP - currently we want to run trough backend only if all queries are explore + range queries
const shouldRunBackendQuery =
this.access === 'proxy' &&
options.app === CoreApp.Explore &&
!options.targets.some((query) => query.exemplar) &&
!options.targets.some((query) => query.instant);
if (shouldRunBackendQuery) {
const newOptions = this.prepareOptionsV2(options);
return super.query(newOptions).pipe(map((response) => transformV2(response, newOptions)));
// Run queries trough browser/proxy
} else {
const start = this.getPrometheusTime(options.range.from, false);
const end = this.getPrometheusTime(options.range.to, true);
const { queries, activeTargets } = this.prepareTargets(options, start, end);
@ -307,6 +335,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
return this.panelsQuery(queries, activeTargets, end, options.requestId, options.scopedVars);
}
}
private exploreQuery(queries: PromQueryRequest[], activeTargets: PromQuery[], end: number) {
let runningQueriesCount = queries.length;
@ -824,6 +853,27 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
getOriginalMetricName(labelData: { [key: string]: string }) {
return getOriginalMetricName(labelData);
}
// Used when running queries through the backend
filterQuery(query: PromQuery): boolean {
  // Drop hidden queries and queries without an expression
  return !(query.hide || !query.expr);
}
// Used when running queries through the backend
applyTemplateVariables(target: PromQuery, scopedVars: ScopedVars): Record<string, any> {
  const variables = cloneDeep(scopedVars);

  // $__interval and $__interval_ms are interpolated on the backend, so drop them here
  delete variables.__interval;
  delete variables.__interval_ms;

  const expr = this.templateSrv.replace(target.expr, variables, this.interpolateQueryExpr);
  return { ...target, expr };
}
}
/**

View File

@ -1,5 +1,6 @@
import { DataFrame, FieldType } from '@grafana/data';
import { transform } from './result_transformer';
import { DataFrame, FieldType, DataQueryRequest, DataQueryResponse, MutableDataFrame } from '@grafana/data';
import { transform, transformV2, transformDFoTable } from './result_transformer';
import { PromQuery } from './types';
jest.mock('@grafana/runtime', () => ({
getTemplateSrv: () => ({
@ -31,6 +32,169 @@ const matrixResponse = {
};
describe('Prometheus Result Transformer', () => {
describe('transformV2', () => {
// time_series frames should only gain meta.preferredVisualisationType: 'graph'
it('results with time_series format should be enriched with preferredVisualisationType', () => {
const options = ({
targets: [
{
format: 'time_series',
refId: 'A',
},
],
} as unknown) as DataQueryRequest<PromQuery>;
const response = ({
state: 'Done',
data: [
{
fields: [],
length: 2,
name: 'ALERTS',
refId: 'A',
},
],
} as unknown) as DataQueryResponse;
const series = transformV2(response, options);
expect(series).toEqual({
data: [{ fields: [], length: 2, meta: { preferredVisualisationType: 'graph' }, name: 'ALERTS', refId: 'A' }],
state: 'Done',
});
});
// table-format targets are expanded to time + one column per label + Value
it('results with table format should be transformed to table dataFrames', () => {
const options = ({
targets: [
{
format: 'table',
refId: 'A',
},
],
} as unknown) as DataQueryRequest<PromQuery>;
const response = ({
state: 'Done',
data: [
new MutableDataFrame({
refId: 'A',
fields: [
{ name: 'time', type: FieldType.time, values: [6, 5, 4] },
{
name: 'value',
type: FieldType.number,
values: [6, 5, 4],
labels: { label1: 'value1', label2: 'value2' },
},
],
}),
],
} as unknown) as DataQueryResponse;
const series = transformV2(response, options);
expect(series.data[0].fields[0].name).toEqual('time');
expect(series.data[0].fields[1].name).toEqual('label1');
expect(series.data[0].fields[2].name).toEqual('label2');
expect(series.data[0].fields[3].name).toEqual('Value');
expect(series.data[0].meta?.preferredVisualisationType).toEqual('table');
});
// mixed targets: only the table-format refId is transformed; ordering is graph first
it('results with table and time_series format should be correctly transformed', () => {
const options = ({
targets: [
{
format: 'table',
refId: 'A',
},
{
format: 'time_series',
refId: 'B',
},
],
} as unknown) as DataQueryRequest<PromQuery>;
const response = ({
state: 'Done',
data: [
new MutableDataFrame({
refId: 'A',
fields: [
{ name: 'time', type: FieldType.time, values: [6, 5, 4] },
{
name: 'value',
type: FieldType.number,
values: [6, 5, 4],
labels: { label1: 'value1', label2: 'value2' },
},
],
}),
new MutableDataFrame({
refId: 'B',
fields: [
{ name: 'time', type: FieldType.time, values: [6, 5, 4] },
{
name: 'value',
type: FieldType.number,
values: [6, 5, 4],
labels: { label1: 'value1', label2: 'value2' },
},
],
}),
],
} as unknown) as DataQueryResponse;
const series = transformV2(response, options);
expect(series.data[0].fields.length).toEqual(2);
expect(series.data[0].meta?.preferredVisualisationType).toEqual('graph');
expect(series.data[1].fields.length).toEqual(4);
expect(series.data[1].meta?.preferredVisualisationType).toEqual('table');
});
});
describe('transformDFoTable', () => {
// Single-frame response: value column is named plain 'Value'
it('transforms dataFrame with response length 1 to table dataFrame', () => {
const df = new MutableDataFrame({
refId: 'A',
fields: [
{ name: 'time', type: FieldType.time, values: [6, 5, 4] },
{
name: 'value',
type: FieldType.number,
values: [6, 5, 4],
labels: { label1: 'value1', label2: 'value2' },
},
],
});
const tableDf = transformDFoTable(df, 1);
expect(tableDf.fields.length).toBe(4);
expect(tableDf.fields[0].name).toBe('time');
expect(tableDf.fields[1].name).toBe('label1');
expect(tableDf.fields[1].values.get(0)).toBe('value1');
expect(tableDf.fields[2].name).toBe('label2');
expect(tableDf.fields[2].values.get(0)).toBe('value2');
expect(tableDf.fields[3].name).toBe('Value');
});
// Multi-frame response (length > 1): value column is suffixed with the refId.
// Fixed test name — the body passes a response length of 3, not 2.
it('transforms dataFrame with response length > 1 to table dataFrame', () => {
const df = new MutableDataFrame({
refId: 'A',
fields: [
{ name: 'time', type: FieldType.time, values: [6, 5, 4] },
{
name: 'value',
type: FieldType.number,
values: [6, 5, 4],
labels: { label1: 'value1', label2: 'value2' },
},
],
});
const tableDf = transformDFoTable(df, 3);
expect(tableDf.fields.length).toBe(4);
expect(tableDf.fields[0].name).toBe('time');
expect(tableDf.fields[1].name).toBe('label1');
expect(tableDf.fields[1].values.get(0)).toBe('value1');
expect(tableDf.fields[2].name).toBe('label2');
expect(tableDf.fields[2].values.get(0)).toBe('value2');
expect(tableDf.fields[3].name).toBe('Value #A');
});
});
describe('transform', () => {
const options: any = { target: {}, query: {} };
describe('When nothing is returned', () => {
it('should return empty array', () => {
@ -341,7 +505,10 @@ describe('Prometheus Result Transformer', () => {
});
it('should fill null values', () => {
const result = transform({ data: matrixResponse } as any, { ...options, query: { step: 1, start: 0, end: 2 } });
const result = transform({ data: matrixResponse } as any, {
...options,
query: { step: 1, start: 0, end: 2 },
});
expect(result[0].fields[0].values.toArray()).toEqual([0, 1000, 2000]);
expect(result[0].fields[1].values.toArray()).toEqual([null, 10, 0]);
@ -453,7 +620,10 @@ describe('Prometheus Result Transformer', () => {
};
it('should correctly parse values', () => {
const result: DataFrame[] = transform({ data: response } as any, { ...options, target: { format: 'table' } });
const result: DataFrame[] = transform({ data: response } as any, {
...options,
target: { format: 'table' },
});
expect(result[0].fields[1].values.toArray()).toEqual([Number.POSITIVE_INFINITY]);
});
});
@ -478,7 +648,10 @@ describe('Prometheus Result Transformer', () => {
describe('When format is table', () => {
it('should correctly parse values', () => {
const result: DataFrame[] = transform({ data: response } as any, { ...options, target: { format: 'table' } });
const result: DataFrame[] = transform({ data: response } as any, {
...options,
target: { format: 'table' },
});
expect(result[0].fields[3].values.toArray()).toEqual([Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]);
});
});
@ -584,4 +757,5 @@ describe('Prometheus Result Transformer', () => {
});
});
});
});
});

View File

@ -13,8 +13,12 @@ import {
ScopedVars,
TIME_SERIES_TIME_FIELD_NAME,
TIME_SERIES_VALUE_FIELD_NAME,
DataQueryResponse,
DataQueryRequest,
PreferredVisualisationType,
} from '@grafana/data';
import { FetchResponse, getDataSourceSrv, getTemplateSrv } from '@grafana/runtime';
import { partition } from 'lodash';
import { descending, deviation } from 'd3';
import {
ExemplarTraceIdDestination,
@ -37,6 +41,84 @@ interface TimeAndValue {
[TIME_SERIES_VALUE_FIELD_NAME]: number;
}
// V2 result transformer used for query results from queries that were run through the Prometheus backend
export function transformV2(response: DataQueryResponse, options: DataQueryRequest<PromQuery>) {
  // refIds of targets that requested table format; their frames need table post-processing
  const tableRefIds = options.targets.filter((target) => target.format === 'table').map((target) => target.refId);
  const isTableFrame = (dataFrame: DataFrame) => (dataFrame.refId ? tableRefIds.includes(dataFrame.refId) : false);

  const tableResults: DataFrame[] = response.data.filter(isTableFrame);
  const otherResults: DataFrame[] = response.data.filter((dataFrame: DataFrame) => !isTableFrame(dataFrame));

  // Table results: convert each time_series frame into a table data frame
  const tableFrames = tableResults.map((dataFrame) => transformDFoTable(dataFrame, options.targets.length));

  // Everything else is processed as a time_series result with graph preferredVisualisationType
  const otherFrames = otherResults.map(
    (dataFrame) =>
      ({
        ...dataFrame,
        meta: {
          ...dataFrame.meta,
          preferredVisualisationType: 'graph',
        },
      } as DataFrame)
  );

  return { ...response, data: [...otherFrames, ...tableFrames] };
}
// Converts a time_series data frame (expects fields[0] = time, fields[1] = value)
// into a table data frame: time column, one column per label (sorted by label
// name), then the value column. responseLength decides whether the value column
// is named 'Value' or 'Value #<refId>' (see getValueText).
export function transformDFoTable(df: DataFrame, responseLength: number): DataFrame {
// Nothing to transform for an empty frame
if (df.length === 0) {
return df;
}
const timeField = df.fields[0];
const valueField = df.fields[1];
// Create label fields
const promLabels: PromMetric = valueField.labels ?? {};
const labelFields = Object.keys(promLabels)
.sort()
.map((label) => {
// 'le' label values are treated as numbers; all other label values as strings
const numberField = label === 'le';
return {
name: label,
config: { filterable: true },
type: numberField ? FieldType.number : FieldType.string,
values: new ArrayVector(),
};
});
// Fill labelFields with label values
labelFields.forEach((field) => field.values.add(getLabelValue(promLabels, field.name)));
// Strip the frame name and display-name overrides so table columns get clean headers
const tableDataFrame = {
...df,
name: undefined,
meta: { ...df.meta, preferredVisualisationType: 'table' as PreferredVisualisationType },
fields: [
timeField,
...labelFields,
{
...valueField,
name: getValueText(responseLength, df.refId),
labels: undefined,
config: { ...valueField.config, displayNameFromDS: undefined },
state: { ...valueField.state, displayName: undefined },
},
],
};
return tableDataFrame;
}
// Name for the value column: include the refId when the response holds
// frames for more than one query, so columns remain distinguishable.
function getValueText(responseLength: number, refId = '') {
  if (responseLength > 1) {
    return `Value #${refId}`;
  }
  return 'Value';
}
export function transform(
response: FetchResponse<PromDataSuccessResponse>,
transformOptions: {

View File

@ -10,6 +10,8 @@ export interface PromQuery extends DataQuery {
hinting?: boolean;
interval?: string;
intervalFactor?: number;
// Timezone offset to align start & end time on backend
offsetSec?: number;
legendFormat?: string;
valueWithRefId?: boolean;
requestId?: string;