Prometheus: Run Explore range queries through backend (#39133)
* Run Explore range queries through backend
* Remove trailing comma
* Add timeRange step alignment to backend
* Remove creation of instant query on backend as it is not supported ATM
* Remove non-related frontend changes
* Pass offset to calculate aligned range through prom query
* Update order in query error message
* tableRefIds shouldn't contain undefined refIds
* Remove cloning of dataframes when processing
* Don't mutate response
* Remove ordering of processed frames
* Remove df because it is not needed
This commit is contained in:
parent 0606618d4a
commit 81756cd702
@@ -51,6 +51,7 @@ type QueryModel struct {
     RangeQuery     bool  `json:"range"`
     InstantQuery   bool  `json:"instant"`
     IntervalFactor int64 `json:"intervalFactor"`
+    OffsetSec      int64 `json:"offsetSec"`
 }
 
 type Service struct {
@@ -162,13 +163,20 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
         span.SetTag("stop_unixnano", query.End.UnixNano())
         defer span.Finish()
 
-        value, _, err := client.QueryRange(ctx, query.Expr, timeRange)
+        var results model.Value
 
-        if err != nil {
-            return &result, err
+        switch query.QueryType {
+        case Range:
+            results, _, err = client.QueryRange(ctx, query.Expr, timeRange)
+            if err != nil {
+                return &result, fmt.Errorf("query: %s failed with: %v", query.Expr, err)
+            }
+        default:
+            return &result, fmt.Errorf("unknown Query type detected %#v", query.QueryType)
         }
 
-        frame, err := parseResponse(value, query)
+        frame, err := parseResponse(results, query)
         if err != nil {
             return &result, err
         }
@@ -283,13 +291,20 @@ func (s *Service) parseQuery(queryContext *backend.QueryDataRequest, dsInfo *Dat
         expr = strings.ReplaceAll(expr, "$__range", strconv.FormatInt(rangeS, 10)+"s")
         expr = strings.ReplaceAll(expr, "$__rate_interval", intervalv2.FormatDuration(calculateRateInterval(interval, dsInfo.TimeInterval, s.intervalCalculator)))
 
+        queryType := Range
+
+        // Align query range to step. It rounds start and end down to a multiple of step.
+        start := int64(math.Floor((float64(query.TimeRange.From.Unix()+model.OffsetSec)/interval.Seconds()))*interval.Seconds() - float64(model.OffsetSec))
+        end := int64(math.Floor((float64(query.TimeRange.To.Unix()+model.OffsetSec)/interval.Seconds()))*interval.Seconds() - float64(model.OffsetSec))
+
         qs = append(qs, &PrometheusQuery{
             Expr:         expr,
             Step:         interval,
             LegendFormat: model.LegendFormat,
-            Start:        query.TimeRange.From,
-            End:          query.TimeRange.To,
+            Start:        time.Unix(start, 0),
+            End:          time.Unix(end, 0),
             RefId:        query.RefID,
+            QueryType:    queryType,
         })
     }
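The alignment added in parseQuery above is the core of the backend change: both ends of the time range are rounded down to a multiple of the step, and OffsetSec shifts the boundaries so they follow the dashboard timezone (the frontend sends utcOffset() * 60). A minimal TypeScript sketch of the same arithmetic, illustrative only (the helper name is invented; the shipped code is the Go shown above):

// Rounds a Unix timestamp (in seconds) down to a multiple of stepSec, shifted
// by offsetSec so buckets line up with the dashboard timezone.
function alignToStep(timeSec: number, stepSec: number, offsetSec: number): number {
  return Math.floor((timeSec + offsetSec) / stepSec) * stepSec - offsetSec;
}

// With a 1h step and no offset, buckets sit on UTC hour boundaries:
alignToStep(1632470425, 3600, 0); // 1632470400
// With offsetSec = 19800 (UTC+5:30, i.e. utcOffset() * 60), buckets sit on
// local hour boundaries instead:
alignToStep(1632470425, 3600, 19800); // 1632468600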
@@ -275,6 +275,45 @@ func TestPrometheus_parseQuery(t *testing.T) {
         require.NoError(t, err)
         require.Equal(t, "rate(ALERTS{job=\"test\" [1m]})", models[0].Expr)
     })
+
+    t.Run("parsing query model of range query", func(t *testing.T) {
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(48 * time.Hour),
+        }
+
+        query := queryContext(`{
+            "expr": "go_goroutines",
+            "format": "time_series",
+            "intervalFactor": 1,
+            "refId": "A",
+            "range": true
+        }`, timeRange)
+
+        dsInfo := &DatasourceInfo{}
+        models, err := service.parseQuery(query, dsInfo)
+        require.NoError(t, err)
+        require.Equal(t, Range, models[0].QueryType)
+    })
+
+    t.Run("parsing query model of with default query type", func(t *testing.T) {
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(48 * time.Hour),
+        }
+
+        query := queryContext(`{
+            "expr": "go_goroutines",
+            "format": "time_series",
+            "intervalFactor": 1,
+            "refId": "A"
+        }`, timeRange)
+
+        dsInfo := &DatasourceInfo{}
+        models, err := service.parseQuery(query, dsInfo)
+        require.NoError(t, err)
+        require.Equal(t, Range, models[0].QueryType)
+    })
 }
 
 func queryContext(json string, timeRange backend.TimeRange) *backend.QueryDataRequest {
@@ -9,4 +9,13 @@ type PrometheusQuery struct {
     Start        time.Time
     End          time.Time
     RefId        string
+    QueryType    PrometheusQueryType
 }
+
+type PrometheusQueryType string
+
+const (
+    Range PrometheusQueryType = "range"
+    // This is currently not used, but we will use it in the next iteration.
+    Instant PrometheusQueryType = "instant"
+)
@@ -8,7 +8,6 @@ import {
   DataQueryError,
   DataQueryRequest,
   DataQueryResponse,
-  DataSourceApi,
   DataSourceInstanceSettings,
   dateMath,
   DateTime,
@@ -17,7 +16,7 @@ import {
   ScopedVars,
   TimeRange,
 } from '@grafana/data';
-import { BackendSrvRequest, FetchError, FetchResponse, getBackendSrv } from '@grafana/runtime';
+import { BackendSrvRequest, FetchError, FetchResponse, getBackendSrv, DataSourceWithBackend } from '@grafana/runtime';
 
 import { safeStringifyValue } from 'app/core/utils/explore';
 import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
@@ -26,7 +25,7 @@ import addLabelToQuery from './add_label_to_query';
 import PrometheusLanguageProvider from './language_provider';
 import { expandRecordingRules } from './language_utils';
 import { getInitHints, getQueryHints } from './query_hints';
-import { getOriginalMetricName, renderTemplate, transform } from './result_transformer';
+import { getOriginalMetricName, renderTemplate, transform, transformV2 } from './result_transformer';
 import {
   ExemplarTraceIdDestination,
   isFetchErrorResponse,
@@ -47,12 +46,13 @@ export const ANNOTATION_QUERY_STEP_DEFAULT = '60s';
 const EXEMPLARS_NOT_AVAILABLE = 'Exemplars for this query are not available.';
 const GET_AND_POST_METADATA_ENDPOINTS = ['api/v1/query', 'api/v1/query_range', 'api/v1/series', 'api/v1/labels'];
 
-export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions> {
+export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromOptions> {
   type: string;
   editorSrc: string;
   ruleMappings: { [index: string]: string };
   url: string;
   directUrl: string;
   access: 'direct' | 'proxy';
   basicAuth: any;
   withCredentials: any;
   metricsNameCache = new LRU<string, string[]>(10);
@@ -75,6 +75,7 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
     this.type = 'prometheus';
     this.editorSrc = 'app/features/prometheus/partials/query.editor.html';
     this.url = instanceSettings.url!;
     this.access = instanceSettings.access;
     this.basicAuth = instanceSettings.basicAuth;
     this.withCredentials = instanceSettings.withCredentials;
     this.interval = instanceSettings.jsonData.timeInterval || '15s';
|
||||
};
|
||||
};
|
||||
|
||||
prepareOptionsV2 = (options: DataQueryRequest<PromQuery>) => {
|
||||
const targets = options.targets.map((target) => {
|
||||
// We want to format Explore + range queries as time_series
|
||||
return {
|
||||
...target,
|
||||
instant: false,
|
||||
range: true,
|
||||
format: 'time_series',
|
||||
offsetSec: this.timeSrv.timeRange().to.utcOffset() * 60,
|
||||
};
|
||||
});
|
||||
|
||||
return { ...options, targets };
|
||||
};
|
||||
|
||||
query(options: DataQueryRequest<PromQuery>): Observable<DataQueryResponse> {
|
||||
const start = this.getPrometheusTime(options.range.from, false);
|
||||
const end = this.getPrometheusTime(options.range.to, true);
|
||||
const { queries, activeTargets } = this.prepareTargets(options, start, end);
|
||||
// WIP - currently we want to run trough backend only if all queries are explore + range queries
|
||||
const shouldRunBackendQuery =
|
||||
this.access === 'proxy' &&
|
||||
options.app === CoreApp.Explore &&
|
||||
!options.targets.some((query) => query.exemplar) &&
|
||||
!options.targets.some((query) => query.instant);
|
||||
|
||||
// No valid targets, return the empty result to save a round trip.
|
||||
if (!queries || !queries.length) {
|
||||
return of({
|
||||
data: [],
|
||||
state: LoadingState.Done,
|
||||
});
|
||||
if (shouldRunBackendQuery) {
|
||||
const newOptions = this.prepareOptionsV2(options);
|
||||
return super.query(newOptions).pipe(map((response) => transformV2(response, newOptions)));
|
||||
// Run queries trough browser/proxy
|
||||
} else {
|
||||
const start = this.getPrometheusTime(options.range.from, false);
|
||||
const end = this.getPrometheusTime(options.range.to, true);
|
||||
const { queries, activeTargets } = this.prepareTargets(options, start, end);
|
||||
|
||||
// No valid targets, return the empty result to save a round trip.
|
||||
if (!queries || !queries.length) {
|
||||
return of({
|
||||
data: [],
|
||||
state: LoadingState.Done,
|
||||
});
|
||||
}
|
||||
|
||||
if (options.app === CoreApp.Explore) {
|
||||
return this.exploreQuery(queries, activeTargets, end);
|
||||
}
|
||||
|
||||
return this.panelsQuery(queries, activeTargets, end, options.requestId, options.scopedVars);
|
||||
}
|
||||
|
||||
if (options.app === CoreApp.Explore) {
|
||||
return this.exploreQuery(queries, activeTargets, end);
|
||||
}
|
||||
|
||||
return this.panelsQuery(queries, activeTargets, end, options.requestId, options.scopedVars);
|
||||
}
|
||||
|
||||
private exploreQuery(queries: PromQueryRequest[], activeTargets: PromQuery[], end: number) {
|
||||
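Restating the gate above in isolation: a request only takes the new backend path when it comes from Explore, uses proxy access, and contains neither exemplar nor instant queries; prepareOptionsV2 then forces each target to a range/time_series query and attaches offsetSec. A small sketch, with the standalone function name and import paths assumed rather than taken from the change:

import { CoreApp, DataQueryRequest } from '@grafana/data';
import { PromQuery } from './types';

// Hypothetical standalone version of the routing decision shown above.
export function shouldRunThroughBackend(
  access: 'direct' | 'proxy',
  options: DataQueryRequest<PromQuery>
): boolean {
  return (
    access === 'proxy' &&
    options.app === CoreApp.Explore &&
    !options.targets.some((query) => query.exemplar) &&
    !options.targets.some((query) => query.instant)
  );
}

// An Explore range query over the proxy goes to the backend and through
// transformV2; anything else keeps the existing browser/proxy code path.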
@@ -824,6 +853,27 @@ export class PrometheusDatasource extends DataSourceApi<PromQuery, PromOptions>
   getOriginalMetricName(labelData: { [key: string]: string }) {
     return getOriginalMetricName(labelData);
   }
+
+  // Used when running queries through the backend
+  filterQuery(query: PromQuery): boolean {
+    if (query.hide || !query.expr) {
+      return false;
+    }
+    return true;
+  }
+
+  // Used when running queries through the backend
+  applyTemplateVariables(target: PromQuery, scopedVars: ScopedVars): Record<string, any> {
+    const variables = cloneDeep(scopedVars);
+    // We want to interpolate these variables on the backend
+    delete variables.__interval;
+    delete variables.__interval_ms;
+
+    return {
+      ...target,
+      expr: this.templateSrv.replace(target.expr, variables, this.interpolateQueryExpr),
+    };
+  }
 }
 
 /**
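The comment in applyTemplateVariables explains the deletion of the two interval variables: $__interval and $__interval_ms must reach the backend untouched so it can substitute its own step-aligned values, while all other variables are interpolated in the browser. A rough illustration of that outcome, with a hypothetical expression and variables (not taken from the repository):

// Given a target expression and scoped variables like these...
const expr = 'rate(http_requests_total{job="$job"}[$__interval])';
const scopedVars = {
  job: { text: 'api', value: 'api' },
  __interval: { text: '15s', value: '15s' },
  __interval_ms: { text: '15000', value: 15000 },
};

// ...applyTemplateVariables drops __interval and __interval_ms from its copy
// before calling templateSrv.replace, so the returned expr has $job resolved
// but keeps $__interval literal for the backend:
//
//   rate(http_requests_total{job="api"}[$__interval])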
[File diff suppressed because it is too large]
@@ -13,8 +13,12 @@ import {
   ScopedVars,
   TIME_SERIES_TIME_FIELD_NAME,
   TIME_SERIES_VALUE_FIELD_NAME,
+  DataQueryResponse,
+  DataQueryRequest,
+  PreferredVisualisationType,
 } from '@grafana/data';
 import { FetchResponse, getDataSourceSrv, getTemplateSrv } from '@grafana/runtime';
+import { partition } from 'lodash';
 import { descending, deviation } from 'd3';
 import {
   ExemplarTraceIdDestination,
@@ -37,6 +41,84 @@ interface TimeAndValue {
   [TIME_SERIES_VALUE_FIELD_NAME]: number;
 }
 
+// V2 result transformer used to transform query results from queries that were run through the Prometheus backend
+export function transformV2(response: DataQueryResponse, options: DataQueryRequest<PromQuery>) {
+  // Get refIds that have table format as we need to process those into table results
+  const tableRefIds = options.targets.filter((target) => target.format === 'table').map((target) => target.refId);
+  const [tableResults, otherResults]: [DataFrame[], DataFrame[]] = partition(response.data, (dataFrame) =>
+    dataFrame.refId ? tableRefIds.includes(dataFrame.refId) : false
+  );
+
+  // For table results, we need to transform data frames to table data frames
+  const tableFrames = tableResults.map((dataFrame) => {
+    const df = transformDFoTable(dataFrame, options.targets.length);
+    return df;
+  });
+
+  // Everything else is processed as a time_series result with the graph preferredVisualisationType
+  const otherFrames = otherResults.map((dataFrame) => {
+    const df = {
+      ...dataFrame,
+      meta: {
+        ...dataFrame.meta,
+        preferredVisualisationType: 'graph',
+      },
+    } as DataFrame;
+    return df;
+  });
+
+  return { ...response, data: [...otherFrames, ...tableFrames] };
+}
+
+export function transformDFoTable(df: DataFrame, responseLength: number): DataFrame {
+  if (df.length === 0) {
+    return df;
+  }
+
+  const timeField = df.fields[0];
+  const valueField = df.fields[1];
+
+  // Create label fields
+  const promLabels: PromMetric = valueField.labels ?? {};
+  const labelFields = Object.keys(promLabels)
+    .sort()
+    .map((label) => {
+      const numberField = label === 'le';
+      return {
+        name: label,
+        config: { filterable: true },
+        type: numberField ? FieldType.number : FieldType.string,
+        values: new ArrayVector(),
+      };
+    });
+
+  // Fill labelFields with label values
+  labelFields.forEach((field) => field.values.add(getLabelValue(promLabels, field.name)));
+
+  const tableDataFrame = {
+    ...df,
+    name: undefined,
+    meta: { ...df.meta, preferredVisualisationType: 'table' as PreferredVisualisationType },
+    fields: [
+      timeField,
+      ...labelFields,
+      {
+        ...valueField,
+        name: getValueText(responseLength, df.refId),
+        labels: undefined,
+        config: { ...valueField.config, displayNameFromDS: undefined },
+        state: { ...valueField.state, displayName: undefined },
+      },
+    ],
+  };
+
+  return tableDataFrame;
+}
+
+function getValueText(responseLength: number, refId = '') {
+  return responseLength > 1 ? `Value #${refId}` : 'Value';
+}
+
 export function transform(
   response: FetchResponse<PromDataSuccessResponse>,
   transformOptions: {
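To make the table branch above concrete: transformDFoTable keeps the time field, adds one filterable column per Prometheus label (sorted by name, with "le" typed as a number), and renames the value field via getValueText. A schematic of the shape change, using hand-written stand-in frames rather than real backend responses:

// A time_series frame whose value field carries Prometheus labels...
const seriesFrame = {
  refId: 'A',
  length: 1,
  fields: [
    { name: 'Time', values: [1632470400000] },
    { name: 'Value', labels: { job: 'api', le: '0.5' }, values: [42] },
  ],
};

// ...comes out of transformDFoTable(seriesFrame, 1) roughly as:
//
//   meta.preferredVisualisationType: 'table'
//   fields: Time | job | le | Value
//
// where the label columns are filled via getLabelValue, "le" is declared as
// FieldType.number, and the value field is renamed 'Value'
// ('Value #A' when the request had more than one target).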
@@ -10,6 +10,8 @@ export interface PromQuery extends DataQuery {
   hinting?: boolean;
   interval?: string;
   intervalFactor?: number;
+  // Timezone offset to align start & end time on backend
+  offsetSec?: number;
   legendFormat?: string;
   valueWithRefId?: boolean;
   requestId?: string;