Prometheus: Run exemplar Explore queries through backend (#39531)

* Prometheus: Run both Explore queries through backend

* Refactor, simplify

* Set default values for query type selector

* Run multiple queries as one query through backend

* Remove trailing newlines

* Pass utcOffset

* Remove trailing comma

* WIP: Implementation of exemplars

* Add sampling for exemplars

* Refactor to use response as custom metadata

* Simplify processing of exemplars

* Update, clean up

* Refactor the way we get available exemplars

* Simplify exemplars disabling and running on frontend

* Add tests

* Update toggle

* Remove console log

* Fix go linting

* Fix e2e test

* Trigger Build

* Compare lengths, small fix

* Remove duplicated time check

* Address feedback

* Remove redundant !

* Update
Ivana Huckova 2021-10-12 13:16:09 +02:00 committed by GitHub
parent 2f0eccb421
commit 19ad08e6b8
18 changed files with 893 additions and 925 deletions

View File

@ -35,17 +35,15 @@ describe('Exemplars', () => {
});
it('should be able to navigate to configured data source', () => {
e2e().intercept('POST', '**/api/v1/query_exemplars', {
fixture: 'exemplars-query-response.json',
});
e2e().intercept('POST', '**/api/v1/query_range', {
fixture: 'prometheus-query-range-response.json',
});
e2e().intercept('POST', '**/api/v1/query', {
fixture: 'prometheus-query-response.json',
});
e2e().intercept('POST', '**/api/ds/query', {
fixture: 'tempo-response.json',
let intercept = 'prometheus';
e2e().intercept('/api/ds/query', (req) => {
if (intercept === 'prometheus') {
// For the second intercept, we want to send the tempo response
intercept = 'tempo';
req.reply({ fixture: 'exemplars-query-response.json' });
} else {
req.reply({ fixture: 'tempo-response.json' });
}
});
e2e.pages.Explore.visit();
@ -57,10 +55,10 @@ describe('Exemplars', () => {
});
e2e().contains(dataSourceName).scrollIntoView().should('be.visible').click();
e2e.components.TimePicker.openButton().click();
e2e.components.TimePicker.fromField().clear().type('2021-05-11 19:30:00');
e2e.components.TimePicker.toField().clear().type('2021-05-11 21:40:00');
e2e.components.TimePicker.fromField().clear().type('2021-07-10 17:10:00');
e2e.components.TimePicker.toField().clear().type('2021-07-10 17:30:00');
e2e.components.TimePicker.applyTimeRange().click();
e2e.components.QueryField.container().should('be.visible').type('exemplar-query{shift}{enter}');
e2e.components.QueryField.container().should('be.visible').type('exemplar-query_bucket{shift}{enter}');
e2e.components.DataSource.Prometheus.exemplarMarker().first().trigger('mouseover');
e2e().contains('Query with gdev-tempo').click();

View File

@ -8,6 +8,7 @@ import (
"math"
"net/http"
"regexp"
"sort"
"strconv"
"strings"
"time"
@ -142,16 +143,16 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
span.SetTag("stop_unixnano", query.End.UnixNano())
defer span.Finish()
response := make(map[PrometheusQueryType]model.Value)
response := make(map[PrometheusQueryType]interface{})
timeRange := apiv1.Range{
Step: query.Step,
// Align query range to step. It rounds start and end down to a multiple of step.
Start: time.Unix(int64(math.Floor((float64(query.Start.Unix()+query.UtcOffsetSec)/query.Step.Seconds()))*query.Step.Seconds()-float64(query.UtcOffsetSec)), 0),
End: time.Unix(int64(math.Floor((float64(query.End.Unix()+query.UtcOffsetSec)/query.Step.Seconds()))*query.Step.Seconds()-float64(query.UtcOffsetSec)), 0),
}
if query.RangeQuery {
timeRange := apiv1.Range{
Step: query.Step,
// Align query range to step. It rounds start and end down to a multiple of step.
Start: time.Unix(int64(math.Floor((float64(query.Start.Unix()+query.UtcOffsetSec)/query.Step.Seconds()))*query.Step.Seconds()-float64(query.UtcOffsetSec)), 0),
End: time.Unix(int64(math.Floor((float64(query.End.Unix()+query.UtcOffsetSec)/query.Step.Seconds()))*query.Step.Seconds()-float64(query.UtcOffsetSec)), 0),
}
rangeResponse, _, err := client.QueryRange(ctx, query.Expr, timeRange)
if err != nil {
return &result, fmt.Errorf("query: %s failed with: %v", query.Expr, err)
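
The alignment comment above is easier to follow with concrete numbers. The following standalone sketch is not part of the commit; the step, offset and timestamp are invented, but it applies the same floor-to-step expression that QueryData uses for Start and End:

package main

import (
	"fmt"
	"math"
	"time"
)

func main() {
	// Invented inputs: a 30s step, a +02:00 offset and an arbitrary start time.
	step := 30 * time.Second
	var utcOffsetSec int64 = 2 * 60 * 60
	start := time.Unix(1633954323, 0)

	// Same expression as in QueryData: shift into local time, floor down to a
	// multiple of the step, then shift back.
	aligned := time.Unix(int64(math.Floor(
		float64(start.Unix()+utcOffsetSec)/step.Seconds())*step.Seconds()-float64(utcOffsetSec)), 0)

	fmt.Println(start.Unix(), aligned.Unix()) // 1633954323 1633954320
}
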
@ -166,6 +167,15 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
}
response[Instant] = instantResponse
}
// For now, we ignore exemplar errors and continue with processing of other results
if query.ExemplarQuery {
exemplarResponse, err := client.QueryExemplars(ctx, query.Expr, timeRange.Start, timeRange.End)
if err != nil {
exemplarResponse = nil
plog.Error("Exemplar query", query.Expr, "failed with", err)
}
response[Exemplar] = exemplarResponse
}
frames, err := parseResponse(response, query)
if err != nil {
@ -299,53 +309,54 @@ func (s *Service) parseQuery(queryContext *backend.QueryDataRequest, dsInfo *Dat
}
qs = append(qs, &PrometheusQuery{
Expr: expr,
Step: interval,
LegendFormat: model.LegendFormat,
Start: query.TimeRange.From,
End: query.TimeRange.To,
RefId: query.RefID,
InstantQuery: model.InstantQuery,
RangeQuery: rangeQuery,
UtcOffsetSec: model.UtcOffsetSec,
Expr: expr,
Step: interval,
LegendFormat: model.LegendFormat,
Start: query.TimeRange.From,
End: query.TimeRange.To,
RefId: query.RefID,
InstantQuery: model.InstantQuery,
RangeQuery: rangeQuery,
ExemplarQuery: model.ExemplarQuery,
UtcOffsetSec: model.UtcOffsetSec,
})
}
return qs, nil
}
func parseResponse(value map[PrometheusQueryType]model.Value, query *PrometheusQuery) (data.Frames, error) {
allFrames := data.Frames{}
for queryType, value := range value {
var frames data.Frames
func parseResponse(value map[PrometheusQueryType]interface{}, query *PrometheusQuery) (data.Frames, error) {
frames := data.Frames{}
for _, value := range value {
matrix, ok := value.(model.Matrix)
if ok {
frames = matrixToDataFrames(matrix, query, queryType)
matrixFrames := matrixToDataFrames(matrix, query)
frames = append(frames, matrixFrames...)
continue
}
vector, ok := value.(model.Vector)
if ok {
frames = vectorToDataFrames(vector, query, queryType)
vectorFrames := vectorToDataFrames(vector, query)
frames = append(frames, vectorFrames...)
continue
}
scalar, ok := value.(*model.Scalar)
if ok {
frames = scalarToDataFrames(scalar, query, queryType)
scalarFrames := scalarToDataFrames(scalar, query)
frames = append(frames, scalarFrames...)
continue
}
for _, frame := range frames {
frame.Meta = &data.FrameMeta{
Custom: map[string]PrometheusQueryType{
"queryType": queryType,
},
}
exemplar, ok := value.([]apiv1.ExemplarQueryResult)
if ok {
exemplarFrames := exemplarToDataFrames(exemplar, query)
frames = append(frames, exemplarFrames...)
continue
}
allFrames = append(allFrames, frames...)
}
return allFrames, nil
return frames, nil
}
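
Because []apiv1.ExemplarQueryResult does not implement model.Value, the response map's value type widens to interface{} and parseResponse dispatches on the concrete type. Below is a minimal standalone sketch of the same dispatch written as a type switch; PrometheusQueryType is redeclared locally for the sketch, and this shows an equivalent shape rather than the code the commit uses.

package main

import (
	"fmt"

	apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
	"github.com/prometheus/common/model"
)

// PrometheusQueryType mirrors the type defined in the datasource package.
type PrometheusQueryType string

func main() {
	response := map[PrometheusQueryType]interface{}{
		"range":    model.Matrix{},
		"exemplar": []apiv1.ExemplarQueryResult{},
	}

	// Equivalent dispatch written as a type switch instead of
	// sequential type assertions.
	for queryType, value := range response {
		switch v := value.(type) {
		case model.Matrix:
			fmt.Println(queryType, "-> matrix with", len(v), "series")
		case model.Vector:
			fmt.Println(queryType, "-> vector with", len(v), "samples")
		case *model.Scalar:
			fmt.Println(queryType, "-> scalar", v.Value)
		case []apiv1.ExemplarQueryResult:
			fmt.Println(queryType, "-> exemplars for", len(v), "series")
		}
	}
}
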
// IsAPIError returns whether err is or wraps a Prometheus error.
@ -378,7 +389,7 @@ func calculateRateInterval(interval time.Duration, scrapeInterval string, interv
return rateInterval
}
func matrixToDataFrames(matrix model.Matrix, query *PrometheusQuery, queryType PrometheusQueryType) data.Frames {
func matrixToDataFrames(matrix model.Matrix, query *PrometheusQuery) data.Frames {
frames := data.Frames{}
for _, v := range matrix {
@ -396,23 +407,34 @@ func matrixToDataFrames(matrix model.Matrix, query *PrometheusQuery, queryType P
frame := data.NewFrame(name,
data.NewField("Time", nil, timeVector),
data.NewField("Value", tags, values).SetConfig(&data.FieldConfig{DisplayNameFromDS: name}))
frame.Meta = &data.FrameMeta{
Custom: map[string]string{
"resultType": "matrix",
},
}
frames = append(frames, frame)
}
return frames
}
func scalarToDataFrames(scalar *model.Scalar, query *PrometheusQuery, queryType PrometheusQueryType) data.Frames {
func scalarToDataFrames(scalar *model.Scalar, query *PrometheusQuery) data.Frames {
timeVector := []time.Time{time.Unix(scalar.Timestamp.Unix(), 0).UTC()}
values := []float64{float64(scalar.Value)}
name := fmt.Sprintf("%g", values[0])
frame := data.NewFrame(name,
data.NewField("Time", nil, timeVector),
data.NewField("Value", nil, values).SetConfig(&data.FieldConfig{DisplayNameFromDS: name}))
frame.Meta = &data.FrameMeta{
Custom: map[string]string{
"resultType": "scalar",
},
}
frames := data.Frames{frame}
return frames
}
func vectorToDataFrames(vector model.Vector, query *PrometheusQuery, queryType PrometheusQueryType) data.Frames {
func vectorToDataFrames(vector model.Vector, query *PrometheusQuery) data.Frames {
frames := data.Frames{}
for _, v := range vector {
name := formatLegend(v.Metric, query)
@ -425,8 +447,146 @@ func vectorToDataFrames(vector model.Vector, query *PrometheusQuery, queryType P
frame := data.NewFrame(name,
data.NewField("Time", nil, timeVector),
data.NewField("Value", tags, values).SetConfig(&data.FieldConfig{DisplayNameFromDS: name}))
frame.Meta = &data.FrameMeta{
Custom: map[string]string{
"resultType": "vector",
},
}
frames = append(frames, frame)
}
return frames
}
func exemplarToDataFrames(response []apiv1.ExemplarQueryResult, query *PrometheusQuery) data.Frames {
frames := data.Frames{}
events := make([]ExemplarEvent, 0)
for _, exemplarData := range response {
for _, exemplar := range exemplarData.Exemplars {
event := ExemplarEvent{}
exemplarTime := time.Unix(exemplar.Timestamp.Unix(), 0).UTC()
event.Time = exemplarTime
event.Value = float64(exemplar.Value)
event.Labels = make(map[string]string)
for label, value := range exemplar.Labels {
event.Labels[string(label)] = string(value)
}
for seriesLabel, seriesValue := range exemplarData.SeriesLabels {
event.Labels[string(seriesLabel)] = string(seriesValue)
}
events = append(events, event)
}
}
//Sampling of exemplars
bucketedExemplars := make(map[string][]ExemplarEvent)
values := make([]float64, 0)
//Create bucketed exemplars based on aligned timestamp
for _, event := range events {
alignedTs := fmt.Sprintf("%.0f", math.Floor(float64(event.Time.Unix())/query.Step.Seconds())*query.Step.Seconds())
_, ok := bucketedExemplars[alignedTs]
if !ok {
bucketedExemplars[alignedTs] = make([]ExemplarEvent, 0)
}
bucketedExemplars[alignedTs] = append(bucketedExemplars[alignedTs], event)
values = append(values, event.Value)
}
//Calculate standard deviation
standardDeviation := deviation(values)
//Create slice with all of the bucket timestamps
sampledBuckets := make([]string, len(bucketedExemplars))
for bucketTimes := range bucketedExemplars {
sampledBuckets = append(sampledBuckets, bucketTimes)
}
sort.Strings(sampledBuckets)
//Sample exemplars based on value, so we are not showing too many of them
sampleExemplars := make([]ExemplarEvent, 0)
for _, bucket := range sampledBuckets {
exemplarsInBucket := bucketedExemplars[bucket]
if len(exemplarsInBucket) == 1 {
sampleExemplars = append(sampleExemplars, exemplarsInBucket[0])
} else {
bucketValues := make([]float64, len(exemplarsInBucket))
for _, exemplar := range exemplarsInBucket {
bucketValues = append(bucketValues, exemplar.Value)
}
sort.Slice(bucketValues, func(i, j int) bool {
return bucketValues[i] > bucketValues[j]
})
sampledBucketValues := make([]float64, 0)
for _, value := range bucketValues {
if len(sampledBucketValues) == 0 {
sampledBucketValues = append(sampledBucketValues, value)
} else {
// Then take a value only when it is at least 2 standard deviations away from the previously taken value
prev := sampledBucketValues[len(sampledBucketValues)-1]
if standardDeviation != 0 && prev-value >= float64(2)*standardDeviation {
sampledBucketValues = append(sampledBucketValues, value)
}
}
}
for _, valueBucket := range sampledBucketValues {
for _, exemplar := range exemplarsInBucket {
if exemplar.Value == valueBucket {
sampleExemplars = append(sampleExemplars, exemplar)
}
}
}
}
}
// Create DF from sampled exemplars
timeVector := make([]time.Time, 0, len(sampleExemplars))
valuesVector := make([]float64, 0, len(sampleExemplars))
labelsVector := make(map[string][]string, len(sampleExemplars))
for _, exemplar := range sampleExemplars {
timeVector = append(timeVector, exemplar.Time)
valuesVector = append(valuesVector, exemplar.Value)
for label, value := range exemplar.Labels {
if labelsVector[label] == nil {
labelsVector[label] = make([]string, 0)
}
labelsVector[label] = append(labelsVector[label], value)
}
}
frame := data.NewFrame("exemplar",
data.NewField("Time", nil, timeVector),
data.NewField("Value", nil, valuesVector))
for label, vector := range labelsVector {
frame.Fields = append(frame.Fields, data.NewField(label, nil, vector))
}
frame.Meta = &data.FrameMeta{
Custom: map[string]PrometheusQueryType{
"resultType": "exemplar",
},
}
frames = append(frames, frame)
return frames
}
func deviation(values []float64) float64 {
var sum, mean, sd float64
valuesLen := float64(len(values))
for _, value := range values {
sum += value
}
mean = sum / valuesLen
for j := 0; j < len(values); j++ {
sd += math.Pow(values[j]-mean, 2)
}
return math.Sqrt(sd / (valuesLen - 1))
}
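
The sampling above keeps only a spread of exemplar values per step-aligned bucket: within each bucket the values are sorted in descending order, and a value is kept only if it sits at least two standard deviations (computed over all exemplar values) below the previously kept one. Here is a standalone sketch of that rule with invented values that all fall into a single bucket, so the bucket and the global deviation coincide:

package main

import (
	"fmt"
	"math"
	"sort"
)

// deviation mirrors the sample standard deviation helper above.
func deviation(values []float64) float64 {
	var sum, sd float64
	for _, v := range values {
		sum += v
	}
	mean := sum / float64(len(values))
	for _, v := range values {
		sd += math.Pow(v-mean, 2)
	}
	return math.Sqrt(sd / float64(len(values)-1))
}

func main() {
	// Invented exemplar values that all landed in the same bucket.
	bucket := []float64{0.0035, 0.0055, 0.0075, 0.0095}
	sd := deviation(bucket)

	sort.Sort(sort.Reverse(sort.Float64Slice(bucket)))

	kept := []float64{bucket[0]}
	for _, v := range bucket[1:] {
		prev := kept[len(kept)-1]
		// Keep a value only when it is at least 2 standard deviations
		// below the previously kept value.
		if sd != 0 && prev-v >= 2*sd {
			kept = append(kept, v)
		}
	}
	fmt.Printf("stddev=%.4f kept=%v\n", sd, kept) // stddev=0.0026 kept=[0.0095 0.0035]
}
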

View File

@ -6,6 +6,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
p "github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
)
@ -349,6 +350,60 @@ func TestPrometheus_parseQuery(t *testing.T) {
}
func TestPrometheus_parseResponse(t *testing.T) {
t.Run("exemplars response should be sampled and parsed normally", func(t *testing.T) {
value := make(map[PrometheusQueryType]interface{})
exemplars := []apiv1.ExemplarQueryResult{
{
SeriesLabels: p.LabelSet{
"__name__": "tns_request_duration_seconds_bucket",
"instance": "app:80",
"job": "tns/app",
},
Exemplars: []apiv1.Exemplar{
{
Labels: p.LabelSet{"traceID": "test1"},
Value: 0.003535405,
Timestamp: p.TimeFromUnixNano(time.Now().Add(-2 * time.Minute).UnixNano()),
},
{
Labels: p.LabelSet{"traceID": "test2"},
Value: 0.005555605,
Timestamp: p.TimeFromUnixNano(time.Now().Add(-4 * time.Minute).UnixNano()),
},
{
Labels: p.LabelSet{"traceID": "test3"},
Value: 0.007545445,
Timestamp: p.TimeFromUnixNano(time.Now().Add(-6 * time.Minute).UnixNano()),
},
{
Labels: p.LabelSet{"traceID": "test4"},
Value: 0.009545445,
Timestamp: p.TimeFromUnixNano(time.Now().Add(-7 * time.Minute).UnixNano()),
},
},
},
}
value[Exemplar] = exemplars
query := &PrometheusQuery{
LegendFormat: "legend {{app}}",
}
res, err := parseResponse(value, query)
require.NoError(t, err)
// Test fields
require.Len(t, res, 1)
require.Equal(t, res[0].Name, "exemplar")
require.Equal(t, res[0].Fields[0].Name, "Time")
require.Equal(t, res[0].Fields[1].Name, "Value")
require.Len(t, res[0].Fields, 6)
// Test correct values (sampled to 2)
require.Equal(t, res[0].Fields[1].Len(), 2)
require.Equal(t, res[0].Fields[1].At(0), 0.009545445)
require.Equal(t, res[0].Fields[1].At(1), 0.003535405)
})
t.Run("matrix response should be parsed normally", func(t *testing.T) {
values := []p.SamplePair{
{Value: 1, Timestamp: 1000},
@ -357,7 +412,7 @@ func TestPrometheus_parseResponse(t *testing.T) {
{Value: 4, Timestamp: 4000},
{Value: 5, Timestamp: 5000},
}
value := make(map[PrometheusQueryType]p.Value)
value := make(map[PrometheusQueryType]interface{})
value[Range] = p.Matrix{
&p.SampleStream{
Metric: p.Metric{"app": "Application", "tag2": "tag2"},
@ -386,7 +441,7 @@ func TestPrometheus_parseResponse(t *testing.T) {
})
t.Run("vector response should be parsed normally", func(t *testing.T) {
value := make(map[PrometheusQueryType]p.Value)
value := make(map[PrometheusQueryType]interface{})
value[Range] = p.Vector{
&p.Sample{
Metric: p.Metric{"app": "Application", "tag2": "tag2"},
@ -405,6 +460,7 @@ func TestPrometheus_parseResponse(t *testing.T) {
require.Len(t, res[0].Fields, 2)
require.Len(t, res[0].Fields[0].Labels, 0)
require.Equal(t, res[0].Fields[0].Name, "Time")
require.Equal(t, res[0].Fields[0].Name, "Time")
require.Len(t, res[0].Fields[1].Labels, 2)
require.Equal(t, res[0].Fields[1].Labels.String(), "app=Application, tag2=tag2")
require.Equal(t, res[0].Fields[1].Name, "Value")
@ -416,7 +472,7 @@ func TestPrometheus_parseResponse(t *testing.T) {
})
t.Run("scalar response should be parsed normally", func(t *testing.T) {
value := make(map[PrometheusQueryType]p.Value)
value := make(map[PrometheusQueryType]interface{})
value[Range] = &p.Scalar{
Value: 1,
Timestamp: 1000,

View File

@ -15,15 +15,22 @@ type DatasourceInfo struct {
}
type PrometheusQuery struct {
Expr string
Step time.Duration
LegendFormat string
Start time.Time
End time.Time
RefId string
InstantQuery bool
RangeQuery bool
UtcOffsetSec int64
Expr string
Step time.Duration
LegendFormat string
Start time.Time
End time.Time
RefId string
InstantQuery bool
RangeQuery bool
ExemplarQuery bool
UtcOffsetSec int64
}
type ExemplarEvent struct {
Time time.Time
Value float64
Labels map[string]string
}
type QueryModel struct {
@ -34,6 +41,7 @@ type QueryModel struct {
StepMode string `json:"stepMode"`
RangeQuery bool `json:"range"`
InstantQuery bool `json:"instant"`
ExemplarQuery bool `json:"exemplar"`
IntervalFactor int64 `json:"intervalFactor"`
UtcOffsetSec int64 `json:"utcOffsetSec"`
}
@ -41,6 +49,7 @@ type QueryModel struct {
type PrometheusQueryType string
const (
Range PrometheusQueryType = "range"
Instant PrometheusQueryType = "instant"
Range PrometheusQueryType = "range"
Instant PrometheusQueryType = "instant"
Exemplar PrometheusQueryType = "exemplar"
)
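
The new exemplar flag travels in the same JSON model the frontend already posts. The following standalone sketch trims the struct to the fields shown above and uses an invented payload to show how the flag decodes on the backend:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down copy of the QueryModel fields shown above.
type QueryModel struct {
	RangeQuery    bool  `json:"range"`
	InstantQuery  bool  `json:"instant"`
	ExemplarQuery bool  `json:"exemplar"`
	UtcOffsetSec  int64 `json:"utcOffsetSec"`
}

func main() {
	// Example payload as the Explore editor would send it; values are invented.
	payload := []byte(`{"range": true, "instant": true, "exemplar": true, "utcOffsetSec": 7200}`)

	var qm QueryModel
	if err := json.Unmarshal(payload, &qm); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", qm) // {RangeQuery:true InstantQuery:true ExemplarQuery:true UtcOffsetSec:7200}
}
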

View File

@ -2,32 +2,39 @@ import { GrafanaTheme2 } from '@grafana/data';
import { IconButton, InlineLabel, Tooltip, useStyles2 } from '@grafana/ui';
import { css, cx } from '@emotion/css';
import React, { useEffect, useState } from 'react';
import { usePrevious } from 'react-use';
import { PrometheusDatasource } from '../datasource';
import { filter } from 'rxjs/operators';
import { PromQuery } from '../types';
interface Props {
isEnabled: boolean;
onChange: (isEnabled: boolean) => void;
onChange: (exemplar: boolean) => void;
datasource: PrometheusDatasource;
refId: string;
query: PromQuery;
}
export function PromExemplarField({ datasource, onChange, isEnabled, refId }: Props) {
export function PromExemplarField({ datasource, onChange, query }: Props) {
const [error, setError] = useState<string | null>(null);
const styles = useStyles2(getStyles);
const prevError = usePrevious(error);
useEffect(() => {
const subscription = datasource.exemplarErrors.pipe(filter((value) => refId === value.refId)).subscribe((err) => {
setError(err.error);
});
return () => {
subscription.unsubscribe();
};
}, [datasource, refId]);
if (!datasource.exemplarsAvailable) {
setError('Exemplars for this query are not available');
onChange(false);
} else if (query.instant && !query.range) {
setError('Exemplars are not available for instant queries');
onChange(false);
} else {
setError(null);
if (prevError !== error) {
onChange(true);
}
}
}, [datasource.exemplarsAvailable, query.instant, query.range, onChange, prevError, error]);
const iconButtonStyles = cx(
{
[styles.activeIcon]: isEnabled,
[styles.activeIcon]: !!query.exemplar,
},
styles.eyeIcon
);
@ -39,11 +46,11 @@ export function PromExemplarField({ datasource, onChange, isEnabled, refId }: Pr
Exemplars
<IconButton
name="eye"
tooltip={isEnabled ? 'Disable query with exemplars' : 'Enable query with exemplars'}
tooltip={!!query.exemplar ? 'Disable query with exemplars' : 'Enable query with exemplars'}
disabled={!!error}
className={iconButtonStyles}
onClick={() => {
onChange(!isEnabled);
onChange(!query.exemplar);
}}
/>
</div>

View File

@ -1,24 +1,17 @@
import React from 'react';
import { render, screen } from '@testing-library/react';
import { PromExploreExtraFieldProps, PromExploreExtraField } from './PromExploreExtraField';
import { Observable } from 'rxjs';
const setup = (propOverrides?: PromExploreExtraFieldProps) => {
const queryType = 'range';
const stepValue = '1';
const query = { exemplar: false };
const datasource = { exemplarErrors: new Observable() };
const onStepChange = jest.fn();
const onQueryTypeChange = jest.fn();
const onKeyDownFunc = jest.fn();
const datasource = {};
const onChange = jest.fn();
const onRunQuery = jest.fn();
const props: any = {
queryType,
stepValue,
onChange,
onRunQuery,
query,
onStepChange,
onQueryTypeChange,
onKeyDownFunc,
datasource,
};

View File

@ -1,26 +1,21 @@
// Libraries
import React, { memo } from 'react';
import React, { memo, useEffect, useCallback } from 'react';
import { usePrevious } from 'react-use';
import { isEqual } from 'lodash';
import { css, cx } from '@emotion/css';
// Types
import { InlineFormLabel, RadioButtonGroup } from '@grafana/ui';
import { PromQuery } from '../types';
import { PromExemplarField } from './PromExemplarField';
import { PrometheusDatasource } from '../datasource';
export interface PromExploreExtraFieldProps {
queryType: string;
stepValue: string;
query: PromQuery;
onStepChange: (e: React.SyntheticEvent<HTMLInputElement>) => void;
onKeyDownFunc: (e: React.KeyboardEvent<HTMLInputElement>) => void;
onQueryTypeChange: (value: string) => void;
onChange: (value: PromQuery) => void;
onRunQuery: () => void;
datasource: PrometheusDatasource;
}
export const PromExploreExtraField: React.FC<PromExploreExtraFieldProps> = memo(
({ queryType, stepValue, query, onChange, onStepChange, onQueryTypeChange, onKeyDownFunc, datasource }) => {
({ query, datasource, onChange, onRunQuery }) => {
const rangeOptions = [
{ value: 'range', label: 'Range', description: 'Run query over a range of time.' },
{
@ -31,6 +26,56 @@ export const PromExploreExtraField: React.FC<PromExploreExtraFieldProps> = memo(
{ value: 'both', label: 'Both', description: 'Run an Instant query and a Range query.' },
];
const prevQuery = usePrevious(query);
// Setting default values
useEffect(() => {
if (query.exemplar === undefined) {
onChange({ ...query, exemplar: true });
}
if (!query.instant && !query.range) {
onChange({ ...query, instant: true, range: true });
}
}, [onChange, query]);
const onExemplarChange = useCallback(
(exemplar: boolean) => {
if (!isEqual(query, prevQuery) || exemplar !== query.exemplar) {
onChange({ ...query, exemplar });
}
},
[prevQuery, query, onChange]
);
function onChangeQueryStep(interval: string) {
onChange({ ...query, interval });
}
function onStepChange(e: React.SyntheticEvent<HTMLInputElement>) {
if (e.currentTarget.value !== query.interval) {
onChangeQueryStep(e.currentTarget.value);
}
}
function onReturnKeyDown(e: React.KeyboardEvent<HTMLInputElement>) {
if (e.key === 'Enter' && (e.shiftKey || e.ctrlKey)) {
onRunQuery();
}
}
function onQueryTypeChange(queryType: string) {
let nextQuery;
if (queryType === 'instant') {
nextQuery = { ...query, instant: true, range: false };
} else if (queryType === 'range') {
nextQuery = { ...query, instant: false, range: true };
} else {
nextQuery = { ...query, instant: true, range: true };
}
onChange(nextQuery);
}
return (
<div aria-label="Prometheus extra field" className="gf-form-inline">
{/*Query type field*/}
@ -46,7 +91,11 @@ export const PromExploreExtraField: React.FC<PromExploreExtraFieldProps> = memo(
>
<InlineFormLabel width="auto">Query type</InlineFormLabel>
<RadioButtonGroup options={rangeOptions} value={queryType} onChange={onQueryTypeChange} />
<RadioButtonGroup
options={rangeOptions}
value={query.range === query.instant ? 'both' : query.instant ? 'instant' : 'range'}
onChange={onQueryTypeChange}
/>
</div>
{/*Step field*/}
<div
@ -72,17 +121,12 @@ export const PromExploreExtraField: React.FC<PromExploreExtraFieldProps> = memo(
className="gf-form-input width-4"
placeholder={'auto'}
onChange={onStepChange}
onKeyDown={onKeyDownFunc}
value={stepValue}
onKeyDown={onReturnKeyDown}
value={query.interval ?? ''}
/>
</div>
<PromExemplarField
refId={query.refId}
isEnabled={Boolean(query.exemplar)}
onChange={(isEnabled) => onChange({ ...query, exemplar: isEnabled })}
datasource={datasource}
/>
<PromExemplarField onChange={onExemplarChange} datasource={datasource} query={query} />
</div>
);
}

View File

@ -1,11 +1,7 @@
import React, { memo, FC, useEffect } from 'react';
// Types
import React, { memo, FC } from 'react';
import { QueryEditorProps } from '@grafana/data';
import { PrometheusDatasource } from '../datasource';
import { PromQuery, PromOptions } from '../types';
import PromQueryField from './PromQueryField';
import { PromExploreExtraField } from './PromExploreExtraField';
@ -14,47 +10,6 @@ export type Props = QueryEditorProps<PrometheusDatasource, PromQuery, PromOption
export const PromExploreQueryEditor: FC<Props> = (props: Props) => {
const { range, query, data, datasource, history, onChange, onRunQuery } = props;
useEffect(() => {
if (query.exemplar === undefined) {
onChange({ ...query, exemplar: true });
}
if (!query.instant && !query.range) {
onChange({ ...query, instant: true, range: true });
}
}, [onChange, query]);
function onChangeQueryStep(value: string) {
const { query, onChange } = props;
const nextQuery = { ...query, interval: value };
onChange(nextQuery);
}
function onStepChange(e: React.SyntheticEvent<HTMLInputElement>) {
if (e.currentTarget.value !== query.interval) {
onChangeQueryStep(e.currentTarget.value);
}
}
function onReturnKeyDown(e: React.KeyboardEvent<HTMLInputElement>) {
if (e.key === 'Enter' && (e.shiftKey || e.ctrlKey)) {
onRunQuery();
}
}
function onQueryTypeChange(value: string) {
const { query, onChange } = props;
let nextQuery;
if (value === 'instant') {
nextQuery = { ...query, instant: true, range: false };
} else if (value === 'range') {
nextQuery = { ...query, instant: false, range: true };
} else {
nextQuery = { ...query, instant: true, range: true };
}
onChange(nextQuery);
}
return (
<PromQueryField
datasource={datasource}
@ -66,17 +21,7 @@ export const PromExploreQueryEditor: FC<Props> = (props: Props) => {
history={history}
data={data}
ExtraFieldElement={
<PromExploreExtraField
// Select "both" as default option when Explore is opened. In legacy requests, range and instant can be undefined. In this case, we want to run queries with "both".
queryType={query.range === query.instant ? 'both' : query.instant ? 'instant' : 'range'}
stepValue={query.interval || ''}
onQueryTypeChange={onQueryTypeChange}
onStepChange={onStepChange}
onKeyDownFunc={onReturnKeyDown}
query={query}
onChange={onChange}
datasource={datasource}
/>
<PromExploreExtraField query={query} onChange={onChange} datasource={datasource} onRunQuery={onRunQuery} />
}
/>
);

View File

@ -110,7 +110,7 @@ export class PromQueryEditor extends PureComponent<PromQueryEditorProps, State>
render() {
const { datasource, query, range, data } = this.props;
const { formatOption, instant, interval, intervalFactorOption, legendFormat, exemplar } = this.state;
const { formatOption, instant, interval, intervalFactorOption, legendFormat } = this.state;
return (
<PromQueryField
@ -197,12 +197,7 @@ export class PromQueryEditor extends PureComponent<PromQueryEditorProps, State>
/>
</InlineFormLabel>
</div>
<PromExemplarField
refId={query.refId}
isEnabled={exemplar}
onChange={this.onExemplarChange}
datasource={datasource}
/>
<PromExemplarField onChange={this.onExemplarChange} datasource={datasource} query={this.query} />
</div>
}
/>

View File

@ -3,8 +3,6 @@ import { render, RenderResult } from '@testing-library/react';
import { PromQueryEditorByApp } from './PromQueryEditorByApp';
import { CoreApp } from '@grafana/data';
import { noop } from 'lodash';
import { Observable } from 'rxjs';
import { first } from 'rxjs/operators';
import { PrometheusDatasource } from '../datasource';
import { testIds as alertingTestIds } from './PromQueryEditorForAlerting';
import { testIds as regularTestIds } from './PromQueryEditor';
@ -20,7 +18,6 @@ function setup(app: CoreApp): RenderResult {
getLabelKeys: () => [],
metrics: [],
},
exemplarErrors: new Observable().pipe(first()),
} as unknown) as PrometheusDatasource;
return render(

View File

@ -14,9 +14,7 @@ exports[`PromExploreQueryEditor should render component 1`] = `
}
}
onChange={[MockFunction]}
onKeyDownFunc={[Function]}
onQueryTypeChange={[Function]}
onStepChange={[Function]}
onRunQuery={[MockFunction]}
query={
Object {
"expr": "",
@ -24,8 +22,6 @@ exports[`PromExploreQueryEditor should render component 1`] = `
"refId": "A",
}
}
queryType="both"
stepValue="1s"
/>
}
data={

View File

@ -176,9 +176,16 @@ exports[`Render PromQueryEditor with basic options should render 1`] = `
"getPrometheusTime": [MockFunction],
}
}
isEnabled={true}
onChange={[Function]}
refId="A"
query={
Object {
"exemplar": true,
"expr": "",
"interval": "",
"legendFormat": "",
"refId": "A",
}
}
/>
</div>
}

View File

@ -1,5 +1,5 @@
import { cloneDeep, defaults } from 'lodash';
import { forkJoin, lastValueFrom, merge, Observable, of, OperatorFunction, pipe, Subject, throwError } from 'rxjs';
import { forkJoin, lastValueFrom, merge, Observable, of, OperatorFunction, pipe, throwError } from 'rxjs';
import { catchError, filter, map, tap } from 'rxjs/operators';
import LRU from 'lru-cache';
import {
@ -43,7 +43,6 @@ import { PrometheusVariableSupport } from './variables';
import PrometheusMetricFindQuery from './metric_find_query';
export const ANNOTATION_QUERY_STEP_DEFAULT = '60s';
const EXEMPLARS_NOT_AVAILABLE = 'Exemplars for this query are not available.';
const GET_AND_POST_METADATA_ENDPOINTS = ['api/v1/query', 'api/v1/query_range', 'api/v1/series', 'api/v1/labels'];
export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromOptions> {
@ -63,7 +62,7 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
exemplarTraceIdDestinations: ExemplarTraceIdDestination[] | undefined;
lookupsDisabled: boolean;
customQueryParameters: any;
exemplarErrors: Subject<{ refId: string; error: string | null }> = new Subject();
exemplarsAvailable: boolean;
constructor(
instanceSettings: DataSourceInstanceSettings<PromOptions>,
@ -88,10 +87,12 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
this.lookupsDisabled = instanceSettings.jsonData.disableMetricsLookup ?? false;
this.customQueryParameters = new URLSearchParams(instanceSettings.jsonData.customQueryParameters);
this.variables = new PrometheusVariableSupport(this, this.templateSrv, this.timeSrv);
this.exemplarsAvailable = true;
}
init = () => {
init = async () => {
this.loadRules();
this.exemplarsAvailable = await this.areExemplarsAvailable();
};
getQueryDisplayText(query: PromQuery) {
@ -271,13 +272,9 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
exemplarTarget.requestId += '_exemplar';
queries.push(this.createQuery(exemplarTarget, options, start, end));
activeTargets.push(exemplarTarget);
this.exemplarErrors.next({ refId: exemplarTarget.refId, error: null });
}
target.exemplar = false;
}
if (target.exemplar && target.instant) {
this.exemplarErrors.next({ refId: target.refId, error: 'Exemplars are not available for instant queries.' });
}
queries.push(this.createQuery(target, options, start, end));
activeTargets.push(target);
}
@ -289,23 +286,53 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
};
};
query(options: DataQueryRequest<PromQuery>): Observable<DataQueryResponse> {
shouldRunExemplarQuery(target: PromQuery): boolean {
/* We want to run the exemplar query only for histogram metrics:
1. If we haven't processed histogram metrics yet, we need to check if expr includes "_bucket", which means that it is probably a histogram metric (can rarely lead to false positives).
2. If we have processed histogram metrics, check if one of them is part of the query expr.
*/
if (target.exemplar) {
const histogramMetrics = this.languageProvider.histogramMetrics;
if (histogramMetrics.length > 0) {
return !!histogramMetrics.find((metric) => target.expr.includes(metric));
} else {
return target.expr.includes('_bucket');
}
}
return false;
}
processTargetV2(target: PromQuery, request: DataQueryRequest<PromQuery>) {
const processedTarget = {
...target,
exemplar: this.shouldRunExemplarQuery(target),
requestId: request.panelId + target.refId,
// We need to pass utcOffsetSec to backend to calculate aligned range
utcOffsetSec: this.timeSrv.timeRange().to.utcOffset() * 60,
};
return processedTarget;
}
query(request: DataQueryRequest<PromQuery>): Observable<DataQueryResponse> {
// WIP - currently we want to run through backend only if all queries are explore + range/instant queries
const shouldRunBackendQuery =
this.access === 'proxy' && options.app === CoreApp.Explore && !options.targets.some((query) => query.exemplar);
const shouldRunBackendQuery = this.access === 'proxy' && request.app === CoreApp.Explore;
if (shouldRunBackendQuery) {
const targets = options.targets.map((target) => ({
...target,
// We need to pass utcOffsetSec to backend to calculate aligned range
utcOffsetSec: this.timeSrv.timeRange().to.utcOffset() * 60,
}));
return super.query({ ...options, targets }).pipe(map((response) => transformV2(response, options)));
const targets = request.targets.map((target) => this.processTargetV2(target, request));
return super
.query({ ...request, targets })
.pipe(
map((response) =>
transformV2(response, request, { exemplarTraceIdDestinations: this.exemplarTraceIdDestinations })
)
);
// Run queries through browser/proxy
} else {
const start = this.getPrometheusTime(options.range.from, false);
const end = this.getPrometheusTime(options.range.to, true);
const { queries, activeTargets } = this.prepareTargets(options, start, end);
const start = this.getPrometheusTime(request.range.from, false);
const end = this.getPrometheusTime(request.range.to, true);
const { queries, activeTargets } = this.prepareTargets(request, start, end);
// No valid targets, return the empty result to save a round trip.
if (!queries || !queries.length) {
@ -315,11 +342,11 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
});
}
if (options.app === CoreApp.Explore) {
if (request.app === CoreApp.Explore) {
return this.exploreQuery(queries, activeTargets, end);
}
return this.panelsQuery(queries, activeTargets, end, options.requestId, options.scopedVars);
return this.panelsQuery(queries, activeTargets, end, request.requestId, request.scopedVars);
}
}
@ -404,7 +431,6 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
if (query.exemplar) {
return this.getExemplars(query).pipe(
catchError(() => {
this.exemplarErrors.next({ refId: query.refId, error: EXEMPLARS_NOT_AVAILABLE });
return of({
data: [],
state: LoadingState.Done,
@ -785,6 +811,18 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
}
}
async areExemplarsAvailable() {
try {
const res = await this.metadataRequest('/api/v1/query_exemplars', { query: 'test' });
if (res.statusText === 'OK') {
return true;
}
return false;
} catch (err) {
return false;
}
}
modifyQuery(query: PromQuery, action: any): PromQuery {
let expression = query.expr ?? '';
switch (action.type) {

View File

@ -9,7 +9,7 @@ import {
addLimitInfo,
fixSummariesMetadata,
parseSelector,
processHistogramLabels,
processHistogramMetrics,
processLabels,
roundSecToMin,
} from './language_utils';
@ -123,8 +123,7 @@ export default class PromQlLanguageProvider extends LanguageProvider {
await this.fetchLabels();
this.metrics = (await this.fetchLabelValues('__name__')) || [];
this.metricsMetadata = fixSummariesMetadata(await this.request('/api/v1/metadata', {}));
this.processHistogramMetrics(this.metrics);
this.histogramMetrics = processHistogramMetrics(this.metrics).sort();
return [];
};
@ -132,14 +131,6 @@ export default class PromQlLanguageProvider extends LanguageProvider {
return this.labelKeys;
}
processHistogramMetrics = (data: string[]) => {
const { values } = processHistogramLabels(data);
if (values && values['__name__']) {
this.histogramMetrics = values['__name__'].slice().sort();
}
};
provideCompletionItems = async (
{ prefix, text, value, labelKey, wrapperClasses }: TypeaheadInput,
context: AutocompleteContext = {}

View File

@ -2,19 +2,17 @@ import { PromMetricsMetadata, PromMetricsMetadataItem } from './types';
import { addLabelToQuery } from './add_label_to_query';
import { SUGGESTIONS_LIMIT } from './language_provider';
export const processHistogramLabels = (labels: string[]) => {
export const processHistogramMetrics = (metrics: string[]) => {
const resultSet: Set<string> = new Set();
const regexp = new RegExp('_bucket($|:)');
for (let index = 0; index < labels.length; index++) {
const label = labels[index];
const isHistogramValue = regexp.test(label);
for (let index = 0; index < metrics.length; index++) {
const metric = metrics[index];
const isHistogramValue = regexp.test(metric);
if (isHistogramValue) {
resultSet.add(label);
resultSet.add(metric);
}
}
const result = [...resultSet];
return { values: { __name__: result } };
return [...resultSet];
};
export function processLabels(labels: Array<{ [key: string]: string }>, withName = false) {

View File

@ -53,7 +53,7 @@ describe('Prometheus Result Transformer', () => {
},
],
} as unknown) as DataQueryResponse;
const series = transformV2(response, options);
const series = transformV2(response, options, {});
expect(series).toEqual({
data: [{ fields: [], length: 2, meta: { preferredVisualisationType: 'graph' }, name: 'ALERTS', refId: 'A' }],
state: 'Done',
@ -86,7 +86,7 @@ describe('Prometheus Result Transformer', () => {
}),
],
} as unknown) as DataQueryResponse;
const series = transformV2(response, options);
const series = transformV2(response, options, {});
// expect(series.data[0]).toBe({});
expect(series.data[0].fields[0].name).toEqual('time');
expect(series.data[0].fields[1].name).toEqual('label1');
@ -137,7 +137,7 @@ describe('Prometheus Result Transformer', () => {
}),
],
} as unknown) as DataQueryResponse;
const series = transformV2(response, options);
const series = transformV2(response, options, {});
expect(series.data[0].fields.length).toEqual(2);
expect(series.data[0].meta?.preferredVisualisationType).toEqual('graph');
expect(series.data[1].fields.length).toEqual(4);

View File

@ -44,7 +44,7 @@ interface TimeAndValue {
const isTableResult = (dataFrame: DataFrame, options: DataQueryRequest<PromQuery>): boolean => {
// We want to process instant results in Explore as table
if ((options.app === CoreApp.Explore && dataFrame.meta?.custom?.queryType) === 'instant') {
if ((options.app === CoreApp.Explore && dataFrame.meta?.custom?.resultType) === 'vector') {
return true;
}
@ -58,19 +58,46 @@ const isTableResult = (dataFrame: DataFrame, options: DataQueryRequest<PromQuery
};
// V2 result transformer used to transform query results from queries that were run through the Prometheus backend
export function transformV2(response: DataQueryResponse, options: DataQueryRequest<PromQuery>) {
const [tableResults, otherResults]: [DataFrame[], DataFrame[]] = partition(response.data, (dataFrame) =>
isTableResult(dataFrame, options)
export function transformV2(
response: DataQueryResponse,
request: DataQueryRequest<PromQuery>,
options: { exemplarTraceIdDestinations?: ExemplarTraceIdDestination[] }
) {
const [tableResults, results]: [DataFrame[], DataFrame[]] = partition(response.data, (dataFrame) =>
isTableResult(dataFrame, request)
);
// For table results, we need to transform data frames to table data frames
const responseLength = options.targets.filter((target) => !target.hide).length;
// TABLE FRAMES: For table results, we need to transform data frames to table data frames
const responseLength = request.targets.filter((target) => !target.hide).length;
const tableFrames = tableResults.map((dataFrame) => {
const df = transformDFoTable(dataFrame, responseLength);
return df;
});
// Everything else is processed as time_series result and graph preferredVisualisationType
const [exemplarResults, otherResults]: [DataFrame[], DataFrame[]] = partition(
results,
(dataFrame) => dataFrame.meta?.custom?.resultType === 'exemplar'
);
// EXEMPLAR FRAMES: We enrich exemplar frames with data links and add dataTopic meta info
const { exemplarTraceIdDestinations: destinations } = options;
const exemplarFrames = exemplarResults.map((dataFrame) => {
if (destinations?.length) {
for (const exemplarTraceIdDestination of destinations) {
const traceIDField = dataFrame.fields.find((field) => field.name === exemplarTraceIdDestination.name);
if (traceIDField) {
const links = getDataLinks(exemplarTraceIdDestination);
traceIDField.config.links = traceIDField.config.links?.length
? [...traceIDField.config.links, ...links]
: links;
}
}
}
return { ...dataFrame, meta: { ...dataFrame.meta, dataTopic: DataTopic.Annotations } };
});
// OTHER FRAMES: Everything else is processed as time_series result and graph preferredVisualisationType
const otherFrames = otherResults.map((dataFrame) => {
const df = {
...dataFrame,
@ -82,7 +109,7 @@ export function transformV2(response: DataQueryResponse, options: DataQueryReque
return df;
});
return { ...response, data: [...otherFrames, ...tableFrames] };
return { ...response, data: [...otherFrames, ...tableFrames, ...exemplarFrames] };
}
export function transformDFoTable(df: DataFrame, responseLength: number): DataFrame {
@ -186,7 +213,7 @@ export function transform(
// Add data links if configured
if (transformOptions.exemplarTraceIdDestinations?.length) {
for (const exemplarTraceIdDestination of transformOptions.exemplarTraceIdDestinations) {
const traceIDField = dataFrame.fields.find((field) => field.name === exemplarTraceIdDestination!.name);
const traceIDField = dataFrame.fields.find((field) => field.name === exemplarTraceIdDestination.name);
if (traceIDField) {
const links = getDataLinks(exemplarTraceIdDestination);
traceIDField.config.links = traceIDField.config.links?.length