Google Cloud Monitor: Prom query editor (#73503)

* revert

* works but needs clean up and tests

* clean up

* remove any

* change confusing query var to expr

* oops

* add test

* lint

* cleanup

* update docs

* Update public/app/plugins/datasource/cloud-monitoring/components/PromQLEditor.tsx

Co-authored-by: Andreas Christou <andreas.christou@grafana.com>

* nit

* lint fix?

* remove comment from cue

* go linter

* removing parsing stuff parseresponse func

---------

Co-authored-by: Andreas Christou <andreas.christou@grafana.com>
This commit is contained in:
Andrew Hackmann 2023-08-18 11:14:43 -05:00 committed by GitHub
parent 23ae1127a7
commit 42f4306251
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 512 additions and 17 deletions

View File

@ -218,8 +218,6 @@ To understand basic MQL concepts, refer to [Introduction to Monitoring Query Lan
**To create an MQL query:**
1. Select the **Metrics** option in the **Query Type** dropdown.
1. Select **<> Edit MQL** next to the **Query Type** field.
This toggles the MQL query builder mode.
1. Select a project from the **Project** dropdown.
1. Enter your MQL query in the text area.
@ -274,6 +272,15 @@ The **Alias By** field helps you control the format of legend keys for SLO queri
SLO queries use the same alignment period functionality as [metric queries](#define-the-alignment-period).
### Create a Prometheus query
**To create a Prometheus query:**
1. Select the **PromQL** option in the **Query Type** dropdown.
1. Select a project from the **Project** dropdown.
1. Enter your Prometheus query in the text area.
1. Enter a Min Step interval. The **Min step** setting defines the lower bounds on the interval between data points. For example, set this to `1h` to hint that measurements are taken hourly. This setting supports the `$__interval` and `$__rate_interval` macros.
## Apply annotations
{{< figure src="/static/img/docs/google-cloud-monitoring/annotations-8-0.png" max-width= "400px" class="docs-image--right" >}}

View File

@ -22,6 +22,10 @@ export interface CloudMonitoringQuery extends common.DataQuery {
* Time interval in milliseconds.
*/
intervalMs?: number;
/**
* PromQL sub-query properties.
*/
promQLQuery?: PromQLQuery;
/**
* SLO sub-query properties.
*/
@ -43,6 +47,7 @@ export interface CloudMonitoringQuery extends common.DataQuery {
*/
export enum QueryType {
ANNOTATION = 'annotation',
PROMQL = 'promQL',
SLO = 'slo',
TIME_SERIES_LIST = 'timeSeriesList',
TIME_SERIES_QUERY = 'timeSeriesQuery',
@ -189,6 +194,24 @@ export interface SLOQuery {
sloName: string;
}
/**
* PromQL sub-query properties.
*/
export interface PromQLQuery {
/**
* PromQL expression/query to be executed.
*/
expr: string;
/**
* GCP project to execute the query against.
*/
projectName: string;
/**
* PromQL min step
*/
step: string;
}
/**
* @deprecated This type is for migration purposes only. Replaced by TimeSeriesList Metric sub-query properties.
*/

View File

@ -38,7 +38,7 @@ func (s *Service) executeAnnotationQuery(ctx context.Context, req *backend.Query
if err != nil {
return resp, nil
}
err = parseToAnnotations(req.Queries[0].RefID, queryRes, dr, tslq.TimeSeriesList.Title, tslq.TimeSeriesList.Text)
err = parseToAnnotations(req.Queries[0].RefID, queryRes, dr.(cloudMonitoringResponse), tslq.TimeSeriesList.Title, tslq.TimeSeriesList.Text)
resp.Responses[firstQuery.RefID] = *queryRes
return resp, err

View File

@ -60,6 +60,7 @@ const (
timeSeriesListQueryType = dataquery.QueryTypeTimeSeriesList
timeSeriesQueryQueryType = dataquery.QueryTypeTimeSeriesQuery
sloQueryType = dataquery.QueryTypeSlo
promQLQueryType = dataquery.QueryTypePromQL
crossSeriesReducerDefault = "REDUCE_NONE"
perSeriesAlignerDefault = "ALIGN_MEAN"
)
@ -432,6 +433,15 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR
}
cmslo.setParams(startTime, endTime, durationSeconds, query.Interval.Milliseconds())
queryInterface = cmslo
case string(dataquery.QueryTypePromQL):
cmp := &cloudMonitoringProm{
refID: query.RefID,
logger: logger,
aliasBy: q.AliasBy,
parameters: q.PromQLQuery,
timeRange: req.Queries[0].TimeRange,
}
queryInterface = cmp
default:
return nil, fmt.Errorf("unrecognized query type %q", query.QueryType)
}

View File

@ -67,6 +67,7 @@ const (
// Defines values for QueryType.
const (
QueryTypeAnnotation QueryType = "annotation"
QueryTypePromQL QueryType = "promQL"
QueryTypeSlo QueryType = "slo"
QueryTypeTimeSeriesList QueryType = "timeSeriesList"
QueryTypeTimeSeriesQuery QueryType = "timeSeriesQuery"
@ -99,6 +100,9 @@ type CloudMonitoringQuery struct {
// Time interval in milliseconds.
IntervalMs *float32 `json:"intervalMs,omitempty"`
// PromQL sub-query properties.
PromQLQuery *PromQLQuery `json:"promQLQuery,omitempty"`
// SLO sub-query properties.
SloQuery *SLOQuery `json:"sloQuery,omitempty"`
@ -220,6 +224,18 @@ type MetricQuery struct {
// Types of pre-processor available. Defined by the metric.
type PreprocessorType string
// PromQL sub-query properties.
type PromQLQuery struct {
// PromQL expression/query to be executed.
Expr string `json:"expr"`
// GCP project to execute the query against.
ProjectName string `json:"projectName"`
// PromQL min step
Step string `json:"step"`
}
// Defines the supported queryTypes.
type QueryType string

View File

@ -0,0 +1,117 @@
package cloudmonitoring
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"path"
	"strconv"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	jsoniter "github.com/json-iterator/go"

	"github.com/grafana/grafana/pkg/infra/tracing"
	"github.com/grafana/grafana/pkg/util/converter"
)
// run executes the PromQL expression against the Cloud Monitoring Prometheus
// query_range endpoint for the query's project and time range.
//
// Request/build errors are recorded on the returned *backend.DataResponse
// (with a nil error return) so the caller can surface them per-query. The
// second return value is the parsed Prometheus-style response, the third the
// executed query string.
func (promQLQ *cloudMonitoringProm) run(ctx context.Context, req *backend.QueryDataRequest,
	s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
	dr := &backend.DataResponse{}
	projectName, err := s.ensureProject(ctx, dsInfo, promQLQ.parameters.ProjectName)
	if err != nil {
		dr.Error = err
		return dr, promResponse{}, "", nil
	}
	r, err := createRequest(ctx, promQLQ.logger, &dsInfo, path.Join("/v1/projects", projectName, "location/global/prometheus/api/v1/query_range"), nil)
	if err != nil {
		dr.Error = err
		return dr, promResponse{}, "", nil
	}

	span := traceReq(ctx, tracer, req, dsInfo, r, "")
	defer span.End()

	requestBody := map[string]any{
		"query": promQLQ.parameters.Expr,
		"end":   formatTime(promQLQ.timeRange.To),
		"start": formatTime(promQLQ.timeRange.From),
		"step":  promQLQ.parameters.Step,
	}
	res, err := doRequestProm(r, dsInfo, requestBody)
	// Check the error BEFORE deferring the body close: on a failed request res
	// may be nil and a previously-registered defer would panic on res.Body.
	if err != nil {
		dr.Error = err
		return dr, promResponse{}, "", nil
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			promQLQ.logger.Error("Failed to close response body", "err", err)
		}
	}()

	return dr, parseProm(res), r.URL.RawQuery, nil
}
// doRequestProm executes r with the Cloud Monitoring client. When body is
// non-nil it is JSON-encoded as the request body and the method is switched
// to POST. The caller owns the returned response body and must close it.
func doRequestProm(r *http.Request, dsInfo datasourceInfo, body map[string]interface{}) (*http.Response, error) {
	if body != nil {
		buf, err := json.Marshal(body)
		if err != nil {
			return nil, err
		}
		r.Body = io.NopCloser(bytes.NewBuffer(buf))
		r.Method = http.MethodPost
	}
	// Return the client result directly; the previous explicit err branch
	// returned exactly the same pair in both arms.
	return dsInfo.services[cloudMonitor].client.Do(r)
}
// parseProm streams the Prometheus-style JSON body of res into a
// backend.DataResponse using the shared converter (long-format frames).
func parseProm(res *http.Response) backend.DataResponse {
	opts := converter.Options{
		MatrixWideSeries: false,
		VectorWideSeries: false,
		Dataplane:        false,
	}
	it := jsoniter.Parse(jsoniter.ConfigDefault, res.Body, 1024)
	return converter.ReadPrometheusStyleResult(it, opts)
}
// We are not parsing the response in this function. ReadPrometheusStyleResult needs an open reader and we cannot
// pass an open reader to this function because lint complains as it is unsafe.
//
// parseResponse copies the already-parsed backend.DataResponse produced by
// run/parseProm into queryRes, guaranteeing at least one frame exists so
// metadata can be attached.
func (promQLQ *cloudMonitoringProm) parseResponse(queryRes *backend.DataResponse,
	response any, executedQueryString string) error {
	// Use a checked assertion: a bare response.(backend.DataResponse) would
	// panic on an unexpected type instead of surfacing a query error.
	r, ok := response.(backend.DataResponse)
	if !ok {
		return fmt.Errorf("unexpected response type %T, expected backend.DataResponse", response)
	}
	// Add frame to attach metadata
	if len(r.Frames) == 0 {
		r.Frames = append(r.Frames, data.NewFrame(""))
	}
	*queryRes = r
	return nil
}
// buildDeepLink returns an empty string: PromQL queries have no
// corresponding Cloud Console deep link.
func (promQLQ *cloudMonitoringProm) buildDeepLink() string {
	return ""
}

// getRefID returns the RefID of the query this executor was built for.
func (promQLQ *cloudMonitoringProm) getRefID() string {
	return promQLQ.refID
}

// getAliasBy returns the user-configured alias pattern for legend keys.
func (promQLQ *cloudMonitoringProm) getAliasBy() string {
	return promQLQ.aliasBy
}

// getParameter always returns "": PromQL queries expose no named
// parameters through this interface (i is intentionally ignored).
func (promQLQ *cloudMonitoringProm) getParameter(i string) string {
	return ""
}
func formatTime(t time.Time) string {
return strconv.FormatFloat(float64(t.Unix())+float64(t.Nanosecond())/1e9, 'f', -1, 64)
}

View File

@ -0,0 +1,32 @@
package cloudmonitoring
import (
"io"
"net/http"
"os"
"strings"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/stretchr/testify/require"
)
// TestPromqlQuery verifies that parseResponse passes a Prometheus-style
// response (decoded by parseProm) through to the data response, comparing
// the resulting frame against a golden file.
func TestPromqlQuery(t *testing.T) {
	t.Run("parseResponse is returned", func(t *testing.T) {
		// Check the ReadFile error before using fileData: the original built
		// the reader from fileData first, then checked err (twice).
		fileData, err := os.ReadFile("./test-data/11-prom-response.json")
		require.NoError(t, err)

		res := http.Response{Body: io.NopCloser(strings.NewReader(string(fileData)))}

		dataRes := &backend.DataResponse{}
		query := &cloudMonitoringProm{}
		err = query.parseResponse(dataRes, parseProm(&res), "")
		require.NoError(t, err)

		frame := dataRes.Frames[0]
		experimental.CheckGoldenJSONFrame(t, "test-data", "parse-response-is-returned", frame, false)
	})
}

View File

@ -12,13 +12,13 @@ import (
)
func (sloQ *cloudMonitoringSLO) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
return runTimeSeriesRequest(ctx, sloQ.logger, req, s, dsInfo, tracer, sloQ.parameters.ProjectName, sloQ.params, nil)
}
func (sloQ *cloudMonitoringSLO) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response, executedQueryString, sloQ, sloQ.params, []string{})
response any, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response.(cloudMonitoringResponse), executedQueryString, sloQ, sloQ.params, []string{})
}
func (sloQ *cloudMonitoringSLO) buildDeepLink() string {

View File

@ -0,0 +1,21 @@
{
"status": "success",
"data": {
"resultType": "matrix",
"result": [
{
"metric": {},
"values": [
[
1,
"1234"
],
[
2,
"12345"
]
]
}
]
}
}

View File

@ -0,0 +1,73 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// }
// }
// Name:
// Dimensions: 2 Fields by 2 Rows
// +-------------------------------+-----------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------+
// | 1970-01-01 00:00:01 +0000 UTC | 1234 |
// | 1970-01-01 00:00:02 +0000 UTC | 12345 |
// +-------------------------------+-----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {}
}
]
},
"data": {
"values": [
[
1000,
2000
],
[
1234,
12345
]
]
}
}
]
}

View File

@ -16,7 +16,7 @@ import (
)
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
return runTimeSeriesRequest(ctx, timeSeriesFilter.logger, req, s, dsInfo, tracer, timeSeriesFilter.parameters.ProjectName, timeSeriesFilter.params, nil)
}
@ -56,8 +56,8 @@ func parseTimeSeriesResponse(queryRes *backend.DataResponse,
}
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response, executedQueryString, timeSeriesFilter, timeSeriesFilter.params, timeSeriesFilter.parameters.GroupBys)
response any, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response.(cloudMonitoringResponse), executedQueryString, timeSeriesFilter, timeSeriesFilter.params, timeSeriesFilter.parameters.GroupBys)
}
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) buildDeepLink() string {

View File

@ -28,7 +28,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) appendGraphPeriod(req *ba
}
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
timeSeriesQuery.parameters.Query += timeSeriesQuery.appendGraphPeriod(req)
from := req.Queries[0].TimeRange.From
to := req.Queries[0].TimeRange.To
@ -41,7 +41,8 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context,
}
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
res any, executedQueryString string) error {
response := res.(cloudMonitoringResponse)
frames := data.Frames{}
for _, series := range response.TimeSeriesData {

View File

@ -20,8 +20,8 @@ import (
type (
cloudMonitoringQueryExecutor interface {
run(ctx context.Context, req *backend.QueryDataRequest, s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (
*backend.DataResponse, cloudMonitoringResponse, string, error)
parseResponse(dr *backend.DataResponse, data cloudMonitoringResponse, executedQueryString string) error
*backend.DataResponse, any, string, error)
parseResponse(dr *backend.DataResponse, data any, executedQueryString string) error
buildDeepLink() string
getRefID() string
getAliasBy() string
@ -35,6 +35,7 @@ type (
TimeSeriesList *dataquery.TimeSeriesList `json:"timeSeriesList,omitempty"`
TimeSeriesQuery *dataquery.TimeSeriesQuery `json:"timeSeriesQuery,omitempty"`
SloQuery *dataquery.SLOQuery `json:"sloQuery,omitempty"`
PromQLQuery *dataquery.PromQLQuery `json:"promQLQuery,omitempty"`
}
cloudMonitoringTimeSeriesList struct {
@ -55,6 +56,16 @@ type (
params url.Values
}
// cloudMonitoringProm is used to build PromQL queries
cloudMonitoringProm struct {
refID string
aliasBy string
logger log.Logger
parameters *dataquery.PromQLQuery
timeRange backend.TimeRange
IntervalMS int64
}
// cloudMonitoringTimeSeriesQuery is used to build MQL queries
cloudMonitoringTimeSeriesQuery struct {
refID string
@ -89,6 +100,14 @@ type (
Unit string `json:"unit"`
NextPageToken string `json:"nextPageToken"`
}
promResponse struct {
Status string `json:"status"`
Data struct {
Result any `json:"result"`
ResultType string `json:"resultType"`
} `json:"data"`
}
)
type pointIterator interface {

View File

@ -0,0 +1,93 @@
import { css, cx } from '@emotion/css';
import React from 'react';
import { SelectableValue } from '@grafana/data';
import { EditorRow } from '@grafana/experimental';
import { TextArea, InlineFormLabel } from '@grafana/ui';
import CloudMonitoringDatasource from '../datasource';
import { PromQLQuery } from '../types/query';
import { Project } from './Project';
// Props for the PromQL query editor component.
export interface Props {
  refId: string;
  // Template-variable options offered alongside real projects in the picker.
  variableOptionGroup: SelectableValue<string>;
  // Called with the updated PromQL sub-query whenever any field changes.
  onChange: (query: PromQLQuery) => void;
  // Triggers query execution (invoked on blur and Shift+Enter).
  onRunQuery: () => void;
  query: PromQLQuery;
  datasource: CloudMonitoringDatasource;
}
/** Builds the initial PromQL sub-query for the given datasource's default project. */
export const defaultQuery: (dataSource: CloudMonitoringDatasource) => PromQLQuery = (dataSource) => ({
  expr: '',
  step: '10s',
  projectName: dataSource.getDefaultProject(),
});
/**
 * Editor for Google Cloud Monitoring PromQL queries: a project picker, a
 * PromQL expression text area (Shift+Enter runs the query), and a "Min step"
 * input controlling the lower bound on the data-point interval.
 */
export function PromQLQueryEditor({
  refId,
  query,
  datasource,
  onChange,
  variableOptionGroup,
  onRunQuery,
}: React.PropsWithChildren<Props>) {
  // Shift+Enter runs the query instead of inserting a newline.
  function onReturnKeyDown(e: React.KeyboardEvent) {
    if (e.key === 'Enter' && e.shiftKey) {
      onRunQuery();
      e.preventDefault();
      e.stopPropagation();
    }
  }

  return (
    <>
      <EditorRow>
        <Project
          refId={refId}
          templateVariableOptions={variableOptionGroup.options}
          projectName={query.projectName}
          datasource={datasource}
          onChange={(projectName) => onChange({ ...query, projectName })}
        />
        <TextArea
          name="Query"
          className="slate-query-field"
          value={query.expr}
          rows={10}
          placeholder="Enter a Cloud Monitoring Prometheus query (Run with Shift+Enter)"
          onBlur={onRunQuery}
          onKeyDown={onReturnKeyDown}
          onChange={(e) => onChange({ ...query, expr: e.currentTarget.value })}
        />
        <div
          className={cx(
            'gf-form',
            css`
              flex-wrap: nowrap;
            `
          )}
          aria-label="Step field"
        >
          <InlineFormLabel
            width={6}
            tooltip={
              'Time units and built-in variables can be used here, for example: $__interval, $__rate_interval, 5s, 1m, 3h, 1d, 1y (Default if no unit is specified: 10s)'
            }
          >
            Min step
          </InlineFormLabel>
          {/* 'string' is not a valid HTML input type attribute; use "text". */}
          <input
            type="text"
            className="gf-form-input width-4"
            placeholder={'auto'}
            onChange={(e) => onChange({ ...query, step: e.currentTarget.value })}
            onKeyDown={onReturnKeyDown}
            value={query.step ?? ''}
          />
        </div>
      </EditorRow>
    </>
  );
}

View File

@ -4,9 +4,10 @@ import { QueryEditorProps, toOption } from '@grafana/data';
import { EditorRows } from '@grafana/experimental';
import CloudMonitoringDatasource from '../datasource';
import { CloudMonitoringQuery, QueryType, SLOQuery } from '../types/query';
import { CloudMonitoringQuery, PromQLQuery, QueryType, SLOQuery } from '../types/query';
import { CloudMonitoringOptions } from '../types/types';
import { PromQLQueryEditor } from './PromQLEditor';
import { QueryHeader } from './QueryHeader';
import { defaultQuery as defaultSLOQuery } from './SLOQueryEditor';
@ -35,6 +36,14 @@ export const QueryEditor = (props: Props) => {
onRunQuery();
};
const promQLQuery = {
...{ projectName: datasource.getDefaultProject(), expr: '', step: '10s' },
...query.promQLQuery,
};
const onPromQLQueryChange = (q: PromQLQuery) => {
onChange({ ...query, promQLQuery: q });
};
const meta = props.data?.series.length ? props.data?.series[0].meta : {};
const customMetaData = meta?.custom ?? {};
const variableOptionGroup = {
@ -54,6 +63,18 @@ export const QueryEditor = (props: Props) => {
return (
<EditorRows>
<QueryHeader query={query} onChange={onChange} onRunQuery={onRunQuery} />
{queryType === QueryType.PROMQL && (
<PromQLQueryEditor
refId={query.refId}
variableOptionGroup={variableOptionGroup}
onChange={onPromQLQueryChange}
onRunQuery={onRunQuery}
datasource={datasource}
query={promQLQuery}
/>
)}
{queryType !== QueryType.SLO && (
<MetricQueryEditor
refId={query.refId}

View File

@ -33,4 +33,17 @@ describe('QueryHeader', () => {
await select(screen.getByLabelText('Select options menu'), 'MQL');
expect(onChange).toBeCalledWith(expect.objectContaining({ queryType: QueryType.TIME_SERIES_QUERY }));
});
it('can change query types to PromQL', async () => {
const query = createMockQuery();
const onChange = jest.fn();
const onRunQuery = jest.fn();
render(<QueryHeader query={query} onChange={onChange} onRunQuery={onRunQuery} />);
const queryType = screen.getByLabelText(/Query type/);
await openMenu(queryType);
await select(screen.getByLabelText('Select options menu'), 'PromQL');
expect(onChange).toBeCalledWith(expect.objectContaining({ queryType: QueryType.PROMQL }));
});
});

View File

@ -313,4 +313,5 @@ export const QUERY_TYPES = [
{ label: 'Builder', value: QueryType.TIME_SERIES_LIST },
{ label: 'MQL', value: QueryType.TIME_SERIES_QUERY },
{ label: 'Service Level Objectives (SLO)', value: QueryType.SLO },
{ label: 'PromQL', value: QueryType.PROMQL },
];

View File

@ -36,12 +36,14 @@ composableKinds: DataQuery: {
timeSeriesQuery?: #TimeSeriesQuery
// SLO sub-query properties.
sloQuery?: #SLOQuery
// PromQL sub-query properties.
promQLQuery?: #PromQLQuery
// Time interval in milliseconds.
intervalMs?: number
} @cuetsy(kind="interface")
// Defines the supported queryTypes.
#QueryType: "timeSeriesList" | "timeSeriesQuery" | "slo" | "annotation" @cuetsy(kind="enum", memberNames="TIME_SERIES_LIST|TIME_SERIES_QUERY|SLO|ANNOTATION")
#QueryType: "timeSeriesList" | "timeSeriesQuery" | "slo" | "annotation" | "promQL" @cuetsy(kind="enum", memberNames="TIME_SERIES_LIST|TIME_SERIES_QUERY|SLO|ANNOTATION|PROMQL")
// Time Series List sub-query properties.
#TimeSeriesList: {
@ -116,6 +118,16 @@ composableKinds: DataQuery: {
lookbackPeriod?: string
} @cuetsy(kind="interface")
// PromQL sub-query properties.
#PromQLQuery: {
// GCP project to execute the query against.
projectName: string
// PromQL expression/query to be executed.
expr: string
// PromQL min step
step: string
} @cuetsy(kind="interface")
// @deprecated This type is for migration purposes only. Replaced by TimeSeriesList Metric sub-query properties.
#MetricQuery: {
// GCP project to execute the query against.

View File

@ -19,6 +19,10 @@ export interface CloudMonitoringQuery extends common.DataQuery {
* Time interval in milliseconds.
*/
intervalMs?: number;
/**
* PromQL sub-query properties.
*/
promQLQuery?: PromQLQuery;
/**
* SLO sub-query properties.
*/
@ -40,6 +44,7 @@ export interface CloudMonitoringQuery extends common.DataQuery {
*/
export enum QueryType {
ANNOTATION = 'annotation',
PROMQL = 'promQL',
SLO = 'slo',
TIME_SERIES_LIST = 'timeSeriesList',
TIME_SERIES_QUERY = 'timeSeriesQuery',
@ -186,6 +191,24 @@ export interface SLOQuery {
sloName: string;
}
/**
* PromQL sub-query properties.
*/
export interface PromQLQuery {
/**
* PromQL expression/query to be executed.
*/
expr: string;
/**
* GCP project to execute the query against.
*/
projectName: string;
/**
* PromQL min step
*/
step: string;
}
/**
* @deprecated This type is for migration purposes only. Replaced by TimeSeriesList Metric sub-query properties.
*/

View File

@ -55,7 +55,7 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
}
applyTemplateVariables(target: CloudMonitoringQuery, scopedVars: ScopedVars): Record<string, any> {
const { timeSeriesList, timeSeriesQuery, sloQuery } = target;
const { timeSeriesList, timeSeriesQuery, sloQuery, promQLQuery } = target;
return {
...target,
@ -79,6 +79,7 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
),
},
sloQuery: sloQuery && this.interpolateProps(sloQuery, scopedVars),
promQLQuery: promQLQuery && this.interpolateProps(promQLQuery, scopedVars),
};
}
@ -323,6 +324,12 @@ export default class CloudMonitoringDatasource extends DataSourceWithBackend<
return !!query.timeSeriesList && !!query.timeSeriesList.projectName && !!getMetricType(query.timeSeriesList);
}
if (query.queryType === QueryType.PROMQL) {
return (
!!query.promQLQuery && !!query.promQLQuery.projectName && !!query.promQLQuery.expr && !!query.promQLQuery.step
);
}
return false;
}

View File

@ -28,13 +28,15 @@ getAppEvents().subscribe<DashboardLoadedEvent<CloudMonitoringQuery>>(
[QueryType.TIME_SERIES_LIST]: 0,
[QueryType.SLO]: 0,
[QueryType.ANNOTATION]: 0,
[QueryType.PROMQL]: 0,
};
cloudmonitorQueries.forEach((query) => {
if (
query.queryType === QueryType.TIME_SERIES_QUERY ||
query.queryType === QueryType.TIME_SERIES_LIST ||
query.queryType === QueryType.SLO ||
query.queryType === QueryType.ANNOTATION
query.queryType === QueryType.ANNOTATION ||
query.queryType === QueryType.PROMQL
) {
stats[query.queryType]++;
} else if (query.queryType === 'metrics') {
@ -58,6 +60,7 @@ getAppEvents().subscribe<DashboardLoadedEvent<CloudMonitoringQuery>>(
time_series_filter_queries: stats[QueryType.TIME_SERIES_LIST],
slo_queries: stats[QueryType.SLO],
annotation_queries: stats[QueryType.ANNOTATION],
promQL_queries: stats[QueryType.PROMQL],
});
}
}

View File

@ -20,4 +20,6 @@ export type CloudMonitoringDashboardLoadedProps = {
slo_queries: number;
/** number of non hidden queries of type annotation if any */
annotation_queries: number;
/** number of non hidden queries of type PromQL if any */
promQL_queries: number;
};

View File

@ -13,6 +13,7 @@ export {
AlignmentTypes,
ValueTypes,
MetricFindQueryTypes,
PromQLQuery,
} from '../dataquery.gen';
/**