Google Cloud Monitor: Prom query editor (#73503)

* revert

* works but needs clean up and tests

* clean up

* remove any

* change confusing query var to expr

* oops

* add test

* lint

* cleanup

* update docs

* Update public/app/plugins/datasource/cloud-monitoring/components/PromQLEditor.tsx

Co-authored-by: Andreas Christou <andreas.christou@grafana.com>

* nit

* lint fix?

* remove comment from cue

* go linter

* remove response-parsing logic from the parseResponse func

---------

Co-authored-by: Andreas Christou <andreas.christou@grafana.com>
This commit is contained in:
Andrew Hackmann
2023-08-18 11:14:43 -05:00
committed by GitHub
parent 23ae1127a7
commit 42f4306251
23 changed files with 512 additions and 17 deletions

View File

@@ -38,7 +38,7 @@ func (s *Service) executeAnnotationQuery(ctx context.Context, req *backend.Query
if err != nil {
return resp, nil
}
err = parseToAnnotations(req.Queries[0].RefID, queryRes, dr, tslq.TimeSeriesList.Title, tslq.TimeSeriesList.Text)
err = parseToAnnotations(req.Queries[0].RefID, queryRes, dr.(cloudMonitoringResponse), tslq.TimeSeriesList.Title, tslq.TimeSeriesList.Text)
resp.Responses[firstQuery.RefID] = *queryRes
return resp, err

View File

@@ -60,6 +60,7 @@ const (
timeSeriesListQueryType = dataquery.QueryTypeTimeSeriesList
timeSeriesQueryQueryType = dataquery.QueryTypeTimeSeriesQuery
sloQueryType = dataquery.QueryTypeSlo
promQLQueryType = dataquery.QueryTypePromQL
crossSeriesReducerDefault = "REDUCE_NONE"
perSeriesAlignerDefault = "ALIGN_MEAN"
)
@@ -432,6 +433,15 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR
}
cmslo.setParams(startTime, endTime, durationSeconds, query.Interval.Milliseconds())
queryInterface = cmslo
case string(dataquery.QueryTypePromQL):
cmp := &cloudMonitoringProm{
refID: query.RefID,
logger: logger,
aliasBy: q.AliasBy,
parameters: q.PromQLQuery,
timeRange: req.Queries[0].TimeRange,
}
queryInterface = cmp
default:
return nil, fmt.Errorf("unrecognized query type %q", query.QueryType)
}

View File

@@ -67,6 +67,7 @@ const (
// Defines values for QueryType.
const (
QueryTypeAnnotation QueryType = "annotation"
QueryTypePromQL QueryType = "promQL"
QueryTypeSlo QueryType = "slo"
QueryTypeTimeSeriesList QueryType = "timeSeriesList"
QueryTypeTimeSeriesQuery QueryType = "timeSeriesQuery"
@@ -99,6 +100,9 @@ type CloudMonitoringQuery struct {
// Time interval in milliseconds.
IntervalMs *float32 `json:"intervalMs,omitempty"`
// PromQL sub-query properties.
PromQLQuery *PromQLQuery `json:"promQLQuery,omitempty"`
// SLO sub-query properties.
SloQuery *SLOQuery `json:"sloQuery,omitempty"`
@@ -220,6 +224,18 @@ type MetricQuery struct {
// Types of pre-processor available. Defined by the metric.
type PreprocessorType string
// PromQLQuery holds the PromQL sub-query properties sent by the frontend
// as part of a CloudMonitoringQuery.
type PromQLQuery struct {
// PromQL expression/query to be executed.
Expr string `json:"expr"`
// GCP project to execute the query against.
ProjectName string `json:"projectName"`
// PromQL min step — presumably a Prometheus duration string (e.g. "10s"); confirm against the frontend editor.
Step string `json:"step"`
}
// Defines the supported queryTypes.
type QueryType string

View File

@@ -0,0 +1,117 @@
package cloudmonitoring
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"path"
"strconv"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
jsoniter "github.com/json-iterator/go"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/util/converter"
)
func (promQLQ *cloudMonitoringProm) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
dr := &backend.DataResponse{}
projectName, err := s.ensureProject(ctx, dsInfo, promQLQ.parameters.ProjectName)
if err != nil {
dr.Error = err
return dr, promResponse{}, "", nil
}
r, err := createRequest(ctx, promQLQ.logger, &dsInfo, path.Join("/v1/projects", projectName, "location/global/prometheus/api/v1/query_range"), nil)
if err != nil {
dr.Error = err
return dr, promResponse{}, "", nil
}
span := traceReq(ctx, tracer, req, dsInfo, r, "")
defer span.End()
requestBody := map[string]interface{}{
"query": promQLQ.parameters.Expr,
"end": formatTime(promQLQ.timeRange.To),
"start": formatTime(promQLQ.timeRange.From),
"step": promQLQ.parameters.Step,
}
res, err := doRequestProm(r, dsInfo, requestBody)
defer func() {
if err := res.Body.Close(); err != nil {
promQLQ.logger.Error("Failed to close response body", "err", err)
}
}()
if err != nil {
dr.Error = err
return dr, promResponse{}, "", nil
}
return dr, parseProm(res), r.URL.RawQuery, nil
}
// doRequestProm issues the prepared request against the Cloud Monitoring
// client. A non-nil body turns the request into a JSON POST; the caller is
// responsible for closing the returned response body.
func doRequestProm(r *http.Request, dsInfo datasourceInfo, body map[string]interface{}) (*http.Response, error) {
	if body != nil {
		payload, err := json.Marshal(body)
		if err != nil {
			return nil, err
		}
		r.Method = http.MethodPost
		r.Body = io.NopCloser(bytes.NewReader(payload))
	}
	return dsInfo.services[cloudMonitor].client.Do(r)
}
// parseProm streams the Prometheus-style JSON body through the shared
// converter and returns the resulting data response.
func parseProm(res *http.Response) backend.DataResponse {
	opts := converter.Options{
		MatrixWideSeries: false,
		VectorWideSeries: false,
		Dataplane:        false,
	}
	iter := jsoniter.Parse(jsoniter.ConfigDefault, res.Body, 1024)
	return converter.ReadPrometheusStyleResult(iter, opts)
}
// We are not parsing the response in this function. ReadPrometheusStyleResult needs an open reader and we cannot
// pass an open reader to this function because lint complains as it is unsafe
func (promQLQ *cloudMonitoringProm) parseResponse(queryRes *backend.DataResponse,
response any, executedQueryString string) error {
r := response.(backend.DataResponse)
// Add frame to attach metadata
if len(r.Frames) == 0 {
r.Frames = append(r.Frames, data.NewFrame(""))
}
*queryRes = r
return nil
}
// buildDeepLink returns an empty string: PromQL queries have no
// Metrics Explorer deep link.
func (promQLQ *cloudMonitoringProm) buildDeepLink() string {
return ""
}
// getRefID returns the RefID of the query this executor was built for.
func (promQLQ *cloudMonitoringProm) getRefID() string {
return promQLQ.refID
}
// getAliasBy returns the alias pattern configured on the query.
func (promQLQ *cloudMonitoringProm) getAliasBy() string {
return promQLQ.aliasBy
}
// getParameter always returns "": PromQL queries expose no named parameters.
// The key i is intentionally ignored; presumably this only exists to satisfy
// the query-executor interface — confirm against the interface definition.
func (promQLQ *cloudMonitoringProm) getParameter(i string) string {
return ""
}
// formatTime renders t as Prometheus-style epoch seconds with fractional
// nanoseconds, using the shortest decimal representation (e.g. "1.5").
// Seconds and nanoseconds are summed as separate float64s to avoid the
// precision loss of converting UnixNano directly.
func formatTime(t time.Time) string {
	secs := float64(t.Unix()) + float64(t.Nanosecond())/1e9
	return strconv.FormatFloat(secs, 'f', -1, 64)
}

View File

@@ -0,0 +1,32 @@
package cloudmonitoring
import (
"io"
"net/http"
"os"
"strings"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/stretchr/testify/require"
)
func TestPromqlQuery(t *testing.T) {
	t.Run("parseResponse is returned", func(t *testing.T) {
		// Load a canned Prometheus-style response and verify parseResponse
		// copies the parsed frames into the data response.
		fileData, err := os.ReadFile("./test-data/11-prom-response.json")
		// Check the read error before using fileData (the original built the
		// reader first and also checked err twice).
		require.NoError(t, err)

		res := http.Response{Body: io.NopCloser(strings.NewReader(string(fileData)))}
		dataRes := &backend.DataResponse{}
		query := &cloudMonitoringProm{}
		err = query.parseResponse(dataRes, parseProm(&res), "")
		require.NoError(t, err)

		frame := dataRes.Frames[0]
		experimental.CheckGoldenJSONFrame(t, "test-data", "parse-response-is-returned", frame, false)
	})
}

View File

@@ -12,13 +12,13 @@ import (
)
func (sloQ *cloudMonitoringSLO) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
return runTimeSeriesRequest(ctx, sloQ.logger, req, s, dsInfo, tracer, sloQ.parameters.ProjectName, sloQ.params, nil)
}
func (sloQ *cloudMonitoringSLO) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response, executedQueryString, sloQ, sloQ.params, []string{})
response any, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response.(cloudMonitoringResponse), executedQueryString, sloQ, sloQ.params, []string{})
}
func (sloQ *cloudMonitoringSLO) buildDeepLink() string {

View File

@@ -0,0 +1,21 @@
{
"status": "success",
"data": {
"resultType": "matrix",
"result": [
{
"metric": {},
"values": [
[
1,
"1234"
],
[
2,
"12345"
]
]
}
]
}
}

View File

@@ -0,0 +1,73 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "timeseries-multi",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "resultType": "matrix"
// }
// }
// Name:
// Dimensions: 2 Fields by 2 Rows
// +-------------------------------+-----------------+
// | Name: Time | Name: Value |
// | Labels: | Labels: |
// | Type: []time.Time | Type: []float64 |
// +-------------------------------+-----------------+
// | 1970-01-01 00:00:01 +0000 UTC | 1234 |
// | 1970-01-01 00:00:02 +0000 UTC | 12345 |
// +-------------------------------+-----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"meta": {
"type": "timeseries-multi",
"typeVersion": [
0,
0
],
"custom": {
"resultType": "matrix"
}
},
"fields": [
{
"name": "Time",
"type": "time",
"typeInfo": {
"frame": "time.Time"
}
},
{
"name": "Value",
"type": "number",
"typeInfo": {
"frame": "float64"
},
"labels": {}
}
]
},
"data": {
"values": [
[
1000,
2000
],
[
1234,
12345
]
]
}
}
]
}

View File

@@ -16,7 +16,7 @@ import (
)
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
return runTimeSeriesRequest(ctx, timeSeriesFilter.logger, req, s, dsInfo, tracer, timeSeriesFilter.parameters.ProjectName, timeSeriesFilter.params, nil)
}
@@ -56,8 +56,8 @@ func parseTimeSeriesResponse(queryRes *backend.DataResponse,
}
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response, executedQueryString, timeSeriesFilter, timeSeriesFilter.params, timeSeriesFilter.parameters.GroupBys)
response any, executedQueryString string) error {
return parseTimeSeriesResponse(queryRes, response.(cloudMonitoringResponse), executedQueryString, timeSeriesFilter, timeSeriesFilter.params, timeSeriesFilter.parameters.GroupBys)
}
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) buildDeepLink() string {

View File

@@ -28,7 +28,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) appendGraphPeriod(req *ba
}
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, any, string, error) {
timeSeriesQuery.parameters.Query += timeSeriesQuery.appendGraphPeriod(req)
from := req.Queries[0].TimeRange.From
to := req.Queries[0].TimeRange.To
@@ -41,7 +41,8 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context,
}
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
res any, executedQueryString string) error {
response := res.(cloudMonitoringResponse)
frames := data.Frames{}
for _, series := range response.TimeSeriesData {

View File

@@ -20,8 +20,8 @@ import (
type (
cloudMonitoringQueryExecutor interface {
run(ctx context.Context, req *backend.QueryDataRequest, s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (
*backend.DataResponse, cloudMonitoringResponse, string, error)
parseResponse(dr *backend.DataResponse, data cloudMonitoringResponse, executedQueryString string) error
*backend.DataResponse, any, string, error)
parseResponse(dr *backend.DataResponse, data any, executedQueryString string) error
buildDeepLink() string
getRefID() string
getAliasBy() string
@@ -35,6 +35,7 @@ type (
TimeSeriesList *dataquery.TimeSeriesList `json:"timeSeriesList,omitempty"`
TimeSeriesQuery *dataquery.TimeSeriesQuery `json:"timeSeriesQuery,omitempty"`
SloQuery *dataquery.SLOQuery `json:"sloQuery,omitempty"`
PromQLQuery *dataquery.PromQLQuery `json:"promQLQuery,omitempty"`
}
cloudMonitoringTimeSeriesList struct {
@@ -55,6 +56,16 @@ type (
params url.Values
}
// cloudMonitoringProm is used to build a promQL queries
cloudMonitoringProm struct {
refID string
aliasBy string
logger log.Logger
parameters *dataquery.PromQLQuery
timeRange backend.TimeRange
IntervalMS int64
}
// cloudMonitoringTimeSeriesQuery is used to build MQL queries
cloudMonitoringTimeSeriesQuery struct {
refID string
@@ -89,6 +100,14 @@ type (
Unit string `json:"unit"`
NextPageToken string `json:"nextPageToken"`
}
promResponse struct {
Status string `json:"status"`
Data struct {
Result any `json:"result"`
ResultType string `json:"resultType"`
} `json:"data"`
}
)
type pointIterator interface {