Google Cloud Monitor: Use generated type from schema in backend (#67879)

Alyssa Bull 2023-07-31 11:14:27 -06:00 committed by GitHub
parent b632eb33b9
commit adc3735122
15 changed files with 156 additions and 193 deletions
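
The pattern across these files is consistent: the hand-written backend query structs are replaced with the types generated from the plugin's CUE schema (the `pkg/tsdb/cloud-monitoring/kinds/dataquery` package), and the generated `QueryType` constants are cast to `string` wherever the plugin SDK expects a plain string. A minimal, self-contained sketch of that pattern follows; the type definitions here are illustrative stand-ins, not the generated code itself.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative stand-ins for the generated dataquery types; the real ones
// live in pkg/tsdb/cloud-monitoring/kinds/dataquery.
type QueryType string

const QueryTypeTimeSeriesList QueryType = "timeSeriesList"

type TimeSeriesList struct {
	ProjectName string   `json:"projectName"`
	Filters     []string `json:"filters,omitempty"`
	// Optional schema fields become pointers in the generated Go code.
	View *string `json:"view,omitempty"`
}

func main() {
	// A query body unmarshals directly into the generated type...
	raw := []byte(`{"projectName":"my-proj","filters":["metric.type=\"a/metric/type\""]}`)
	var tsl TimeSeriesList
	if err := json.Unmarshal(raw, &tsl); err != nil {
		panic(err)
	}
	// ...and the query type is the generated constant, cast to string
	// where the SDK wants a plain string (e.g. q.QueryType).
	fmt.Println(string(QueryTypeTimeSeriesList), tsl.ProjectName, tsl.Filters)
}
```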

View File

@ -31,7 +31,7 @@ export interface CloudMonitoringQuery extends common.DataQuery {
* queryType: #QueryType
* Time Series List sub-query properties.
*/
timeSeriesList?: (TimeSeriesList | AnnotationQuery);
timeSeriesList?: TimeSeriesList;
/**
* Time Series sub-query properties.
*/
@ -96,6 +96,14 @@ export interface TimeSeriesList {
* Only present if a preprocessor is selected. Alignment function to be used. Defaults to ALIGN_MEAN.
*/
secondaryPerSeriesAligner?: string;
/**
* Annotation text.
*/
text?: string;
/**
* Annotation title.
*/
title?: string;
/**
* Data view, defaults to FULL.
*/
@ -117,20 +125,6 @@ export enum PreprocessorType {
Rate = 'rate',
}
/**
* Annotation sub-query properties.
*/
export interface AnnotationQuery extends TimeSeriesList {
/**
* Annotation text.
*/
text?: string;
/**
* Annotation title.
*/
title?: string;
}
/**
* Time Series sub-query properties.
*/
@ -291,7 +285,7 @@ export enum AlignmentTypes {
}
/**
* @deprecated Use AnnotationQuery instead. Legacy annotation query properties for migration purposes.
* @deprecated Use TimeSeriesList instead. Legacy annotation query properties for migration purposes.
*/
export interface LegacyCloudMonitoringAnnotationQuery {
/**

View File

@ -24,6 +24,7 @@ import (
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery"
)
var (
@ -55,10 +56,10 @@ var (
const (
gceAuthentication = "gce"
jwtAuthentication = "jwt"
annotationQueryType = "annotation"
timeSeriesListQueryType = "timeSeriesList"
timeSeriesQueryQueryType = "timeSeriesQuery"
sloQueryType = "slo"
annotationQueryType = dataquery.QueryTypeAnnotation
timeSeriesListQueryType = dataquery.QueryTypeTimeSeriesList
timeSeriesQueryQueryType = dataquery.QueryTypeTimeSeriesQuery
sloQueryType = dataquery.QueryTypeSlo
crossSeriesReducerDefault = "REDUCE_NONE"
perSeriesAlignerDefault = "ALIGN_MEAN"
)
@ -220,6 +221,10 @@ func migrateMetricTypeFilter(metricTypeFilter string, prevFilters interface{}) [
return metricTypeFilterArray
}
func strPtr(s string) *string {
return &s
}
func migrateRequest(req *backend.QueryDataRequest) error {
for i, q := range req.Queries {
var rawQuery map[string]interface{}
@ -233,12 +238,12 @@ func migrateRequest(req *backend.QueryDataRequest) error {
rawQuery["timeSeriesList"] == nil &&
rawQuery["sloQuery"] == nil {
// migrate legacy query
var mq timeSeriesList
var mq dataquery.TimeSeriesList
err = json.Unmarshal(q.JSON, &mq)
if err != nil {
return err
}
q.QueryType = timeSeriesListQueryType
q.QueryType = string(dataquery.QueryTypeTimeSeriesList)
gq := grafanaQuery{
TimeSeriesList: &mq,
}
@ -259,7 +264,7 @@ func migrateRequest(req *backend.QueryDataRequest) error {
// Migrate type to queryType, which is only used for annotations
if rawQuery["type"] != nil && rawQuery["type"].(string) == "annotationQuery" {
q.QueryType = annotationQueryType
q.QueryType = string(dataquery.QueryTypeAnnotation)
}
if rawQuery["queryType"] != nil {
q.QueryType = rawQuery["queryType"].(string)
@ -270,18 +275,18 @@ func migrateRequest(req *backend.QueryDataRequest) error {
metricQuery := rawQuery["metricQuery"].(map[string]interface{})
if metricQuery["editorMode"] != nil && toString(metricQuery["editorMode"]) == "mql" {
rawQuery["timeSeriesQuery"] = &timeSeriesQuery{
rawQuery["timeSeriesQuery"] = &dataquery.TimeSeriesQuery{
ProjectName: toString(metricQuery["projectName"]),
Query: toString(metricQuery["query"]),
GraphPeriod: toString(metricQuery["graphPeriod"]),
GraphPeriod: strPtr(toString(metricQuery["graphPeriod"])),
}
q.QueryType = timeSeriesQueryQueryType
q.QueryType = string(dataquery.QueryTypeTimeSeriesQuery)
} else {
tslb, err := json.Marshal(metricQuery)
if err != nil {
return err
}
tsl := &timeSeriesList{}
tsl := &dataquery.TimeSeriesList{}
err = json.Unmarshal(tslb, tsl)
if err != nil {
return err
@ -291,7 +296,7 @@ func migrateRequest(req *backend.QueryDataRequest) error {
tsl.Filters = migrateMetricTypeFilter(metricQuery["metricType"].(string), metricQuery["filters"])
}
rawQuery["timeSeriesList"] = tsl
q.QueryType = timeSeriesListQueryType
q.QueryType = string(dataquery.QueryTypeTimeSeriesList)
}
// AliasBy is now a top level property
if metricQuery["aliasBy"] != nil {
@ -304,7 +309,7 @@ func migrateRequest(req *backend.QueryDataRequest) error {
q.JSON = b
}
if rawQuery["sloQuery"] != nil && q.QueryType == sloQueryType {
if rawQuery["sloQuery"] != nil && q.QueryType == string(dataquery.QueryTypeSlo) {
sloQuery := rawQuery["sloQuery"].(map[string]interface{})
// AliasBy is now a top level property
if sloQuery["aliasBy"] != nil {
@ -347,7 +352,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
}
switch req.Queries[0].QueryType {
case annotationQueryType:
case string(dataquery.QueryTypeAnnotation):
return s.executeAnnotationQuery(ctx, req, *dsInfo, queries)
default:
return s.executeTimeSeriesQuery(ctx, req, *dsInfo, queries)
@ -396,19 +401,20 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR
var queryInterface cloudMonitoringQueryExecutor
switch query.QueryType {
case timeSeriesListQueryType, annotationQueryType:
case string(dataquery.QueryTypeTimeSeriesList), string(dataquery.QueryTypeAnnotation):
cmtsf := &cloudMonitoringTimeSeriesList{
refID: query.RefID,
logger: logger,
aliasBy: q.AliasBy,
}
if q.TimeSeriesList.View == "" {
q.TimeSeriesList.View = "FULL"
if q.TimeSeriesList.View == nil || *q.TimeSeriesList.View == "" {
fullString := "FULL"
q.TimeSeriesList.View = &fullString
}
cmtsf.parameters = q.TimeSeriesList
cmtsf.setParams(startTime, endTime, durationSeconds, query.Interval.Milliseconds())
queryInterface = cmtsf
case timeSeriesQueryQueryType:
case string(dataquery.QueryTypeTimeSeriesQuery):
queryInterface = &cloudMonitoringTimeSeriesQuery{
refID: query.RefID,
aliasBy: q.AliasBy,
@ -417,7 +423,7 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR
timeRange: req.Queries[0].TimeRange,
logger: logger,
}
case sloQueryType:
case string(dataquery.QueryTypeSlo):
cmslo := &cloudMonitoringSLO{
refID: query.RefID,
logger: logger,
@ -606,7 +612,7 @@ func unmarshalResponse(logger log.Logger, res *http.Response) (cloudMonitoringRe
return data, nil
}
func addConfigData(frames data.Frames, dl string, unit string, period string) data.Frames {
func addConfigData(frames data.Frames, dl string, unit string, period *string) data.Frames {
for i := range frames {
if frames[i].Fields[1].Config == nil {
frames[i].Fields[1].Config = &data.FieldConfig{}
@ -627,8 +633,8 @@ func addConfigData(frames data.Frames, dl string, unit string, period string) da
if frames[i].Fields[0].Config == nil {
frames[i].Fields[0].Config = &data.FieldConfig{}
}
if period != "" {
err := addInterval(period, frames[i].Fields[0])
if period != nil && *period != "" {
err := addInterval(*period, frames[i].Fields[0])
if err != nil {
slog.Error("Failed to add interval", "error", err)
}
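
Because optional schema fields come out of the generator as pointers, the backend now defaults nil values explicitly (the `View`, `PerSeriesAligner`, and `Preprocessor` handling above). `strPtr` mirrors the helper added in this commit; `orDefault` is a hypothetical companion shown only to make the nil-check-then-default pattern explicit.

```go
package main

import "fmt"

// strPtr mirrors the helper introduced in this commit: it turns a literal
// into a *string so it can be assigned to an optional generated field.
func strPtr(s string) *string {
	return &s
}

// orDefault is a hypothetical helper (not in the diff) that expresses the
// repeated nil-check-then-default pattern in one place.
func orDefault(v *string, def string) string {
	if v == nil || *v == "" {
		return def
	}
	return *v
}

func main() {
	var view *string
	fmt.Println(orDefault(view, "FULL"))              // FULL
	fmt.Println(orDefault(strPtr("HEADERS"), "FULL")) // HEADERS
}
```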

View File

@ -14,6 +14,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -1103,7 +1104,7 @@ func baseTimeSeriesList() *backend.QueryDataRequest {
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
QueryType: timeSeriesListQueryType,
QueryType: string(dataquery.QueryTypeTimeSeriesList),
JSON: json.RawMessage(`{
"timeSeriesList": {
"filters": ["metric.type=\"a/metric/type\""],
@ -1127,7 +1128,7 @@ func baseTimeSeriesQuery() *backend.QueryDataRequest {
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
QueryType: timeSeriesQueryQueryType,
QueryType: string(dataquery.QueryTypeTimeSeriesQuery),
JSON: json.RawMessage(`{
"queryType": "metrics",
"timeSeriesQuery": {

View File

@ -86,18 +86,6 @@ const (
// AlignmentTypes defines model for AlignmentTypes.
type AlignmentTypes string
// Annotation sub-query properties.
type AnnotationQuery struct {
// TimeSeriesList Time Series List sub-query properties.
TimeSeriesList
// Annotation text.
Text *string `json:"text,omitempty"`
// Annotation title.
Title *string `json:"title,omitempty"`
}
// CloudMonitoringQuery defines model for CloudMonitoringQuery.
type CloudMonitoringQuery struct {
// DataQuery These are the common properties available to all queries in all datasources.
@ -114,10 +102,8 @@ type CloudMonitoringQuery struct {
// SLO sub-query properties.
SloQuery *SLOQuery `json:"sloQuery,omitempty"`
// GCM query type.
// queryType: #QueryType
// Time Series List sub-query properties.
TimeSeriesList *any `json:"timeSeriesList,omitempty"`
TimeSeriesList *TimeSeriesList `json:"timeSeriesList,omitempty"`
// Time Series sub-query properties.
TimeSeriesQuery *TimeSeriesQuery `json:"timeSeriesQuery,omitempty"`
@ -166,7 +152,7 @@ type Filter struct {
// GoogleCloudMonitoringDataQuery defines model for GoogleCloudMonitoringDataQuery.
type GoogleCloudMonitoringDataQuery = map[string]any
// @deprecated Use AnnotationQuery instead. Legacy annotation query properties for migration purposes.
// @deprecated Use TimeSeriesList instead. Legacy annotation query properties for migration purposes.
type LegacyCloudMonitoringAnnotationQuery struct {
// Array of filters to query data by. Labels that can be filtered on are defined by the metric.
Filters []string `json:"filters"`
@ -305,6 +291,12 @@ type TimeSeriesList struct {
// Only present if a preprocessor is selected. Alignment function to be used. Defaults to ALIGN_MEAN.
SecondaryPerSeriesAligner *string `json:"secondaryPerSeriesAligner,omitempty"`
// Annotation text.
Text *string `json:"text,omitempty"`
// Annotation title.
Title *string `json:"title,omitempty"`
// Data view, defaults to FULL.
View *string `json:"view,omitempty"`
}

View File

@ -52,7 +52,7 @@ func (sloQ *cloudMonitoringSLO) getFilter() string {
sloName := fmt.Sprintf("projects/%s/services/%s/serviceLevelObjectives/%s", sloQ.parameters.ProjectName, sloQ.parameters.ServiceId, sloQ.parameters.SloId)
if sloQ.parameters.SelectorName == "select_slo_burn_rate" {
return fmt.Sprintf(`%s("%s", "%s")`, sloQ.parameters.SelectorName, sloName, sloQ.parameters.LookbackPeriod)
return fmt.Sprintf(`%s("%s", "%s")`, sloQ.parameters.SelectorName, sloName, *sloQ.parameters.LookbackPeriod)
} else {
return fmt.Sprintf(`%s("%s")`, sloQ.parameters.SelectorName, sloName)
}
@ -65,7 +65,7 @@ func (sloQ *cloudMonitoringSLO) setParams(startTime time.Time, endTime time.Time
params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339))
params.Add("filter", sloQ.getFilter())
params.Add("aggregation.alignmentPeriod", calculateAlignmentPeriod(sloQ.parameters.AlignmentPeriod, intervalMs, durationSeconds))
params.Add("aggregation.alignmentPeriod", calculateAlignmentPeriod(*sloQ.parameters.AlignmentPeriod, intervalMs, durationSeconds))
if sloQ.parameters.SelectorName == "select_slo_health" {
params.Add("aggregation.perSeriesAligner", "ALIGN_MEAN")
} else {

View File

@ -5,6 +5,7 @@ import (
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -20,7 +21,7 @@ func SLOQuery(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringSLO{
params: url.Values{},
parameters: &sloQuery{
parameters: &dataquery.SLOQuery{
ProjectName: "test-proj",
SelectorName: "select_slo_compliance",
ServiceId: "test-service",
@ -45,7 +46,7 @@ func SLOQuery(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringSLO{
params: url.Values{},
parameters: &sloQuery{
parameters: &dataquery.SLOQuery{
ProjectName: "test-proj",
SelectorName: "select_slo_compliance",
ServiceId: "test-service",
@ -66,7 +67,7 @@ func SLOQuery(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringSLO{params: url.Values{}, parameters: &sloQuery{SloId: "yes"}}
query := &cloudMonitoringSLO{params: url.Values{}, parameters: &dataquery.SLOQuery{SloId: "yes"}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames

View File

@ -12,6 +12,7 @@ import (
"github.com/huandu/xstrings"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery"
)
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) run(ctx context.Context, req *backend.QueryDataRequest,
@ -45,7 +46,8 @@ func parseTimeSeriesResponse(queryRes *backend.DataResponse,
}
if len(response.TimeSeries) > 0 {
dl := query.buildDeepLink()
frames = addConfigData(frames, dl, response.Unit, params.Get("aggregation.alignmentPeriod"))
aggregationAlignmentString := params.Get("aggregation.alignmentPeriod")
frames = addConfigData(frames, dl, response.Unit, &aggregationAlignmentString)
}
queryRes.Frames = frames
@ -151,11 +153,11 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesList) setPreprocessor() {
// In case a preprocessor is defined, the preprocessor becomes the primary aggregation
// and the aggregation that is specified in the UI becomes the secondary aggregation
// Rules are specified in this issue: https://github.com/grafana/grafana/issues/30866
t := toPreprocessorType(timeSeriesFilter.parameters.Preprocessor)
if t != PreprocessorTypeNone {
// Move aggregation to secondaryAggregation
if timeSeriesFilter.parameters.Preprocessor != nil && toPreprocessorType(string(*timeSeriesFilter.parameters.Preprocessor)) != PreprocessorTypeNone {
// Move aggregation to secondaryAggregations
timeSeriesFilter.parameters.SecondaryAlignmentPeriod = timeSeriesFilter.parameters.AlignmentPeriod
timeSeriesFilter.parameters.SecondaryCrossSeriesReducer = timeSeriesFilter.parameters.CrossSeriesReducer
scsr := timeSeriesFilter.parameters.CrossSeriesReducer
timeSeriesFilter.parameters.SecondaryCrossSeriesReducer = &scsr
timeSeriesFilter.parameters.SecondaryPerSeriesAligner = timeSeriesFilter.parameters.PerSeriesAligner
timeSeriesFilter.parameters.SecondaryGroupBys = timeSeriesFilter.parameters.GroupBys
@ -166,10 +168,10 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesList) setPreprocessor() {
// Set aligner based on preprocessor type
aligner := "ALIGN_RATE"
if t == PreprocessorTypeDelta {
if timeSeriesFilter.parameters.Preprocessor != nil && toPreprocessorType(string(*timeSeriesFilter.parameters.Preprocessor)) == PreprocessorTypeDelta {
aligner = "ALIGN_DELTA"
}
timeSeriesFilter.parameters.PerSeriesAligner = aligner
timeSeriesFilter.parameters.PerSeriesAligner = &aligner
}
}
@ -181,39 +183,52 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesList) setParams(startTime time.
params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339))
params.Add("filter", timeSeriesFilter.getFilter())
params.Add("view", query.View)
if query.View != nil {
params.Add("view", *query.View)
}
if query.CrossSeriesReducer == "" {
query.CrossSeriesReducer = crossSeriesReducerDefault
}
if query.PerSeriesAligner == "" {
query.PerSeriesAligner = perSeriesAlignerDefault
alignMean := perSeriesAlignerDefault
if query.PerSeriesAligner == nil {
query.PerSeriesAligner = &alignMean
}
if timeSeriesFilter.parameters.Preprocessor == nil {
var p dataquery.PreprocessorType = ""
timeSeriesFilter.parameters.Preprocessor = &p
}
alignmentPeriodString := ""
if query.AlignmentPeriod != nil {
alignmentPeriodString = *query.AlignmentPeriod
}
timeSeriesFilter.setPreprocessor()
alignmentPeriod := calculateAlignmentPeriod(query.AlignmentPeriod, intervalMs, durationSeconds)
alignmentPeriod := calculateAlignmentPeriod(alignmentPeriodString, intervalMs, durationSeconds)
params.Add("aggregation.alignmentPeriod", alignmentPeriod)
if query.CrossSeriesReducer != "" {
params.Add("aggregation.crossSeriesReducer", query.CrossSeriesReducer)
}
if query.PerSeriesAligner != "" {
params.Add("aggregation.perSeriesAligner", query.PerSeriesAligner)
if query.PerSeriesAligner != nil {
params.Add("aggregation.perSeriesAligner", *query.PerSeriesAligner)
}
for _, groupBy := range query.GroupBys {
params.Add("aggregation.groupByFields", groupBy)
}
if query.SecondaryAlignmentPeriod != "" {
secondaryAlignmentPeriod := calculateAlignmentPeriod(query.AlignmentPeriod, intervalMs, durationSeconds)
if query.SecondaryAlignmentPeriod != nil && *query.SecondaryAlignmentPeriod != "" {
secondaryAlignmentPeriod := calculateAlignmentPeriod(alignmentPeriodString, intervalMs, durationSeconds)
params.Add("secondaryAggregation.alignmentPeriod", secondaryAlignmentPeriod)
}
if query.SecondaryCrossSeriesReducer != "" {
params.Add("secondaryAggregation.crossSeriesReducer", query.SecondaryCrossSeriesReducer)
if query.SecondaryCrossSeriesReducer != nil && *query.SecondaryCrossSeriesReducer != "" {
params.Add("secondaryAggregation.crossSeriesReducer", *query.SecondaryCrossSeriesReducer)
}
if query.SecondaryPerSeriesAligner != "" {
params.Add("secondaryAggregation.perSeriesAligner", query.SecondaryPerSeriesAligner)
if query.SecondaryPerSeriesAligner != nil {
params.Add("secondaryAggregation.perSeriesAligner", *query.SecondaryPerSeriesAligner)
}
for _, groupBy := range query.SecondaryGroupBys {
params.Add("secondaryAggregation.groupByFields", groupBy)

View File

@ -12,6 +12,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
sdkdata "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -19,7 +20,7 @@ import (
func TestTimeSeriesFilter(t *testing.T) {
t.Run("parses params", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{}}
query := &cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{}}
query.setParams(time.Time{}, time.Time{}, 0, 0)
assert.Equal(t, "0001-01-01T00:00:00Z", query.params.Get("interval.startTime"))
@ -37,7 +38,8 @@ func TestTimeSeriesFilter(t *testing.T) {
})
t.Run("parses params with preprocessor", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{Preprocessor: "rate"}}
var r dataquery.PreprocessorType = "rate"
query := &cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{Preprocessor: &r}}
query.setParams(time.Time{}, time.Time{}, 0, 0)
assert.Equal(t, "0001-01-01T00:00:00Z", query.params.Get("interval.startTime"))
@ -60,7 +62,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &dataquery.TimeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -83,7 +85,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &dataquery.TimeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
@ -123,7 +125,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{GroupBys: []string{
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &dataquery.TimeSeriesList{GroupBys: []string{
"metric.label.instance_name", "resource.label.zone",
}}}
err = query.parseResponse(res, data, "")
@ -146,7 +148,7 @@ func TestTimeSeriesFilter(t *testing.T) {
t.Run("and the alias pattern is for metric type, a metric label and a resource label", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{
parameters: &dataquery.TimeSeriesList{
GroupBys: []string{"metric.label.instance_name", "resource.label.zone"},
},
aliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}",
@ -165,7 +167,7 @@ func TestTimeSeriesFilter(t *testing.T) {
t.Run("and the alias pattern is for metric name", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}},
parameters: &dataquery.TimeSeriesList{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}},
aliasBy: "metric {{metric.name}} service {{metric.service}}",
}
err = query.parseResponse(res, data, "")
@ -187,7 +189,7 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
parameters: &dataquery.TimeSeriesList{},
aliasBy: "{{bucket}}",
}
err = query.parseResponse(res, data, "")
@ -232,7 +234,7 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
parameters: &dataquery.TimeSeriesList{},
aliasBy: "{{bucket}}",
}
err = query.parseResponse(res, data, "")
@ -270,7 +272,7 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
parameters: &dataquery.TimeSeriesList{},
aliasBy: "{{bucket}}",
}
err = query.parseResponse(res, data, "")
@ -310,7 +312,7 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
parameters: &dataquery.TimeSeriesList{},
aliasBy: "{{metadata.system_labels.test}}",
}
err = query.parseResponse(res, data, "")
@ -328,7 +330,7 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
parameters: &dataquery.TimeSeriesList{},
aliasBy: "{{metadata.system_labels.test2}}",
}
err = query.parseResponse(res, data, "")
@ -346,7 +348,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &dataquery.TimeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -359,7 +361,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &dataquery.TimeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -379,7 +381,7 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
parameters: &timeSeriesQuery{
parameters: &dataquery.TimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
@ -404,10 +406,10 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
parameters: &timeSeriesQuery{
parameters: &dataquery.TimeSeriesQuery{
Query: "fetch gce_instance::compute.googleapis.com/instance/cpu/utilization | sum",
ProjectName: "test",
GraphPeriod: "60s",
GraphPeriod: strPtr("60s"),
},
}
err = query.parseResponse(res, data, "")
@ -420,7 +422,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &dataquery.TimeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -438,7 +440,7 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesList{params: url.Values{
"aggregation.alignmentPeriod": []string{"+60s"},
}, parameters: &timeSeriesList{}}
}, parameters: &dataquery.TimeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -453,7 +455,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))
require.NotNil(t, res.Frames[0].Meta)
assert.Equal(t, sdkdata.FrameMeta{
@ -476,7 +478,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))
require.NotNil(t, res.Frames[0].Meta)
assert.Equal(t, sdkdata.FrameMeta{
@ -499,7 +501,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))
require.NotNil(t, res.Frames[0].Meta)
assert.Equal(t, sdkdata.FrameMeta{
@ -520,20 +522,20 @@ func TestTimeSeriesFilter(t *testing.T) {
t.Run("when building filter string", func(t *testing.T) {
t.Run("and there's no regex operator", func(t *testing.T) {
t.Run("and there are wildcards in a filter value", func(t *testing.T) {
tsl := &cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{Filters: []string{"metric.type", "=", "somemetrictype", "AND", "zone", "=", "*-central1*"}}}
tsl := &cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{Filters: []string{"metric.type", "=", "somemetrictype", "AND", "zone", "=", "*-central1*"}}}
value := tsl.getFilter()
assert.Equal(t, `metric.type="somemetrictype" zone=has_substring("-central1")`, value)
})
t.Run("and there are no wildcards in any filter value", func(t *testing.T) {
tsl := &cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{Filters: []string{"metric.type", "=", "somemetrictype", "AND", "zone", "!=", "us-central1-a"}}}
tsl := &cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{Filters: []string{"metric.type", "=", "somemetrictype", "AND", "zone", "!=", "us-central1-a"}}}
value := tsl.getFilter()
assert.Equal(t, `metric.type="somemetrictype" zone!="us-central1-a"`, value)
})
})
t.Run("and there is a regex operator", func(t *testing.T) {
tsl := &cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{Filters: []string{"metric.type", "=", "somemetrictype", "AND", "zone", "=~", "us-central1-a~"}}}
tsl := &cloudMonitoringTimeSeriesList{parameters: &dataquery.TimeSeriesList{Filters: []string{"metric.type", "=", "somemetrictype", "AND", "zone", "=~", "us-central1-a~"}}}
value := tsl.getFilter()
assert.NotContains(t, value, `=~`)
assert.Contains(t, value, `zone=`)

View File

@ -16,13 +16,13 @@ import (
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) appendGraphPeriod(req *backend.QueryDataRequest) string {
// GraphPeriod needs to be explicitly disabled.
// If not set, the default behavior is to set an automatic value
if timeSeriesQuery.parameters.GraphPeriod != "disabled" {
if timeSeriesQuery.parameters.GraphPeriod == "auto" || timeSeriesQuery.parameters.GraphPeriod == "" {
if timeSeriesQuery.parameters.GraphPeriod == nil || *timeSeriesQuery.parameters.GraphPeriod != "disabled" {
if timeSeriesQuery.parameters.GraphPeriod == nil || *timeSeriesQuery.parameters.GraphPeriod == "auto" || *timeSeriesQuery.parameters.GraphPeriod == "" {
intervalCalculator := intervalv2.NewCalculator(intervalv2.CalculatorOptions{})
interval := intervalCalculator.Calculate(req.Queries[0].TimeRange, time.Duration(timeSeriesQuery.IntervalMS/1000)*time.Second, req.Queries[0].MaxDataPoints)
timeSeriesQuery.parameters.GraphPeriod = interval.Text
timeSeriesQuery.parameters.GraphPeriod = &interval.Text
}
return fmt.Sprintf(" | graph_period %s", timeSeriesQuery.parameters.GraphPeriod)
return fmt.Sprintf(" | graph_period %s", *timeSeriesQuery.parameters.GraphPeriod)
}
return ""
}

View File

@ -6,6 +6,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
gdata "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -23,7 +24,7 @@ func TestTimeSeriesQuery(t *testing.T) {
t.Run("and alias template is not specified", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
parameters: &timeSeriesQuery{
parameters: &dataquery.TimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
@ -41,7 +42,7 @@ func TestTimeSeriesQuery(t *testing.T) {
t.Run("and alias template is specified", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
parameters: &timeSeriesQuery{
parameters: &dataquery.TimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
@ -68,7 +69,7 @@ func TestTimeSeriesQuery(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
parameters: &timeSeriesQuery{
parameters: &dataquery.TimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
@ -93,7 +94,7 @@ func TestTimeSeriesQuery(t *testing.T) {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
parameters: &timeSeriesQuery{
parameters: &dataquery.TimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
@ -120,10 +121,10 @@ func TestTimeSeriesQuery(t *testing.T) {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
parameters: &timeSeriesQuery{
parameters: &dataquery.TimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
GraphPeriod: "60s",
GraphPeriod: strPtr("60s"),
},
timeRange: backend.TimeRange{
From: fromStart,
@ -138,12 +139,12 @@ func TestTimeSeriesQuery(t *testing.T) {
})
t.Run("appends graph_period to the query", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesQuery{parameters: &timeSeriesQuery{}}
query := &cloudMonitoringTimeSeriesQuery{parameters: &dataquery.TimeSeriesQuery{}}
assert.Equal(t, query.appendGraphPeriod(&backend.QueryDataRequest{Queries: []backend.DataQuery{{}}}), " | graph_period 1ms")
})
t.Run("skips graph_period if disabled", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesQuery{parameters: &timeSeriesQuery{GraphPeriod: "disabled"}}
query := &cloudMonitoringTimeSeriesQuery{parameters: &dataquery.TimeSeriesQuery{GraphPeriod: strPtr("disabled")}}
assert.Equal(t, query.appendGraphPeriod(&backend.QueryDataRequest{Queries: []backend.DataQuery{{}}}), "")
})
}

View File

@ -14,6 +14,7 @@ import (
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/cloud-monitoring/kinds/dataquery"
)
type (
@ -30,57 +31,17 @@ type (
// Plugin API query data request used to generate
// a cloudMonitoringTimeSeriesList or cloudMonitoringTimeSeriesQuery
grafanaQuery struct {
AliasBy string `json:"aliasBy"`
TimeSeriesList *timeSeriesList `json:"timeSeriesList,omitempty"`
TimeSeriesQuery *timeSeriesQuery `json:"timeSeriesQuery,omitempty"`
SloQuery *sloQuery `json:"sloQuery,omitempty"`
AliasBy string `json:"aliasBy"`
TimeSeriesList *dataquery.TimeSeriesList `json:"timeSeriesList,omitempty"`
TimeSeriesQuery *dataquery.TimeSeriesQuery `json:"timeSeriesQuery,omitempty"`
SloQuery *dataquery.SLOQuery `json:"sloQuery,omitempty"`
}
// These should reflect GCM APIs
// timeSeries.list https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list
timeSeriesList struct {
ProjectName string `json:"projectName"`
CrossSeriesReducer string `json:"crossSeriesReducer"`
AlignmentPeriod string `json:"alignmentPeriod"`
PerSeriesAligner string `json:"perSeriesAligner"`
GroupBys []string `json:"groupBys"`
Filters []string `json:"filters"`
View string `json:"view"`
SecondaryAlignmentPeriod string `json:"secondaryAlignmentPeriod"`
SecondaryCrossSeriesReducer string `json:"secondaryCrossSeriesReducer"`
SecondaryPerSeriesAligner string `json:"secondaryPerSeriesAligner"`
SecondaryGroupBys []string `json:"secondaryGroupBys"`
// Preprocessor is not part of the GCM API but added for simplicity
// It will overwrite AligmentPeriod, CrossSeriesReducer, PerSeriesAligner, GroupBys
// and its secondary counterparts
Preprocessor string `json:"preprocessor"`
}
// sloQuery is an internal convention but the API is the same as timeSeriesList
sloQuery struct {
ProjectName string `json:"projectName"`
SelectorName string `json:"selectorName"`
ServiceId string `json:"serviceId"`
SloId string `json:"sloId"`
AlignmentPeriod string `json:"alignmentPeriod"`
LookbackPeriod string `json:"lookbackPeriod"`
}
// timeSeries.query https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/query
timeSeriesQuery struct {
ProjectName string `json:"projectName"`
Query string `json:"query"`
// Not part of the GCM API, will be added to Query
GraphPeriod string `json:"graphPeriod"`
}
// Internal structs. Include computed values
// cloudMonitoringTimeSeriesList is used to build time series with a filter
cloudMonitoringTimeSeriesList struct {
refID string
aliasBy string
logger log.Logger
parameters *timeSeriesList
parameters *dataquery.TimeSeriesList
// Processed properties
params url.Values
}
@ -89,7 +50,7 @@ type (
refID string
aliasBy string
logger log.Logger
parameters *sloQuery
parameters *dataquery.SLOQuery
// Processed properties
params url.Values
}
@ -99,7 +60,7 @@ type (
refID string
aliasBy string
logger log.Logger
parameters *timeSeriesQuery
parameters *dataquery.TimeSeriesQuery
// Processed properties
timeRange backend.TimeRange
IntervalMS int64
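
With the local `timeSeriesList`, `sloQuery`, and `timeSeriesQuery` structs removed, `grafanaQuery` deserializes straight into the generated types. A sketch of that round trip, using local stand-ins for `dataquery.TimeSeriesList` and `dataquery.SLOQuery` that reproduce only the fields needed for the example:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins for the generated types; field names follow types_gen.go above,
// but only a subset is reproduced here.
type TimeSeriesList struct {
	ProjectName string   `json:"projectName"`
	Filters     []string `json:"filters,omitempty"`
}

type SLOQuery struct {
	ProjectName string `json:"projectName"`
	SloId       string `json:"sloId"`
}

// Mirrors grafanaQuery above (TimeSeriesQuery omitted for brevity).
type grafanaQuery struct {
	AliasBy        string          `json:"aliasBy"`
	TimeSeriesList *TimeSeriesList `json:"timeSeriesList,omitempty"`
	SloQuery       *SLOQuery       `json:"sloQuery,omitempty"`
}

func main() {
	body := []byte(`{
		"aliasBy": "{{metric.type}}",
		"timeSeriesList": {
			"projectName": "test-proj",
			"filters": ["metric.type=\"a/metric/type\""]
		}
	}`)
	var gq grafanaQuery
	if err := json.Unmarshal(body, &gq); err != nil {
		panic(err)
	}
	fmt.Println(gq.AliasBy, gq.TimeSeriesList.ProjectName)
}
```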

View File

@ -6,7 +6,7 @@ import { EditorField, EditorRows } from '@grafana/experimental';
import { Input } from '@grafana/ui';
import CloudMonitoringDatasource from '../datasource';
import { AnnotationQuery, CloudMonitoringQuery, QueryType } from '../types/query';
import { TimeSeriesList, CloudMonitoringQuery, QueryType } from '../types/query';
import { CloudMonitoringOptions } from '../types/types';
import { MetricQueryEditor, defaultTimeSeriesList } from './MetricQueryEditor';
@ -15,7 +15,7 @@ import { AnnotationsHelp } from './';
export type Props = QueryEditorProps<CloudMonitoringDatasource, CloudMonitoringQuery, CloudMonitoringOptions>;
export const defaultQuery: (datasource: CloudMonitoringDatasource) => AnnotationQuery = (datasource) => ({
export const defaultQuery: (datasource: CloudMonitoringDatasource) => TimeSeriesList = (datasource) => ({
...defaultTimeSeriesList(datasource),
title: '',
text: '',

View File

@ -31,7 +31,7 @@ composableKinds: DataQuery: {
// GCM query type.
// queryType: #QueryType
// Time Series List sub-query properties.
timeSeriesList?: #TimeSeriesList | #AnnotationQuery
timeSeriesList?: #TimeSeriesList
// Time Series sub-query properties.
timeSeriesQuery?: #TimeSeriesQuery
// SLO sub-query properties.
@ -60,6 +60,11 @@ composableKinds: DataQuery: {
// Data view, defaults to FULL.
view?: string
// Annotation title.
title?: string
// Annotation text.
text?: string
// Only present if a preprocessor is selected. Reducer applied across a set of time-series values. Defaults to REDUCE_NONE.
secondaryCrossSeriesReducer?: string
// Only present if a preprocessor is selected. Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto.
@ -77,14 +82,6 @@ composableKinds: DataQuery: {
// Types of pre-processor available. Defined by the metric.
#PreprocessorType: "none" | "rate" | "delta" @cuetsy(kind="enum")
// Annotation sub-query properties.
#AnnotationQuery: #TimeSeriesList & {
// Annotation title.
title?: string
// Annotation text.
text?: string
} @cuetsy(kind="interface")
// Time Series sub-query properties.
#TimeSeriesQuery: {
// GCP project to execute the query against.
@ -154,7 +151,7 @@ composableKinds: DataQuery: {
#AlignmentTypes: "ALIGN_DELTA" | "ALIGN_RATE" | "ALIGN_INTERPOLATE" | "ALIGN_NEXT_OLDER" | "ALIGN_MIN" | "ALIGN_MAX" | "ALIGN_MEAN" | "ALIGN_COUNT" | "ALIGN_SUM" | "ALIGN_STDDEV" | "ALIGN_COUNT_TRUE" | "ALIGN_COUNT_FALSE" | "ALIGN_FRACTION_TRUE" | "ALIGN_PERCENTILE_99" | "ALIGN_PERCENTILE_95" | "ALIGN_PERCENTILE_50" | "ALIGN_PERCENTILE_05" | "ALIGN_PERCENT_CHANGE" | "ALIGN_NONE" @cuetsy(kind="enum")
// @deprecated Use AnnotationQuery instead. Legacy annotation query properties for migration purposes.
// @deprecated Use TimeSeriesList instead. Legacy annotation query properties for migration purposes.
#LegacyCloudMonitoringAnnotationQuery: {
// GCP project to execute the query against.
projectName: string

View File

@ -28,7 +28,7 @@ export interface CloudMonitoringQuery extends common.DataQuery {
* queryType: #QueryType
* Time Series List sub-query properties.
*/
timeSeriesList?: (TimeSeriesList | AnnotationQuery);
timeSeriesList?: TimeSeriesList;
/**
* Time Series sub-query properties.
*/
@ -93,6 +93,14 @@ export interface TimeSeriesList {
* Only present if a preprocessor is selected. Alignment function to be used. Defaults to ALIGN_MEAN.
*/
secondaryPerSeriesAligner?: string;
/**
* Annotation text.
*/
text?: string;
/**
* Annotation title.
*/
title?: string;
/**
* Data view, defaults to FULL.
*/
@ -114,20 +122,6 @@ export enum PreprocessorType {
Rate = 'rate',
}
/**
* Annotation sub-query properties.
*/
export interface AnnotationQuery extends TimeSeriesList {
/**
* Annotation text.
*/
text?: string;
/**
* Annotation title.
*/
title?: string;
}
/**
* Time Series sub-query properties.
*/
@ -288,7 +282,7 @@ export enum AlignmentTypes {
}
/**
* @deprecated Use AnnotationQuery instead. Legacy annotation query properties for migration purposes.
* @deprecated Use TimeSeriesList instead. Legacy annotation query properties for migration purposes.
*/
export interface LegacyCloudMonitoringAnnotationQuery {
/**

View File

@ -4,7 +4,6 @@ export { QueryType };
export {
TimeSeriesList,
PreprocessorType,
AnnotationQuery,
TimeSeriesQuery,
SLOQuery,
MetricQuery,