GoogleCloudMonitoring: Refactor types (#58644)
parent 16af756d50 · commit 64143ea7d0
@@ -62,7 +62,6 @@ const (
 	annotationQueryType       = "annotation"
 	metricQueryType           = "metrics"
 	sloQueryType              = "slo"
-	mqlEditorMode             = "mql"
 	crossSeriesReducerDefault = "REDUCE_NONE"
 	perSeriesAlignerDefault   = "ALIGN_MEAN"
 )
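Context for the hunks that follow: the refactor retires the catch-all metricQuery struct in favor of one parameter struct per Monitoring API call (timeSeries.list vs. timeSeries.query), which is also what makes the mqlEditorMode constant above obsolete. A sketch of the query envelope, reconstructed only from the fields this diff touches — the authoritative declarations live in the package's types file, which this diff does not show:

	// Reconstructed sketch, not the actual declarations.
	type grafanaQuery struct {
		QueryType       string           `json:"queryType"`
		AliasBy         string           `json:"aliasBy"`
		TimeSeriesList  *timeSeriesList  `json:"timeSeriesList"`  // filter-based timeSeries.list calls (also used for SLOs)
		TimeSeriesQuery *timeSeriesQuery `json:"timeSeriesQuery"` // MQL timeSeries.query calls
		SloQuery        *sloQuery        `json:"sloQuery"`        // SLO selector, service and SLO IDs
	}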
@@ -217,16 +216,20 @@ func migrateRequest(req *backend.QueryDataRequest) error {

 		if rawQuery["metricQuery"] == nil {
 			// migrate legacy query
-			var mq metricQuery
+			var mq timeSeriesList
 			err = json.Unmarshal(q.JSON, &mq)
 			if err != nil {
 				return err
 			}
 			q.QueryType = metricQueryType
+			gq := grafanaQuery{
+				TimeSeriesList: &mq,
+			}
+			if rawQuery["aliasBy"] != nil {
+				gq.AliasBy = rawQuery["aliasBy"].(string)
+			}
+
-			b, err := json.Marshal(grafanaQuery{
-				QueryType:   metricQueryType,
-				MetricQuery: mq,
-			})
+			b, err := json.Marshal(gq)
 			if err != nil {
 				return err
 			}
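Because timeSeriesList's JSON tags line up with the flat legacy payload, the migration above can unmarshal the old document straight into the new struct and re-wrap it. A toy, runnable illustration of that pattern (field set reduced to two assumed tags):

	package main

	import (
		"encoding/json"
		"fmt"
	)

	type timeSeriesList struct {
		MetricType string `json:"metricType"`
		View       string `json:"view"`
	}

	type grafanaQuery struct {
		AliasBy        string          `json:"aliasBy"`
		TimeSeriesList *timeSeriesList `json:"timeSeriesList"`
	}

	func main() {
		// A flat legacy document doubles as a timeSeriesList document.
		legacy := []byte(`{"metricType": "a/metric/type", "view": "FULL", "aliasBy": "testalias"}`)
		var mq timeSeriesList
		if err := json.Unmarshal(legacy, &mq); err != nil {
			panic(err)
		}

		// Re-wrap it under the new envelope, hoisting aliasBy to the top level.
		b, _ := json.Marshal(grafanaQuery{AliasBy: "testalias", TimeSeriesList: &mq})
		fmt.Println(string(b))
		// {"aliasBy":"testalias","timeSeriesList":{"metricType":"a/metric/type","view":"FULL"}}
	}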
@@ -237,6 +240,62 @@ func migrateRequest(req *backend.QueryDataRequest) error {
 		if rawQuery["type"] != nil && rawQuery["type"].(string) == "annotationQuery" {
 			q.QueryType = annotationQueryType
 		}
 		if rawQuery["queryType"] != nil {
 			q.QueryType = rawQuery["queryType"].(string)
 		}

+		// Metric query was divided between timeSeriesList and timeSeriesQuery API calls
+		if rawQuery["metricQuery"] != nil {
+			metricQuery := rawQuery["metricQuery"].(map[string]interface{})
+
+			if metricQuery["editorMode"] != nil && toString(metricQuery["editorMode"]) == "mql" {
+				rawQuery["timeSeriesQuery"] = &timeSeriesQuery{
+					ProjectName: toString(metricQuery["projectName"]),
+					Query:       toString(metricQuery["query"]),
+					GraphPeriod: toString(metricQuery["graphPeriod"]),
+				}
+			} else {
+				rawQuery["timeSeriesList"] = metricQuery
+			}
+			if metricQuery["aliasBy"] != nil {
+				rawQuery["aliasBy"] = metricQuery["aliasBy"]
+			}
+			b, err := json.Marshal(rawQuery)
+			if err != nil {
+				return err
+			}
+			if q.QueryType == "" {
+				q.QueryType = metricQueryType
+			}
+			q.JSON = b
+		}
+
+		// SloQuery was merged into timeSeriesList
+		if rawQuery["sloQuery"] != nil {
+			if rawQuery["timeSeriesList"] == nil {
+				rawQuery["timeSeriesList"] = map[string]interface{}{}
+			}
+			tsl := rawQuery["timeSeriesList"].(map[string]interface{})
+			sloq := rawQuery["sloQuery"].(map[string]interface{})
+			if sloq["projectName"] != nil {
+				tsl["projectName"] = sloq["projectName"]
+			}
+			if sloq["alignmentPeriod"] != nil {
+				tsl["alignmentPeriod"] = sloq["alignmentPeriod"]
+			}
+			if sloq["perSeriesAligner"] != nil {
+				tsl["perSeriesAligner"] = sloq["perSeriesAligner"]
+			}
+			rawQuery["timeSeriesList"] = tsl
+			b, err := json.Marshal(rawQuery)
+			if err != nil {
+				return err
+			}
+			if q.QueryType == "" {
+				q.QueryType = sloQueryType
+			}
+			q.JSON = b
+		}
+
 		req.Queries[i] = q
 	}
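The net effect of the two migration blocks above, on hypothetical stored payloads: a visual-editor query such as {"metricQuery": {"metricType": "a/metric/type", "aliasBy": "testalias"}} is rewritten to {"timeSeriesList": {"metricType": "a/metric/type", ...}, "aliasBy": "testalias"}; an MQL query (editorMode "mql") becomes a timeSeriesQuery block instead; and an SLO query gets projectName, alignmentPeriod and perSeriesAligner copied from sloQuery into timeSeriesList, while the selector, service and SLO IDs stay in sloQuery.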
@@ -315,58 +374,49 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataRequest) {
 			return nil, fmt.Errorf("could not unmarshal CloudMonitoringQuery json: %w", err)
 		}

-		q.MetricQuery.PreprocessorType = toPreprocessorType(q.MetricQuery.Preprocessor)
 		var target string
 		params := url.Values{}
 		params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339))
 		params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339))

 		var queryInterface cloudMonitoringQueryExecutor
-		cmtsf := &cloudMonitoringTimeSeriesFilter{
-			RefID:    query.RefID,
-			GroupBys: []string{},
-			logger:   logger,
+		cmtsf := &cloudMonitoringTimeSeriesList{
+			refID:   query.RefID,
+			logger:  logger,
+			aliasBy: q.AliasBy,
 		}
-		switch q.QueryType {
+		switch query.QueryType {
 		case metricQueryType, annotationQueryType:
-			if q.MetricQuery.EditorMode == mqlEditorMode {
+			if q.TimeSeriesQuery != nil {
 				queryInterface = &cloudMonitoringTimeSeriesQuery{
-					RefID:       query.RefID,
-					ProjectName: q.MetricQuery.ProjectName,
-					Query:       q.MetricQuery.Query,
-					IntervalMS:  query.Interval.Milliseconds(),
-					AliasBy:     q.MetricQuery.AliasBy,
-					timeRange:   req.Queries[0].TimeRange,
-					GraphPeriod: q.MetricQuery.GraphPeriod,
+					refID:      query.RefID,
+					aliasBy:    q.AliasBy,
+					parameters: q.TimeSeriesQuery,
+					IntervalMS: query.Interval.Milliseconds(),
+					timeRange:  req.Queries[0].TimeRange,
 				}
-			} else {
-				cmtsf.AliasBy = q.MetricQuery.AliasBy
-				cmtsf.ProjectName = q.MetricQuery.ProjectName
-				cmtsf.GroupBys = append(cmtsf.GroupBys, q.MetricQuery.GroupBys...)
-				if q.MetricQuery.View == "" {
-					q.MetricQuery.View = "FULL"
+			} else if q.TimeSeriesList != nil {
+				if q.TimeSeriesList.View == "" {
+					q.TimeSeriesList.View = "FULL"
 				}
-				params.Add("filter", buildFilterString(q.MetricQuery.MetricType, q.MetricQuery.Filters))
-				params.Add("view", q.MetricQuery.View)
-				setMetricAggParams(&params, &q.MetricQuery, durationSeconds, query.Interval.Milliseconds())
+				cmtsf.parameters = q.TimeSeriesList
+				params.Add("filter", buildFilterString(q.TimeSeriesList.MetricType, q.TimeSeriesList.Filters))
+				params.Add("view", q.TimeSeriesList.View)
+				setMetricAggParams(&params, q.TimeSeriesList, durationSeconds, query.Interval.Milliseconds())
 				queryInterface = cmtsf
+			} else {
+				return nil, fmt.Errorf("missing query info")
 			}
 		case sloQueryType:
-			cmtsf.AliasBy = q.SloQuery.AliasBy
-			cmtsf.ProjectName = q.SloQuery.ProjectName
-			cmtsf.Selector = q.SloQuery.SelectorName
-			cmtsf.Service = q.SloQuery.ServiceId
-			cmtsf.Slo = q.SloQuery.SloId
-			params.Add("filter", buildSLOFilterExpression(q.SloQuery))
-			setSloAggParams(&params, &q.SloQuery, durationSeconds, query.Interval.Milliseconds())
+			cmtsf.sloQ = q.SloQuery
+			cmtsf.parameters = q.TimeSeriesList
+			params.Add("filter", buildSLOFilterExpression(q.TimeSeriesList.ProjectName, q.SloQuery))
+			setSloAggParams(&params, q.SloQuery, q.TimeSeriesList.AlignmentPeriod, durationSeconds, query.Interval.Milliseconds())
 			queryInterface = cmtsf
 		default:
-			return nil, fmt.Errorf("unrecognized query type %q", q.QueryType)
+			return nil, fmt.Errorf("unrecognized query type %q", query.QueryType)
 		}

 		target = params.Encode()
-		cmtsf.Target = target
-		cmtsf.Params = params
+		cmtsf.params = params

 		if setting.Env == setting.Dev {
 			logger.Debug("CloudMonitoring request", "params", params)
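The dispatch above now keys off which parameter block is present instead of comparing an editorMode string. A toy runnable version of the nil-check pattern (stand-in types; the "missing query info" failure mode matches the hunk):

	package main

	import (
		"errors"
		"fmt"
	)

	type timeSeriesList struct{ MetricType string }
	type timeSeriesQuery struct{ Query string }

	type grafanaQuery struct {
		TimeSeriesList  *timeSeriesList
		TimeSeriesQuery *timeSeriesQuery
	}

	func pick(q grafanaQuery) (string, error) {
		switch {
		case q.TimeSeriesQuery != nil:
			return "timeSeries.query (MQL)", nil
		case q.TimeSeriesList != nil:
			return "timeSeries.list (filter)", nil
		default:
			return "", errors.New("missing query info")
		}
	}

	func main() {
		exec, err := pick(grafanaQuery{TimeSeriesQuery: &timeSeriesQuery{Query: "foo"}})
		fmt.Println(exec, err) // timeSeries.query (MQL) <nil>
	}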
@@ -428,8 +478,8 @@ func buildFilterString(metricType string, filterParts []string) string {
 	return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, filterString), " ")
 }

-func buildSLOFilterExpression(q sloQuery) string {
-	sloName := fmt.Sprintf("projects/%s/services/%s/serviceLevelObjectives/%s", q.ProjectName, q.ServiceId, q.SloId)
+func buildSLOFilterExpression(projectName string, q *sloQuery) string {
+	sloName := fmt.Sprintf("projects/%s/services/%s/serviceLevelObjectives/%s", projectName, q.ServiceId, q.SloId)

 	if q.SelectorName == "select_slo_burn_rate" {
 		return fmt.Sprintf(`%s("%s", "%s")`, q.SelectorName, sloName, q.LookbackPeriod)
@@ -438,7 +488,7 @@ func buildSLOFilterExpression(q sloQuery) string {
 	}
 }

-func setMetricAggParams(params *url.Values, query *metricQuery, durationSeconds int, intervalMs int64) {
+func setMetricAggParams(params *url.Values, query *timeSeriesList, durationSeconds int, intervalMs int64) {
 	if query.CrossSeriesReducer == "" {
 		query.CrossSeriesReducer = crossSeriesReducerDefault
 	}
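To make the new buildSLOFilterExpression signature concrete, a standalone sketch of the selector expressions it produces (hypothetical values; the Sprintf shapes mirror the hunk above and the expectations in the tests below):

	package main

	import "fmt"

	func main() {
		projectName, serviceID, sloID := "test-proj", "test-service", "test-slo"
		sloName := fmt.Sprintf("projects/%s/services/%s/serviceLevelObjectives/%s", projectName, serviceID, sloID)

		// select_slo_health takes only the SLO name...
		fmt.Printf("select_slo_health(%q)\n", sloName)
		// ...while select_slo_burn_rate also takes a lookback period.
		fmt.Printf("select_slo_burn_rate(%q, %q)\n", sloName, "1h")
	}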
@@ -452,7 +502,8 @@ func setMetricAggParams(params *url.Values, query *metricQuery, durationSeconds int, intervalMs int64) {
 	// In case a preprocessor is defined, the preprocessor becomes the primary aggregation
 	// and the aggregation that is specified in the UI becomes the secondary aggregation
 	// Rules are specified in this issue: https://github.com/grafana/grafana/issues/30866
-	if query.PreprocessorType != PreprocessorTypeNone {
+	t := toPreprocessorType(query.Preprocessor)
+	if t != PreprocessorTypeNone {
 		params.Add("secondaryAggregation.alignmentPeriod", alignmentPeriod)
 		params.Add("secondaryAggregation.crossSeriesReducer", query.CrossSeriesReducer)
 		params.Add("secondaryAggregation.perSeriesAligner", query.PerSeriesAligner)
@@ -464,7 +515,7 @@ func setMetricAggParams(params *url.Values, query *metricQuery, durationSeconds int, intervalMs int64) {
 		params.Add("aggregation.crossSeriesReducer", primaryCrossSeriesReducer)

 		aligner := "ALIGN_RATE"
-		if query.PreprocessorType == PreprocessorTypeDelta {
+		if t == PreprocessorTypeDelta {
 			aligner = "ALIGN_DELTA"
 		}
 		params.Add("aggregation.perSeriesAligner", aligner)
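A minimal runnable illustration of the primary/secondary split that setMetricAggParams implements (values assumed; the rule is the one the comment above cites from grafana/grafana#30866 — a preprocessor takes over the primary aggregation and demotes the UI-selected aggregation to secondary):

	package main

	import (
		"fmt"
		"net/url"
	)

	func main() {
		params := url.Values{}
		// With a "rate" preprocessor the primary aggregation becomes the preprocessor itself...
		params.Add("aggregation.crossSeriesReducer", "REDUCE_NONE")
		params.Add("aggregation.perSeriesAligner", "ALIGN_RATE")
		// ...and the aggregation chosen in the UI moves to the secondary slot.
		params.Add("secondaryAggregation.crossSeriesReducer", "REDUCE_SUM")
		params.Add("secondaryAggregation.perSeriesAligner", "REDUCE_MIN")
		fmt.Println(params.Encode())
	}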
@@ -484,8 +535,8 @@ func setMetricAggParams(params *url.Values, query *metricQuery, durationSeconds int, intervalMs int64) {
 	}
 }

-func setSloAggParams(params *url.Values, query *sloQuery, durationSeconds int, intervalMs int64) {
-	params.Add("aggregation.alignmentPeriod", calculateAlignmentPeriod(query.AlignmentPeriod, intervalMs, durationSeconds))
+func setSloAggParams(params *url.Values, query *sloQuery, alignmentPeriod string, durationSeconds int, intervalMs int64) {
+	params.Add("aggregation.alignmentPeriod", calculateAlignmentPeriod(alignmentPeriod, intervalMs, durationSeconds))
 	if query.SelectorName == "select_slo_health" {
 		params.Add("aggregation.perSeriesAligner", "ALIGN_MEAN")
 	} else {
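setSloAggParams now receives the alignment period from the merged timeSeriesList instead of from sloQuery itself; only the aligner still depends on the SLO selector. A compact runnable sketch of that behavior (the real function receives the full *sloQuery; the ALIGN_NEXT_OLDER fallback matches the tests further down):

	package main

	import (
		"fmt"
		"net/url"
	)

	func setSloAggParams(params *url.Values, selectorName, alignmentPeriod string) {
		params.Add("aggregation.alignmentPeriod", alignmentPeriod)
		if selectorName == "select_slo_health" {
			params.Add("aggregation.perSeriesAligner", "ALIGN_MEAN")
		} else {
			params.Add("aggregation.perSeriesAligner", "ALIGN_NEXT_OLDER")
		}
	}

	func main() {
		params := url.Values{}
		setSloAggParams(&params, "select_slo_health", "+60s")
		fmt.Println(params.Encode())
		// aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_MEAN
	}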
@@ -515,12 +566,12 @@ func calculateAlignmentPeriod(alignmentPeriod string, intervalMs int64, durationSeconds int) string {
 }

 func formatLegendKeys(metricType string, defaultMetricName string, labels map[string]string,
-	additionalLabels map[string]string, query *cloudMonitoringTimeSeriesFilter) string {
-	if query.AliasBy == "" {
+	additionalLabels map[string]string, query *cloudMonitoringTimeSeriesList) string {
+	if query.aliasBy == "" {
 		return defaultMetricName
 	}

-	result := legendKeyFormat.ReplaceAllFunc([]byte(query.AliasBy), func(in []byte) []byte {
+	result := legendKeyFormat.ReplaceAllFunc([]byte(query.aliasBy), func(in []byte) []byte {
 		metaPartName := strings.Replace(string(in), "{{", "", 1)
 		metaPartName = strings.Replace(metaPartName, "}}", "", 1)
 		metaPartName = strings.TrimSpace(metaPartName)
@@ -543,20 +594,20 @@ func formatLegendKeys(metricType string, defaultMetricName string, labels map[string]string,
 			return []byte(val)
 		}

-		if metaPartName == "project" && query.ProjectName != "" {
-			return []byte(query.ProjectName)
+		if metaPartName == "project" && query.parameters.ProjectName != "" {
+			return []byte(query.parameters.ProjectName)
 		}

-		if metaPartName == "service" && query.Service != "" {
-			return []byte(query.Service)
+		if metaPartName == "service" && query.sloQ.ServiceId != "" {
+			return []byte(query.sloQ.ServiceId)
 		}

-		if metaPartName == "slo" && query.Slo != "" {
-			return []byte(query.Slo)
+		if metaPartName == "slo" && query.sloQ.SloId != "" {
+			return []byte(query.sloQ.SloId)
 		}

-		if metaPartName == "selector" && query.Selector != "" {
-			return []byte(query.Selector)
+		if metaPartName == "selector" && query.sloQ.SelectorName != "" {
+			return []byte(query.sloQ.SelectorName)
 		}

 		return in
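The alias templating fed by the lowered aliasBy field is a simple {{ ... }} substitution; a self-contained sketch of the mechanism (the pattern is assumed to be equivalent to the package's legendKeyFormat, and the value map stands in for the label lookups above):

	package main

	import (
		"fmt"
		"regexp"
		"strings"
	)

	var legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) // assumed equivalent pattern

	func main() {
		aliasBy := "{{ project }} - {{ metric.type }}"
		values := map[string]string{"project": "test-proj", "metric.type": "a/metric/type"}

		result := legendKeyFormat.ReplaceAllFunc([]byte(aliasBy), func(in []byte) []byte {
			name := strings.TrimSpace(strings.Trim(string(in), "{}"))
			if v, ok := values[name]; ok {
				return []byte(v)
			}
			return in // unknown keys are left as-is, like the function above
		})
		fmt.Println(string(result)) // test-proj - a/metric/type
	}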
@@ -50,31 +50,60 @@ func TestNewInstanceSettings(t *testing.T) {
 func TestCloudMonitoring(t *testing.T) {
 	service := &Service{}

+	t.Run("parses a time series list query", func(t *testing.T) {
+		req := baseTimeSeriesList()
+		qes, err := service.buildQueryExecutors(slog, req)
+		require.NoError(t, err)
+		queries := getCloudMonitoringListFromInterface(t, qes)
+
+		require.Len(t, queries, 1)
+		assert.Equal(t, "A", queries[0].refID)
+		assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].params.Encode())
+		assert.Equal(t, 7, len(queries[0].params))
+		assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].params["interval.startTime"][0])
+		assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].params["interval.endTime"][0])
+		assert.Equal(t, "ALIGN_MEAN", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].params["filter"][0])
+		assert.Equal(t, "FULL", queries[0].params["view"][0])
+		assert.Equal(t, "testalias", queries[0].aliasBy)
+	})
+
+	t.Run("parses a time series query", func(t *testing.T) {
+		req := baseTimeSeriesQuery()
+		qes, err := service.buildQueryExecutors(slog, req)
+		require.NoError(t, err)
+		queries := getCloudMonitoringQueryFromInterface(t, qes)
+
+		require.Len(t, queries, 1)
+		assert.Equal(t, "A", queries[0].refID)
+		assert.Equal(t, "foo", queries[0].parameters.Query)
+		assert.Equal(t, "testalias", queries[0].aliasBy)
+	})
+
 	t.Run("Parse migrated queries from frontend and build Google Cloud Monitoring API queries", func(t *testing.T) {
 		t.Run("and query has no aggregation set", func(t *testing.T) {
-			req := baseReq()
+			req := deprecatedReq()
+			err := migrateRequest(req)
+			require.NoError(t, err)
 			qes, err := service.buildQueryExecutors(slog, req)
 			require.NoError(t, err)
-			queries := getCloudMonitoringQueriesFromInterface(t, qes)
+			queries := getCloudMonitoringListFromInterface(t, qes)

 			require.Len(t, queries, 1)
-			assert.Equal(t, "A", queries[0].RefID)
-			assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target)
-			assert.Equal(t, 7, len(queries[0].Params))
-			assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0])
-			assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0])
-			assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0])
-			assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0])
-			assert.Equal(t, "FULL", queries[0].Params["view"][0])
-			assert.Equal(t, "testalias", queries[0].AliasBy)
+			assert.Equal(t, "A", queries[0].refID)
+			assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].params.Encode())
+			assert.Equal(t, 7, len(queries[0].params))
+			assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].params["interval.startTime"][0])
+			assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].params["interval.endTime"][0])
+			assert.Equal(t, "ALIGN_MEAN", queries[0].params["aggregation.perSeriesAligner"][0])
+			assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].params["filter"][0])
+			assert.Equal(t, "FULL", queries[0].params["view"][0])
+			assert.Equal(t, "testalias", queries[0].aliasBy)

 			t.Run("and generated deep link has correct parameters", func(t *testing.T) {
 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -91,7 +120,7 @@ func TestCloudMonitoring(t *testing.T) {
 		})

 		t.Run("and query has filters", func(t *testing.T) {
-			query := baseReq()
+			query := deprecatedReq()
 			query.Queries[0].JSON = json.RawMessage(`{
 				"metricType": "a/metric/type",
 				"filters": ["key", "=", "value", "AND", "key2", "=", "value2", "AND", "resource.type", "=", "another/resource/type"]
@@ -101,14 +130,14 @@ func TestCloudMonitoring(t *testing.T) {

 			qes, err := service.buildQueryExecutors(slog, query)
 			require.NoError(t, err)
-			queries := getCloudMonitoringQueriesFromInterface(t, qes)
+			queries := getCloudMonitoringListFromInterface(t, qes)
 			assert.Equal(t, 1, len(queries))
-			assert.Equal(t, `metric.type="a/metric/type" key="value" key2="value2" resource.type="another/resource/type"`, queries[0].Params["filter"][0])
+			assert.Equal(t, `metric.type="a/metric/type" key="value" key2="value2" resource.type="another/resource/type"`, queries[0].params["filter"][0])

 			// assign a resource type to query parameters
 			// in the actual workflow this information comes from the response of the Monitoring API
 			// the deep link should not contain this resource type since another resource type is included in the query filters
-			queries[0].Params.Set("resourceType", "a/resource/type")
+			queries[0].params.Set("resourceType", "a/resource/type")
 			dl := queries[0].buildDeepLink()

 			expectedTimeSelection := map[string]string{
@@ -124,7 +153,7 @@ func TestCloudMonitoring(t *testing.T) {

 		t.Run("and alignmentPeriod is set to grafana-auto", func(t *testing.T) {
 			t.Run("and IntervalMS is larger than 60000", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].Interval = 1000000 * time.Millisecond
 				req.Queries[0].JSON = json.RawMessage(`{
 					"alignmentPeriod": "grafana-auto",
@@ -135,12 +164,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+1000s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+1000s`, queries[0].params["aggregation.alignmentPeriod"][0])

 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -154,7 +183,7 @@ func TestCloudMonitoring(t *testing.T) {
 				verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter)
 			})
 			t.Run("and IntervalMS is less than 60000", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].Interval = 30000 * time.Millisecond
 				req.Queries[0].JSON = json.RawMessage(`{
 					"alignmentPeriod": "grafana-auto",
@@ -165,12 +194,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+60s`, queries[0].params["aggregation.alignmentPeriod"][0])

 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -189,7 +218,7 @@ func TestCloudMonitoring(t *testing.T) {
 			now := time.Now().UTC()

 			t.Run("and range is two hours", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now.Add(-(time.Hour * 2))
 				req.Queries[0].TimeRange.To = now
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -201,12 +230,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+60s`, queries[0].params["aggregation.alignmentPeriod"][0])
 			})

 			t.Run("and range is 22 hours", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now.Add(-(time.Hour * 22))
 				req.Queries[0].TimeRange.To = now
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -218,12 +247,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+60s`, queries[0].params["aggregation.alignmentPeriod"][0])
 			})

 			t.Run("and range is 23 hours", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now.Add(-(time.Hour * 23))
 				req.Queries[0].TimeRange.To = now
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -235,12 +264,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+300s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+300s`, queries[0].params["aggregation.alignmentPeriod"][0])
 			})

 			t.Run("and range is 7 days", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now
 				req.Queries[0].TimeRange.To = now.AddDate(0, 0, 7)
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -252,8 +281,8 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+3600s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+3600s`, queries[0].params["aggregation.alignmentPeriod"][0])
 			})
 		})
@@ -261,7 +290,7 @@ func TestCloudMonitoring(t *testing.T) {
 			now := time.Now().UTC()

 			t.Run("and range is two hours", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now.Add(-(time.Hour * 2))
 				req.Queries[0].TimeRange.To = now
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -273,12 +302,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+60s`, queries[0].params["aggregation.alignmentPeriod"][0])

 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -293,7 +322,7 @@ func TestCloudMonitoring(t *testing.T) {
 			})

 			t.Run("and range is 22 hours", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now.Add(-(time.Hour * 22))
 				req.Queries[0].TimeRange.To = now
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -305,12 +334,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+60s`, queries[0].params["aggregation.alignmentPeriod"][0])

 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -325,7 +354,7 @@ func TestCloudMonitoring(t *testing.T) {
 			})

 			t.Run("and range is 23 hours", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now.Add(-(time.Hour * 23))
 				req.Queries[0].TimeRange.To = now
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -337,12 +366,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+300s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+300s`, queries[0].params["aggregation.alignmentPeriod"][0])

 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -357,7 +386,7 @@ func TestCloudMonitoring(t *testing.T) {
 			})

 			t.Run("and range is 7 days", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].TimeRange.From = now.AddDate(0, 0, -7)
 				req.Queries[0].TimeRange.To = now
 				req.Queries[0].JSON = json.RawMessage(`{
@@ -369,12 +398,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+3600s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+3600s`, queries[0].params["aggregation.alignmentPeriod"][0])

 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -391,7 +420,7 @@ func TestCloudMonitoring(t *testing.T) {

 		t.Run("and alignmentPeriod is set in frontend", func(t *testing.T) {
 			t.Run("and alignment period is within accepted range", func(t *testing.T) {
-				req := baseReq()
+				req := deprecatedReq()
 				req.Queries[0].Interval = 1000
 				req.Queries[0].JSON = json.RawMessage(`{
 					"alignmentPeriod": "+600s"
@@ -401,12 +430,12 @@ func TestCloudMonitoring(t *testing.T) {

 				qes, err := service.buildQueryExecutors(slog, req)
 				require.NoError(t, err)
-				queries := getCloudMonitoringQueriesFromInterface(t, qes)
-				assert.Equal(t, `+600s`, queries[0].Params["aggregation.alignmentPeriod"][0])
+				queries := getCloudMonitoringListFromInterface(t, qes)
+				assert.Equal(t, `+600s`, queries[0].params["aggregation.alignmentPeriod"][0])

 				// assign resource type to query parameters to be included in the deep link filter
 				// in the actual workflow this information comes from the response of the Monitoring API
-				queries[0].Params.Set("resourceType", "a/resource/type")
+				queries[0].params.Set("resourceType", "a/resource/type")
 				dl := queries[0].buildDeepLink()

 				expectedTimeSelection := map[string]string{
@@ -422,7 +451,7 @@ func TestCloudMonitoring(t *testing.T) {
 		})

 		t.Run("and query has aggregation mean set", func(t *testing.T) {
-			req := baseReq()
+			req := deprecatedReq()
 			req.Queries[0].JSON = json.RawMessage(`{
 				"metricType": "a/metric/type",
 				"crossSeriesReducer": "REDUCE_SUM",
@@ -433,23 +462,23 @@ func TestCloudMonitoring(t *testing.T) {

 			qes, err := service.buildQueryExecutors(slog, req)
 			require.NoError(t, err)
-			queries := getCloudMonitoringQueriesFromInterface(t, qes)
+			queries := getCloudMonitoringListFromInterface(t, qes)

 			assert.Equal(t, 1, len(queries))
-			assert.Equal(t, "A", queries[0].RefID)
-			assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_SUM&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target)
-			assert.Equal(t, 7, len(queries[0].Params))
-			assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0])
-			assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0])
-			assert.Equal(t, "REDUCE_SUM", queries[0].Params["aggregation.crossSeriesReducer"][0])
-			assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0])
-			assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0])
-			assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0])
-			assert.Equal(t, "FULL", queries[0].Params["view"][0])
+			assert.Equal(t, "A", queries[0].refID)
+			assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_SUM&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].params.Encode())
+			assert.Equal(t, 7, len(queries[0].params))
+			assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].params["interval.startTime"][0])
+			assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].params["interval.endTime"][0])
+			assert.Equal(t, "REDUCE_SUM", queries[0].params["aggregation.crossSeriesReducer"][0])
+			assert.Equal(t, "ALIGN_MEAN", queries[0].params["aggregation.perSeriesAligner"][0])
+			assert.Equal(t, "+60s", queries[0].params["aggregation.alignmentPeriod"][0])
+			assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].params["filter"][0])
+			assert.Equal(t, "FULL", queries[0].params["view"][0])

 			// assign resource type to query parameters to be included in the deep link filter
 			// in the actual workflow this information comes from the response of the Monitoring API
-			queries[0].Params.Set("resourceType", "a/resource/type")
+			queries[0].params.Set("resourceType", "a/resource/type")
 			dl := queries[0].buildDeepLink()

 			expectedTimeSelection := map[string]string{
@@ -467,7 +496,7 @@ func TestCloudMonitoring(t *testing.T) {
 		})

 		t.Run("and query has group bys", func(t *testing.T) {
-			req := baseReq()
+			req := deprecatedReq()
 			req.Queries[0].JSON = json.RawMessage(`{
 				"metricType": "a/metric/type",
 				"crossSeriesReducer": "REDUCE_NONE",
@@ -479,23 +508,23 @@ func TestCloudMonitoring(t *testing.T) {

 			qes, err := service.buildQueryExecutors(slog, req)
 			require.NoError(t, err)
-			queries := getCloudMonitoringQueriesFromInterface(t, qes)
+			queries := getCloudMonitoringListFromInterface(t, qes)

 			assert.Equal(t, 1, len(queries))
-			assert.Equal(t, "A", queries[0].RefID)
-			assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target)
-			assert.Equal(t, 8, len(queries[0].Params))
-			assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0])
-			assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0])
-			assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0])
-			assert.Equal(t, "metric.label.group1", queries[0].Params["aggregation.groupByFields"][0])
-			assert.Equal(t, "metric.label.group2", queries[0].Params["aggregation.groupByFields"][1])
-			assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0])
-			assert.Equal(t, "FULL", queries[0].Params["view"][0])
+			assert.Equal(t, "A", queries[0].refID)
+			assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].params.Encode())
+			assert.Equal(t, 8, len(queries[0].params))
+			assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].params["interval.startTime"][0])
+			assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].params["interval.endTime"][0])
+			assert.Equal(t, "ALIGN_MEAN", queries[0].params["aggregation.perSeriesAligner"][0])
+			assert.Equal(t, "metric.label.group1", queries[0].params["aggregation.groupByFields"][0])
+			assert.Equal(t, "metric.label.group2", queries[0].params["aggregation.groupByFields"][1])
+			assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].params["filter"][0])
+			assert.Equal(t, "FULL", queries[0].params["view"][0])

 			// assign resource type to query parameters to be included in the deep link filter
 			// in the actual workflow this information comes from the response of the Monitoring API
-			queries[0].Params.Set("resourceType", "a/resource/type")
+			queries[0].params.Set("resourceType", "a/resource/type")
 			dl := queries[0].buildDeepLink()

 			expectedTimeSelection := map[string]string{
@@ -536,28 +565,31 @@ func TestCloudMonitoring(t *testing.T) {
 			},
 		},
 	}
+	err := migrateRequest(req)
+	require.NoError(t, err)

 	t.Run("and query type is metrics", func(t *testing.T) {
 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "A", queries[0].RefID)
-		assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target)
-		assert.Equal(t, 8, len(queries[0].Params))
-		assert.Equal(t, "metric.label.group1", queries[0].Params["aggregation.groupByFields"][0])
-		assert.Equal(t, "metric.label.group2", queries[0].Params["aggregation.groupByFields"][1])
-		assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0])
-		assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0])
-		assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0])
-		assert.Equal(t, "FULL", queries[0].Params["view"][0])
-		assert.Equal(t, "testalias", queries[0].AliasBy)
-		assert.Equal(t, []string{"metric.label.group1", "metric.label.group2"}, queries[0].GroupBys)
+		assert.Equal(t, "A", queries[0].refID)
+		assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].params.Encode())
+		assert.Equal(t, 8, len(queries[0].params))
+		assert.Equal(t, "metric.label.group1", queries[0].params["aggregation.groupByFields"][0])
+		assert.Equal(t, "metric.label.group2", queries[0].params["aggregation.groupByFields"][1])
+		assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].params["interval.startTime"][0])
+		assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].params["interval.endTime"][0])
+		assert.Equal(t, "ALIGN_MEAN", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].params["filter"][0])
+		assert.Equal(t, "FULL", queries[0].params["view"][0])
+		assert.Equal(t, "testalias", queries[0].aliasBy)
+		assert.Equal(t, []string{"metric.label.group1", "metric.label.group2"}, queries[0].parameters.GroupBys)

 		// assign resource type to query parameters to be included in the deep link filter
 		// in the actual workflow this information comes from the response of the Monitoring API
-		queries[0].Params.Set("resourceType", "a/resource/type")
+		queries[0].params.Set("resourceType", "a/resource/type")
 		dl := queries[0].buildDeepLink()

 		expectedTimeSelection := map[string]string{
@@ -583,21 +615,18 @@ func TestCloudMonitoring(t *testing.T) {
 			},
 			"sloQuery": {}
 		}`)
+		err = migrateRequest(req)
+		require.NoError(t, err)

 		qes, err = service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		tqueries := make([]*cloudMonitoringTimeSeriesQuery, 0)
-		for _, qi := range qes {
-			q, ok := qi.(*cloudMonitoringTimeSeriesQuery)
-			assert.True(t, ok)
-			tqueries = append(tqueries, q)
-		}
-
+		tqueries := getCloudMonitoringQueryFromInterface(t, qes)
 		assert.Equal(t, 1, len(tqueries))
-		assert.Equal(t, "A", tqueries[0].RefID)
-		assert.Equal(t, "test-proj", tqueries[0].ProjectName)
-		assert.Equal(t, "test-query", tqueries[0].Query)
-		assert.Equal(t, "test-alias", tqueries[0].AliasBy)
+		assert.Equal(t, "A", tqueries[0].refID)
+		assert.Equal(t, "test-proj", tqueries[0].parameters.ProjectName)
+		assert.Equal(t, "test-query", tqueries[0].parameters.Query)
+		assert.Equal(t, "test-alias", tqueries[0].aliasBy)
 	})

 	t.Run("and query type is SLOs", func(t *testing.T) {
@@ -614,20 +643,22 @@ func TestCloudMonitoring(t *testing.T) {
 			},
 			"metricQuery": {}
 		}`)
+		err := migrateRequest(req)
+		require.NoError(t, err)

 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "A", queries[0].RefID)
-		assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0])
-		assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0])
-		assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0])
-		assert.Equal(t, "", queries[0].AliasBy)
-		assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, `aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_MEAN&filter=select_slo_health%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z`, queries[0].Target)
-		assert.Equal(t, 5, len(queries[0].Params))
+		assert.Equal(t, "A", queries[0].refID)
+		assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].params["interval.startTime"][0])
+		assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].params["interval.endTime"][0])
+		assert.Equal(t, `+60s`, queries[0].params["aggregation.alignmentPeriod"][0])
+		assert.Equal(t, "", queries[0].aliasBy)
+		assert.Equal(t, "ALIGN_MEAN", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, `aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_MEAN&filter=select_slo_health%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z`, queries[0].params.Encode())
+		assert.Equal(t, 5, len(queries[0].params))

 		req.Queries[0].JSON = json.RawMessage(`{
 			"queryType": "slo",
@@ -642,11 +673,13 @@ func TestCloudMonitoring(t *testing.T) {
 			},
 			"metricQuery": {}
 		}`)
+		err = migrateRequest(req)
+		require.NoError(t, err)

 		qes, err = service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		qqueries := getCloudMonitoringQueriesFromInterface(t, qes)
-		assert.Equal(t, "ALIGN_NEXT_OLDER", qqueries[0].Params["aggregation.perSeriesAligner"][0])
+		qqueries := getCloudMonitoringListFromInterface(t, qes)
+		assert.Equal(t, "ALIGN_NEXT_OLDER", qqueries[0].params["aggregation.perSeriesAligner"][0])

 		dl := qqueries[0].buildDeepLink()
 		assert.Empty(t, dl)
@@ -665,11 +698,13 @@ func TestCloudMonitoring(t *testing.T) {
 			},
 			"metricQuery": {}
 		}`)
+		err = migrateRequest(req)
+		require.NoError(t, err)

 		qes, err = service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		qqqueries := getCloudMonitoringQueriesFromInterface(t, qes)
-		assert.Equal(t, `aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_NEXT_OLDER&filter=select_slo_burn_rate%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%2C+%221h%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z`, qqqueries[0].Target)
+		qqqueries := getCloudMonitoringListFromInterface(t, qes)
+		assert.Equal(t, `aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_NEXT_OLDER&filter=select_slo_burn_rate%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%2C+%221h%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z`, qqqueries[0].params.Encode())
 	})
 })
@@ -761,7 +796,7 @@ func TestCloudMonitoring(t *testing.T) {
 	})

 	t.Run("and query preprocessor is not defined", func(t *testing.T) {
-		req := baseReq()
+		req := deprecatedReq()
 		req.Queries[0].JSON = json.RawMessage(`{
 			"metricType": "a/metric/type",
 			"crossSeriesReducer": "REDUCE_MIN",
@@ -775,22 +810,22 @@ func TestCloudMonitoring(t *testing.T) {

 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "REDUCE_MIN", queries[0].Params["aggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "REDUCE_SUM", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0])
-		assert.Equal(t, "labelname", queries[0].Params["aggregation.groupByFields"][0])
+		assert.Equal(t, "REDUCE_MIN", queries[0].params["aggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "REDUCE_SUM", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["aggregation.alignmentPeriod"][0])
+		assert.Equal(t, "labelname", queries[0].params["aggregation.groupByFields"][0])

-		assert.NotContains(t, queries[0].Params, "secondaryAggregation.crossSeriesReducer")
-		assert.NotContains(t, "REDUCE_SUM", queries[0].Params, "secondaryAggregation.perSeriesAligner")
-		assert.NotContains(t, "+60s", queries[0].Params, "secondaryAggregation.alignmentPeriod")
-		assert.NotContains(t, "labelname", queries[0].Params, "secondaryAggregation.groupByFields")
+		assert.NotContains(t, queries[0].params, "secondaryAggregation.crossSeriesReducer")
+		assert.NotContains(t, "REDUCE_SUM", queries[0].params, "secondaryAggregation.perSeriesAligner")
+		assert.NotContains(t, "+60s", queries[0].params, "secondaryAggregation.alignmentPeriod")
+		assert.NotContains(t, "labelname", queries[0].params, "secondaryAggregation.groupByFields")
 	})

 	t.Run("and query preprocessor is set to none", func(t *testing.T) {
-		req := baseReq()
+		req := deprecatedReq()
 		req.Queries[0].JSON = json.RawMessage(`{
 			"metricType": "a/metric/type",
 			"crossSeriesReducer": "REDUCE_MIN",
@@ -805,22 +840,22 @@ func TestCloudMonitoring(t *testing.T) {

 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "REDUCE_MIN", queries[0].Params["aggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "REDUCE_SUM", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0])
-		assert.Equal(t, "labelname", queries[0].Params["aggregation.groupByFields"][0])
+		assert.Equal(t, "REDUCE_MIN", queries[0].params["aggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "REDUCE_SUM", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["aggregation.alignmentPeriod"][0])
+		assert.Equal(t, "labelname", queries[0].params["aggregation.groupByFields"][0])

-		assert.NotContains(t, queries[0].Params, "secondaryAggregation.crossSeriesReducer")
-		assert.NotContains(t, "REDUCE_SUM", queries[0].Params, "secondaryAggregation.perSeriesAligner")
-		assert.NotContains(t, "+60s", queries[0].Params, "secondaryAggregation.alignmentPeriod")
-		assert.NotContains(t, "labelname", queries[0].Params, "secondaryAggregation.groupByFields")
+		assert.NotContains(t, queries[0].params, "secondaryAggregation.crossSeriesReducer")
+		assert.NotContains(t, "REDUCE_SUM", queries[0].params, "secondaryAggregation.perSeriesAligner")
+		assert.NotContains(t, "+60s", queries[0].params, "secondaryAggregation.alignmentPeriod")
+		assert.NotContains(t, "labelname", queries[0].params, "secondaryAggregation.groupByFields")
 	})

 	t.Run("and query preprocessor is set to rate and there's no group bys", func(t *testing.T) {
-		req := baseReq()
+		req := deprecatedReq()
 		req.Queries[0].JSON = json.RawMessage(`{
 			"metricType": "a/metric/type",
 			"crossSeriesReducer": "REDUCE_SUM",
@@ -835,20 +870,20 @@ func TestCloudMonitoring(t *testing.T) {

 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "REDUCE_NONE", queries[0].Params["aggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "ALIGN_RATE", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0])
+		assert.Equal(t, "REDUCE_NONE", queries[0].params["aggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "ALIGN_RATE", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["aggregation.alignmentPeriod"][0])

-		assert.Equal(t, "REDUCE_SUM", queries[0].Params["secondaryAggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "REDUCE_MIN", queries[0].Params["secondaryAggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["secondaryAggregation.alignmentPeriod"][0])
+		assert.Equal(t, "REDUCE_SUM", queries[0].params["secondaryAggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "REDUCE_MIN", queries[0].params["secondaryAggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["secondaryAggregation.alignmentPeriod"][0])
 	})

 	t.Run("and query preprocessor is set to rate and group bys exist", func(t *testing.T) {
-		req := baseReq()
+		req := deprecatedReq()
 		req.Queries[0].JSON = json.RawMessage(`{
 			"metricType": "a/metric/type",
 			"crossSeriesReducer": "REDUCE_SUM",
@@ -863,22 +898,22 @@ func TestCloudMonitoring(t *testing.T) {

 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "REDUCE_SUM", queries[0].Params["aggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "ALIGN_RATE", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0])
-		assert.Equal(t, "labelname", queries[0].Params["aggregation.groupByFields"][0])
+		assert.Equal(t, "REDUCE_SUM", queries[0].params["aggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "ALIGN_RATE", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["aggregation.alignmentPeriod"][0])
+		assert.Equal(t, "labelname", queries[0].params["aggregation.groupByFields"][0])

-		assert.Equal(t, "REDUCE_SUM", queries[0].Params["secondaryAggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "REDUCE_MIN", queries[0].Params["secondaryAggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["secondaryAggregation.alignmentPeriod"][0])
-		assert.Equal(t, "labelname", queries[0].Params["secondaryAggregation.groupByFields"][0])
+		assert.Equal(t, "REDUCE_SUM", queries[0].params["secondaryAggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "REDUCE_MIN", queries[0].params["secondaryAggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["secondaryAggregation.alignmentPeriod"][0])
+		assert.Equal(t, "labelname", queries[0].params["secondaryAggregation.groupByFields"][0])
 	})

 	t.Run("and query preprocessor is set to delta and there's no group bys", func(t *testing.T) {
-		req := baseReq()
+		req := deprecatedReq()
 		req.Queries[0].JSON = json.RawMessage(`{
 			"metricType": "a/metric/type",
 			"crossSeriesReducer": "REDUCE_MIN",
@@ -893,20 +928,20 @@ func TestCloudMonitoring(t *testing.T) {

 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "REDUCE_NONE", queries[0].Params["aggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "ALIGN_DELTA", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0])
+		assert.Equal(t, "REDUCE_NONE", queries[0].params["aggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "ALIGN_DELTA", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["aggregation.alignmentPeriod"][0])

-		assert.Equal(t, "REDUCE_MIN", queries[0].Params["secondaryAggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "REDUCE_SUM", queries[0].Params["secondaryAggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["secondaryAggregation.alignmentPeriod"][0])
+		assert.Equal(t, "REDUCE_MIN", queries[0].params["secondaryAggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "REDUCE_SUM", queries[0].params["secondaryAggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["secondaryAggregation.alignmentPeriod"][0])
 	})

 	t.Run("and query preprocessor is set to delta and group bys exist", func(t *testing.T) {
-		req := baseReq()
+		req := deprecatedReq()
 		req.Queries[0].JSON = json.RawMessage(`{
 			"metricType": "a/metric/type",
 			"crossSeriesReducer": "REDUCE_MIN",
@@ -921,27 +956,39 @@ func TestCloudMonitoring(t *testing.T) {

 		qes, err := service.buildQueryExecutors(slog, req)
 		require.NoError(t, err)
-		queries := getCloudMonitoringQueriesFromInterface(t, qes)
+		queries := getCloudMonitoringListFromInterface(t, qes)

 		assert.Equal(t, 1, len(queries))
-		assert.Equal(t, "REDUCE_MIN", queries[0].Params["aggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "ALIGN_DELTA", queries[0].Params["aggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0])
-		assert.Equal(t, "labelname", queries[0].Params["aggregation.groupByFields"][0])
+		assert.Equal(t, "REDUCE_MIN", queries[0].params["aggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "ALIGN_DELTA", queries[0].params["aggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["aggregation.alignmentPeriod"][0])
+		assert.Equal(t, "labelname", queries[0].params["aggregation.groupByFields"][0])

-		assert.Equal(t, "REDUCE_MIN", queries[0].Params["secondaryAggregation.crossSeriesReducer"][0])
-		assert.Equal(t, "REDUCE_SUM", queries[0].Params["secondaryAggregation.perSeriesAligner"][0])
-		assert.Equal(t, "+60s", queries[0].Params["secondaryAggregation.alignmentPeriod"][0])
-		assert.Equal(t, "labelname", queries[0].Params["secondaryAggregation.groupByFields"][0])
+		assert.Equal(t, "REDUCE_MIN", queries[0].params["secondaryAggregation.crossSeriesReducer"][0])
+		assert.Equal(t, "REDUCE_SUM", queries[0].params["secondaryAggregation.perSeriesAligner"][0])
+		assert.Equal(t, "+60s", queries[0].params["secondaryAggregation.alignmentPeriod"][0])
+		assert.Equal(t, "labelname", queries[0].params["secondaryAggregation.groupByFields"][0])
 	})
 }

-func getCloudMonitoringQueriesFromInterface(t *testing.T, qes []cloudMonitoringQueryExecutor) []*cloudMonitoringTimeSeriesFilter {
+func getCloudMonitoringListFromInterface(t *testing.T, qes []cloudMonitoringQueryExecutor) []*cloudMonitoringTimeSeriesList {
 	t.Helper()

-	queries := make([]*cloudMonitoringTimeSeriesFilter, 0)
+	queries := make([]*cloudMonitoringTimeSeriesList, 0)
 	for _, qi := range qes {
-		q, ok := qi.(*cloudMonitoringTimeSeriesFilter)
+		q, ok := qi.(*cloudMonitoringTimeSeriesList)
 		require.Truef(t, ok, "Received wrong type %T", qi)
 		queries = append(queries, q)
 	}
 	return queries
 }

+func getCloudMonitoringQueryFromInterface(t *testing.T, qes []cloudMonitoringQueryExecutor) []*cloudMonitoringTimeSeriesQuery {
+	t.Helper()
+
+	queries := make([]*cloudMonitoringTimeSeriesQuery, 0)
+	for _, qi := range qes {
+		q, ok := qi.(*cloudMonitoringTimeSeriesQuery)
+		require.Truef(t, ok, "Received wrong type %T", qi)
+		queries = append(queries, q)
+	}
+	return queries
+}
@ -1009,7 +1056,7 @@ func verifyDeepLink(t *testing.T, dl string, expectedTimeSelection map[string]st
|
||||
}
|
||||
}
|
||||
|
||||
func baseReq() *backend.QueryDataRequest {
|
||||
func deprecatedReq() *backend.QueryDataRequest {
|
||||
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
|
||||
query := &backend.QueryDataRequest{
|
||||
Queries: []backend.DataQuery{
|
||||
@ -1031,6 +1078,54 @@ func baseReq() *backend.QueryDataRequest {
|
||||
return query
|
||||
}
|
||||
|
||||
func baseTimeSeriesList() *backend.QueryDataRequest {
|
||||
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
|
||||
query := &backend.QueryDataRequest{
|
||||
Queries: []backend.DataQuery{
|
||||
{
|
||||
RefID: "A",
|
||||
TimeRange: backend.TimeRange{
|
||||
From: fromStart,
|
||||
To: fromStart.Add(34 * time.Minute),
|
||||
},
|
||||
QueryType: "metrics",
|
||||
JSON: json.RawMessage(`{
|
||||
"timeSeriesList": {
|
||||
"metricType": "a/metric/type",
|
||||
"view": "FULL"
|
||||
},
|
||||
"aliasBy": "testalias"
|
||||
}`),
|
||||
},
|
||||
},
|
||||
}
|
||||
return query
|
||||
}
|
||||
|
||||
func baseTimeSeriesQuery() *backend.QueryDataRequest {
|
||||
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
|
||||
query := &backend.QueryDataRequest{
|
||||
Queries: []backend.DataQuery{
|
||||
{
|
||||
RefID: "A",
|
||||
TimeRange: backend.TimeRange{
|
||||
From: fromStart,
|
||||
To: fromStart.Add(34 * time.Minute),
|
||||
},
|
||||
QueryType: "metrics",
|
||||
JSON: json.RawMessage(`{
|
||||
"queryType": "metrics",
|
||||
"timeSeriesQuery": {
|
||||
"query": "foo"
|
||||
},
|
||||
"aliasBy": "testalias"
|
||||
}`),
|
||||
},
|
||||
},
|
||||
}
|
||||
return query
|
||||
}
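// A hedged sketch of how the two fixtures above select an executor type,
// assuming the package context of these tests; the test name and the bare
// Service value are hypothetical, not part of this change.
func TestExecutorSelection(t *testing.T) {
	service := &Service{} // assumes a Service wired up as in the tests above
	qes, err := service.buildQueryExecutors(slog, baseTimeSeriesList())
	require.NoError(t, err)
	// a "timeSeriesList" payload should yield *cloudMonitoringTimeSeriesList executors
	_ = getCloudMonitoringListFromInterface(t, qes)

	qes, err = service.buildQueryExecutors(slog, baseTimeSeriesQuery())
	require.NoError(t, err)
	// a "timeSeriesQuery" payload should yield *cloudMonitoringTimeSeriesQuery executors
	_ = getCloudMonitoringQueryFromInterface(t, qes)
}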

func TestCheckHealth(t *testing.T) {
t.Run("and using GCE authentation should return proper error", func(t *testing.T) {
im := datasource.NewInstanceManager(func(s backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {

@ -18,8 +18,8 @@ import (
"github.com/grafana/grafana/pkg/infra/tracing"
)

func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) doRequestFilterPage(ctx context.Context, r *http.Request, dsInfo datasourceInfo) (cloudMonitoringResponse, error) {
r.URL.RawQuery = timeSeriesFilter.Params.Encode()
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) doRequestFilterPage(ctx context.Context, r *http.Request, dsInfo datasourceInfo) (cloudMonitoringResponse, error) {
r.URL.RawQuery = timeSeriesFilter.params.Encode()
r = r.WithContext(ctx)
res, err := dsInfo.services[cloudMonitor].client.Do(r)
if err != nil {
@ -34,10 +34,10 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) doRequestFilterPage(ctx
return dnext, nil
}

func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context, req *backend.QueryDataRequest,
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
dr := &backend.DataResponse{}
projectName := timeSeriesFilter.ProjectName
projectName := timeSeriesFilter.parameters.ProjectName
if projectName == "" {
var err error
projectName, err = s.getDefaultProject(ctx, dsInfo)
@ -71,7 +71,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
}

ctx, span := tracer.Start(ctx, "cloudMonitoring query")
span.SetAttributes("target", timeSeriesFilter.Target, attribute.Key("target").String(timeSeriesFilter.Target))
span.SetAttributes("target", timeSeriesFilter.params.Encode(), attribute.Key("target").String(timeSeriesFilter.params.Encode()))
span.SetAttributes("from", req.Queries[0].TimeRange.From, attribute.Key("from").String(req.Queries[0].TimeRange.From.String()))
span.SetAttributes("until", req.Queries[0].TimeRange.To, attribute.Key("until").String(req.Queries[0].TimeRange.To.String()))
span.SetAttributes("datasource_id", dsInfo.id, attribute.Key("datasource_id").Int64(dsInfo.id))
@ -86,7 +86,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
}
nextPageToken := d.NextPageToken
for nextPageToken != "" {
timeSeriesFilter.Params["pageToken"] = []string{d.NextPageToken}
timeSeriesFilter.params["pageToken"] = []string{d.NextPageToken}
nextPage, err := timeSeriesFilter.doRequestFilterPage(ctx, r, dsInfo)
if err != nil {
dr.Error = err
@ -100,7 +100,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
}

//nolint:gocyclo
func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes *backend.DataResponse,
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) parseResponse(queryRes *backend.DataResponse,
response cloudMonitoringResponse, executedQueryString string) error {
frames := data.Frames{}

@ -112,7 +112,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
seriesLabels["resource.type"] = series.Resource.Type

frame := data.NewFrameOfFieldTypes("", len(series.Points), data.FieldTypeTime, data.FieldTypeFloat64)
frame.RefID = timeSeriesFilter.RefID
frame.RefID = timeSeriesFilter.refID
frame.Meta = &data.FrameMeta{
ExecutedQueryString: executedQueryString,
}
@ -121,7 +121,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
labels["metric.label."+key] = value
seriesLabels["metric.label."+key] = value

if len(timeSeriesFilter.GroupBys) == 0 || containsLabel(timeSeriesFilter.GroupBys, "metric.label."+key) {
if len(timeSeriesFilter.parameters.GroupBys) == 0 || containsLabel(timeSeriesFilter.parameters.GroupBys, "metric.label."+key) {
defaultMetricName += " " + value
}
}
@ -130,7 +130,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
labels["resource.label."+key] = value
seriesLabels["resource.label."+key] = value

if containsLabel(timeSeriesFilter.GroupBys, "resource.label."+key) {
if containsLabel(timeSeriesFilter.parameters.GroupBys, "resource.label."+key) {
defaultMetricName += " " + value
}
}
@ -161,10 +161,10 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
}

customFrameMeta := map[string]interface{}{}
customFrameMeta["alignmentPeriod"] = timeSeriesFilter.Params.Get("aggregation.alignmentPeriod")
customFrameMeta["perSeriesAligner"] = timeSeriesFilter.Params.Get("aggregation.perSeriesAligner")
customFrameMeta["alignmentPeriod"] = timeSeriesFilter.params.Get("aggregation.alignmentPeriod")
customFrameMeta["perSeriesAligner"] = timeSeriesFilter.params.Get("aggregation.perSeriesAligner")
customFrameMeta["labels"] = labels
customFrameMeta["groupBys"] = timeSeriesFilter.GroupBys
customFrameMeta["groupBys"] = timeSeriesFilter.parameters.GroupBys
if frame.Meta != nil {
frame.Meta.Custom = customFrameMeta
} else {
@ -208,7 +208,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
timeField,
valueField,
},
RefID: timeSeriesFilter.RefID,
RefID: timeSeriesFilter.refID,
Meta: &data.FrameMeta{
ExecutedQueryString: executedQueryString,
},
@ -227,7 +227,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
}
if len(response.TimeSeries) > 0 {
dl := timeSeriesFilter.buildDeepLink()
frames = addConfigData(frames, dl, response.Unit, timeSeriesFilter.Params.Get("aggregation.alignmentPeriod"))
frames = addConfigData(frames, dl, response.Unit, timeSeriesFilter.params.Get("aggregation.alignmentPeriod"))
}

queryRes.Frames = frames
@ -235,7 +235,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
return nil
}

func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) handleNonDistributionSeries(series timeSeries,
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) handleNonDistributionSeries(series timeSeries,
defaultMetricName string, seriesLabels map[string]string, frame *data.Frame) {
for i := 0; i < len(series.Points); i++ {
point := series.Points[i]
@ -265,14 +265,14 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) handleNonDistributionSe
setDisplayNameAsFieldName(dataField)
}

func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string {
if timeSeriesFilter.Slo != "" {
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) buildDeepLink() string {
if timeSeriesFilter.sloQ != nil && timeSeriesFilter.sloQ.SloId != "" {
return ""
}

filter := timeSeriesFilter.Params.Get("filter")
filter := timeSeriesFilter.params.Get("filter")
if !strings.Contains(filter, "resource.type=") {
resourceType := timeSeriesFilter.Params.Get("resourceType")
resourceType := timeSeriesFilter.params.Get("resourceType")
if resourceType != "" {
filter = fmt.Sprintf(`resource.type="%s" %s`, resourceType, filter)
}
@ -281,12 +281,12 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string
u, err := url.Parse("https://console.cloud.google.com/monitoring/metrics-explorer")
if err != nil {
slog.Error("Failed to generate deep link: unable to parse metrics explorer URL", "ProjectName",
timeSeriesFilter.ProjectName, "query", timeSeriesFilter.RefID)
timeSeriesFilter.parameters.ProjectName, "query", timeSeriesFilter.refID)
return ""
}

rawQuery := u.Query()
rawQuery.Set("project", timeSeriesFilter.ProjectName)
rawQuery.Set("project", timeSeriesFilter.parameters.ProjectName)
rawQuery.Set("Grafana_deeplink", "true")

pageState := map[string]interface{}{
@ -296,11 +296,11 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string
{
"timeSeriesFilter": map[string]interface{}{
"aggregations": []string{},
"crossSeriesReducer": timeSeriesFilter.Params.Get("aggregation.crossSeriesReducer"),
"crossSeriesReducer": timeSeriesFilter.params.Get("aggregation.crossSeriesReducer"),
"filter": filter,
"groupByFields": timeSeriesFilter.Params["aggregation.groupByFields"],
"minAlignmentPeriod": strings.TrimPrefix(timeSeriesFilter.Params.Get("aggregation.alignmentPeriod"), "+"), // get rid of leading +
"perSeriesAligner": timeSeriesFilter.Params.Get("aggregation.perSeriesAligner"),
"groupByFields": timeSeriesFilter.params["aggregation.groupByFields"],
"minAlignmentPeriod": strings.TrimPrefix(timeSeriesFilter.params.Get("aggregation.alignmentPeriod"), "+"), // get rid of leading +
"perSeriesAligner": timeSeriesFilter.params.Get("aggregation.perSeriesAligner"),
"secondaryGroupByFields": []string{},
"unitOverride": "1",
},
@ -314,15 +314,15 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string
},
"timeSelection": map[string]string{
"timeRange": "custom",
"start": timeSeriesFilter.Params.Get("interval.startTime"),
"end": timeSeriesFilter.Params.Get("interval.endTime"),
"start": timeSeriesFilter.params.Get("interval.startTime"),
"end": timeSeriesFilter.params.Get("interval.endTime"),
},
}

blob, err := json.Marshal(pageState)
if err != nil {
slog.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesFilter.ProjectName,
"query", timeSeriesFilter.RefID)
slog.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesFilter.parameters.ProjectName,
"query", timeSeriesFilter.refID)
return ""
}

@ -332,7 +332,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string
accountChooserURL, err := url.Parse("https://accounts.google.com/AccountChooser")
if err != nil {
slog.Error("Failed to generate deep link: unable to parse account chooser URL", "ProjectName",
timeSeriesFilter.ProjectName, "query", timeSeriesFilter.RefID)
timeSeriesFilter.parameters.ProjectName, "query", timeSeriesFilter.refID)
return ""
}
accountChooserQuery := accountChooserURL.Query()
@ -349,6 +349,6 @@ func setDisplayNameAsFieldName(f *data.Field) {
f.Config.DisplayNameFromDS = f.Name
}

func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) getRefID() string {
return timeSeriesFilter.RefID
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) getRefID() string {
return timeSeriesFilter.refID
}
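// For orientation, a hedged sketch of the link buildDeepLink assembles,
// assuming the package context above; the project name and filter are
// hypothetical values:
//
//	query := &cloudMonitoringTimeSeriesList{
//		params:     url.Values{"filter": []string{`resource.type="gce_instance"`}},
//		parameters: &timeSeriesList{ProjectName: "my-project"},
//	}
//	dl := query.buildDeepLink()
//	// dl wraps the metrics explorer URL (project, Grafana_deeplink and the
//	// marshalled pageState as query parameters) in an AccountChooser redirect:
//	// https://accounts.google.com/AccountChooser?continue=https%3A%2F%2Fconsole.cloud.google.com%2Fmonitoring%2Fmetrics-explorer%3F...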

@ -24,7 +24,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))

res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -47,7 +47,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)

@ -87,9 +87,9 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, GroupBys: []string{
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{GroupBys: []string{
"metric.label.instance_name", "resource.label.zone",
}}
}}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -108,7 +108,13 @@ func TestTimeSeriesFilter(t *testing.T) {
res := &backend.DataResponse{}

t.Run("and the alias pattern is for metric type, a metric label and a resource label", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{
GroupBys: []string{"metric.label.instance_name", "resource.label.zone"},
},
aliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -121,7 +127,11 @@ func TestTimeSeriesFilter(t *testing.T) {
})

t.Run("and the alias pattern is for metric name", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}},
aliasBy: "metric {{metric.name}} service {{metric.service}}",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -139,7 +149,11 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
aliasBy: "{{bucket}}",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -180,7 +194,11 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
aliasBy: "{{bucket}}",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -214,7 +232,11 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
aliasBy: "{{bucket}}",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
@ -250,7 +272,11 @@ func TestTimeSeriesFilter(t *testing.T) {

t.Run("and systemlabel contains key with array of string", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test}}"}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
aliasBy: "{{metadata.system_labels.test}}",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -264,7 +290,11 @@ func TestTimeSeriesFilter(t *testing.T) {

t.Run("and systemlabel contains key with array of string2", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test2}}"}
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{},
aliasBy: "{{metadata.system_labels.test2}}",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -281,13 +311,17 @@ func TestTimeSeriesFilter(t *testing.T) {

t.Run("and alias by is expanded", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{
Params: url.Values{},
ProjectName: "test-proj",
Selector: "select_slo_compliance",
Service: "test-service",
Slo: "test-slo",
AliasBy: "{{project}} - {{service}} - {{slo}} - {{selector}}",
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{
ProjectName: "test-proj",
},
aliasBy: "{{project}} - {{service}} - {{slo}} - {{selector}}",
sloQ: &sloQuery{
SelectorName: "select_slo_compliance",
ServiceId: "test-service",
SloId: "test-slo",
},
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
@ -304,12 +338,16 @@ func TestTimeSeriesFilter(t *testing.T) {

t.Run("and alias by is expanded", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{
Params: url.Values{},
ProjectName: "test-proj",
Selector: "select_slo_compliance",
Service: "test-service",
Slo: "test-slo",
query := &cloudMonitoringTimeSeriesList{
params: url.Values{},
parameters: &timeSeriesList{
ProjectName: "test-proj",
},
sloQ: &sloQuery{
SelectorName: "select_slo_compliance",
ServiceId: "test-service",
SloId: "test-slo",
},
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
@ -325,7 +363,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 1, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -338,7 +376,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -358,9 +396,11 @@ func TestTimeSeriesFilter(t *testing.T) {

res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
AliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
parameters: &timeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
aliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
timeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
@ -378,7 +418,7 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, parameters: &timeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -394,9 +434,9 @@ func TestTimeSeriesFilter(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 3, len(data.TimeSeries))
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{
query := &cloudMonitoringTimeSeriesList{params: url.Values{
"aggregation.alignmentPeriod": []string{"+60s"},
}}
}, parameters: &timeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames
@ -411,7 +451,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))

res := &backend.DataResponse{}
require.NoError(t, (&cloudMonitoringTimeSeriesFilter{GroupBys: []string{"test_group_by"}}).parseResponse(res, data, "test_query"))
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))

require.NotNil(t, res.Frames[0].Meta)
assert.Equal(t, sdkdata.FrameMeta{
@ -434,7 +474,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))

res := &backend.DataResponse{}
require.NoError(t, (&cloudMonitoringTimeSeriesFilter{GroupBys: []string{"test_group_by"}}).parseResponse(res, data, "test_query"))
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))

require.NotNil(t, res.Frames[0].Meta)
assert.Equal(t, sdkdata.FrameMeta{
@ -457,7 +497,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))

res := &backend.DataResponse{}
require.NoError(t, (&cloudMonitoringTimeSeriesFilter{GroupBys: []string{"test_group_by"}}).parseResponse(res, data, "test_query"))
require.NoError(t, (&cloudMonitoringTimeSeriesList{parameters: &timeSeriesList{GroupBys: []string{"test_group_by"}}}).parseResponse(res, data, "test_query"))

require.NotNil(t, res.Frames[0].Meta)
assert.Equal(t, sdkdata.FrameMeta{
@ -481,7 +521,7 @@ func TestTimeSeriesFilter(t *testing.T) {
assert.Equal(t, 1, len(data.TimeSeries))

res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, Slo: "yes"}
query := &cloudMonitoringTimeSeriesList{params: url.Values{}, sloQ: &sloQuery{SloId: "yes"}, parameters: &timeSeriesList{}}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
frames := res.Frames

@ -25,13 +25,13 @@ import (
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) appendGraphPeriod(req *backend.QueryDataRequest) string {
// GraphPeriod needs to be explicitly disabled.
// If not set, the default behavior is to set an automatic value
if timeSeriesQuery.GraphPeriod != "disabled" {
if timeSeriesQuery.GraphPeriod == "auto" || timeSeriesQuery.GraphPeriod == "" {
if timeSeriesQuery.parameters.GraphPeriod != "disabled" {
if timeSeriesQuery.parameters.GraphPeriod == "auto" || timeSeriesQuery.parameters.GraphPeriod == "" {
intervalCalculator := intervalv2.NewCalculator(intervalv2.CalculatorOptions{})
interval := intervalCalculator.Calculate(req.Queries[0].TimeRange, time.Duration(timeSeriesQuery.IntervalMS/1000)*time.Second, req.Queries[0].MaxDataPoints)
timeSeriesQuery.GraphPeriod = interval.Text
timeSeriesQuery.parameters.GraphPeriod = interval.Text
}
return fmt.Sprintf(" | graph_period %s", timeSeriesQuery.GraphPeriod)
return fmt.Sprintf(" | graph_period %s", timeSeriesQuery.parameters.GraphPeriod)
}
return ""
}
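// For reference, a hedged sketch of how the graph_period suffix composes with
// an MQL string under the rules above; the fetch expression is illustrative,
// not taken from this change:
//
//	q := &cloudMonitoringTimeSeriesQuery{parameters: &timeSeriesQuery{
//		Query: "fetch gce_instance | metric 'compute.googleapis.com/instance/cpu/utilization'",
//	}}
//	suffix := q.appendGraphPeriod(&backend.QueryDataRequest{Queries: []backend.DataQuery{{}}})
//	// GraphPeriod "" or "auto" -> automatic value from the time range, e.g. " | graph_period 1ms"
//	// GraphPeriod "disabled"   -> ""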
@ -57,7 +57,7 @@ func doRequestQueryPage(log log.Logger, requestBody map[string]interface{}, r *h
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context, req *backend.QueryDataRequest,
s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
dr := &backend.DataResponse{}
projectName := timeSeriesQuery.ProjectName
projectName := timeSeriesQuery.parameters.ProjectName

if projectName == "" {
var err error
@ -69,21 +69,21 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context,
timeSeriesQuery.logger.Info("No project name set on query, using project name from datasource", "projectName", projectName)
}

timeSeriesQuery.Query += timeSeriesQuery.appendGraphPeriod(req)
timeSeriesQuery.parameters.Query += timeSeriesQuery.appendGraphPeriod(req)
from := req.Queries[0].TimeRange.From
to := req.Queries[0].TimeRange.To
timeFormat := "2006/01/02-15:04:05"
timeSeriesQuery.Query += fmt.Sprintf(" | within d'%s', d'%s'", from.UTC().Format(timeFormat), to.UTC().Format(timeFormat))
timeSeriesQuery.parameters.Query += fmt.Sprintf(" | within d'%s', d'%s'", from.UTC().Format(timeFormat), to.UTC().Format(timeFormat))
p := path.Join("/v3/projects", projectName, "timeSeries:query")

ctx, span := tracer.Start(ctx, "cloudMonitoring MQL query")
span.SetAttributes("query", timeSeriesQuery.Query, attribute.Key("query").String(timeSeriesQuery.Query))
span.SetAttributes("query", timeSeriesQuery.parameters.Query, attribute.Key("query").String(timeSeriesQuery.parameters.Query))
span.SetAttributes("from", req.Queries[0].TimeRange.From, attribute.Key("from").String(req.Queries[0].TimeRange.From.String()))
span.SetAttributes("until", req.Queries[0].TimeRange.To, attribute.Key("until").String(req.Queries[0].TimeRange.To.String()))
defer span.End()

requestBody := map[string]interface{}{
"query": timeSeriesQuery.Query,
"query": timeSeriesQuery.parameters.Query,
}
r, err := s.createRequest(timeSeriesQuery.logger, &dsInfo, p, bytes.NewBuffer([]byte{}))
if err != nil {
@ -100,7 +100,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context,
}
for d.NextPageToken != "" {
requestBody := map[string]interface{}{
"query": timeSeriesQuery.Query,
"query": timeSeriesQuery.parameters.Query,
"pageToken": d.NextPageToken,
}
nextPage, err := doRequestQueryPage(timeSeriesQuery.logger, requestBody, r, dsInfo)
@ -112,7 +112,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context,
d.NextPageToken = nextPage.NextPageToken
}

return dr, d, timeSeriesQuery.Query, nil
return dr, d, timeSeriesQuery.parameters.Query, nil
}

func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *backend.DataResponse,
@ -122,7 +122,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
for _, series := range response.TimeSeriesData {
seriesLabels := make(map[string]string)
frame := data.NewFrameOfFieldTypes("", len(series.PointData), data.FieldTypeTime, data.FieldTypeFloat64)
frame.RefID = timeSeriesQuery.RefID
frame.RefID = timeSeriesQuery.refID
frame.Meta = &data.FrameMeta{
ExecutedQueryString: executedQueryString,
}
@ -184,8 +184,12 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
}

metricName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, nil,
&cloudMonitoringTimeSeriesFilter{
ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy, logger: timeSeriesQuery.logger,
&cloudMonitoringTimeSeriesList{
parameters: &timeSeriesList{
ProjectName: timeSeriesQuery.parameters.ProjectName,
},
aliasBy: timeSeriesQuery.aliasBy,
logger: timeSeriesQuery.logger,
})
dataField := frame.Fields[1]
dataField.Name = metricName
@ -219,7 +223,11 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})

frameName := formatLegendKeys(d.Key, defaultMetricName, nil, additionalLabels, &cloudMonitoringTimeSeriesFilter{ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy, logger: timeSeriesQuery.logger})
frameName := formatLegendKeys(d.Key, defaultMetricName, nil, additionalLabels, &cloudMonitoringTimeSeriesList{
parameters: &timeSeriesList{ProjectName: timeSeriesQuery.parameters.ProjectName},
aliasBy: timeSeriesQuery.aliasBy,
logger: timeSeriesQuery.logger,
})
valueField.Name = frameName
valueField.Labels = seriesLabels
setDisplayNameAsFieldName(valueField)
@ -230,7 +238,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
timeField,
valueField,
},
RefID: timeSeriesQuery.RefID,
RefID: timeSeriesQuery.refID,
}

if maxKey < i {
@ -247,7 +255,11 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
additionalLabels := data.Labels{"bucket": bucketBound}
timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})
frameName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, additionalLabels, &cloudMonitoringTimeSeriesFilter{ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy, logger: timeSeriesQuery.logger})
frameName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, additionalLabels, &cloudMonitoringTimeSeriesList{
parameters: &timeSeriesList{ProjectName: timeSeriesQuery.parameters.ProjectName},
aliasBy: timeSeriesQuery.aliasBy,
logger: timeSeriesQuery.logger,
})
valueField.Name = frameName
valueField.Labels = seriesLabels
setDisplayNameAsFieldName(valueField)
@ -258,7 +270,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
timeField,
valueField,
},
RefID: timeSeriesQuery.RefID,
RefID: timeSeriesQuery.refID,
}
}
}
@ -278,7 +290,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
}
if len(response.TimeSeriesData) > 0 {
dl := timeSeriesQuery.buildDeepLink()
frames = addConfigData(frames, dl, response.Unit, timeSeriesQuery.GraphPeriod)
frames = addConfigData(frames, dl, response.Unit, timeSeriesQuery.parameters.GraphPeriod)
}

queryRes.Frames = frames
@ -289,12 +301,12 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) buildDeepLink() string {
u, err := url.Parse("https://console.cloud.google.com/monitoring/metrics-explorer")
if err != nil {
timeSeriesQuery.logger.Error("Failed to generate deep link: unable to parse metrics explorer URL", "projectName", timeSeriesQuery.ProjectName, "query", timeSeriesQuery.RefID)
timeSeriesQuery.logger.Error("Failed to generate deep link: unable to parse metrics explorer URL", "projectName", timeSeriesQuery.parameters.ProjectName, "query", timeSeriesQuery.refID)
return ""
}

q := u.Query()
q.Set("project", timeSeriesQuery.ProjectName)
q.Set("project", timeSeriesQuery.parameters.ProjectName)
q.Set("Grafana_deeplink", "true")

pageState := map[string]interface{}{
@ -302,7 +314,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) buildDeepLink() string {
"constantLines": []string{},
"dataSets": []map[string]interface{}{
{
"timeSeriesQuery": timeSeriesQuery.Query,
"timeSeriesQuery": timeSeriesQuery.parameters.Query,
"targetAxis": "Y1",
"plotType": "LINE",
},
@ -322,7 +334,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) buildDeepLink() string {

blob, err := json.Marshal(pageState)
if err != nil {
timeSeriesQuery.logger.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesQuery.ProjectName, "query", timeSeriesQuery.RefID)
timeSeriesQuery.logger.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesQuery.parameters.ProjectName, "query", timeSeriesQuery.refID)
return ""
}

@ -331,7 +343,7 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) buildDeepLink() string {

accountChooserURL, err := url.Parse("https://accounts.google.com/AccountChooser")
if err != nil {
timeSeriesQuery.logger.Error("Failed to generate deep link: unable to parse account chooser URL", "ProjectName", timeSeriesQuery.ProjectName, "query", timeSeriesQuery.RefID)
timeSeriesQuery.logger.Error("Failed to generate deep link: unable to parse account chooser URL", "ProjectName", timeSeriesQuery.parameters.ProjectName, "query", timeSeriesQuery.refID)
return ""
}
accountChooserQuery := accountChooserURL.Query()
@ -342,5 +354,5 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) buildDeepLink() string {
}

func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) getRefID() string {
return timeSeriesQuery.RefID
return timeSeriesQuery.refID
}

@ -21,8 +21,10 @@ func TestTimeSeriesQuery(t *testing.T) {
t.Run("and alias template is not specified", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
parameters: &timeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
timeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
@ -37,9 +39,11 @@ func TestTimeSeriesQuery(t *testing.T) {
t.Run("and alias template is specified", func(t *testing.T) {
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
AliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
parameters: &timeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
aliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
timeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
@ -62,9 +66,11 @@ func TestTimeSeriesQuery(t *testing.T) {

res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
AliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
parameters: &timeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
aliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
timeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
@ -85,8 +91,10 @@ func TestTimeSeriesQuery(t *testing.T) {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
parameters: &timeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
},
timeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
@ -109,13 +117,15 @@ func TestTimeSeriesQuery(t *testing.T) {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
res := &backend.DataResponse{}
query := &cloudMonitoringTimeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
parameters: &timeSeriesQuery{
ProjectName: "test-proj",
Query: "test-query",
GraphPeriod: "60s",
},
timeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
GraphPeriod: "60s",
}
err = query.parseResponse(res, data, "")
require.NoError(t, err)
@ -125,12 +135,12 @@ func TestTimeSeriesQuery(t *testing.T) {
})

t.Run("appends graph_period to the query", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesQuery{}
query := &cloudMonitoringTimeSeriesQuery{parameters: &timeSeriesQuery{}}
assert.Equal(t, query.appendGraphPeriod(&backend.QueryDataRequest{Queries: []backend.DataQuery{{}}}), " | graph_period 1ms")
})

t.Run("skips graph_period if disabled", func(t *testing.T) {
query := &cloudMonitoringTimeSeriesQuery{GraphPeriod: "disabled"}
query := &cloudMonitoringTimeSeriesQuery{parameters: &timeSeriesQuery{GraphPeriod: "disabled"}}
assert.Equal(t, query.appendGraphPeriod(&backend.QueryDataRequest{Queries: []backend.DataQuery{{}}}), "")
})
}

@ -20,67 +20,68 @@ type (
getRefID() string
}

// Used to build time series filters
cloudMonitoringTimeSeriesFilter struct {
Target string
Params url.Values
RefID string
GroupBys []string
AliasBy string
ProjectName string
Selector string
Service string
Slo string
logger log.Logger
}

// Used to build MQL queries
cloudMonitoringTimeSeriesQuery struct {
RefID string
ProjectName string
Query string
IntervalMS int64
AliasBy string
timeRange backend.TimeRange
GraphPeriod string
logger log.Logger
}

metricQuery struct {
ProjectName string
MetricType string
CrossSeriesReducer string
AlignmentPeriod string
PerSeriesAligner string
GroupBys []string
Filters []string
AliasBy string
View string
EditorMode string
Query string
Preprocessor string
PreprocessorType preprocessorType
GraphPeriod string
}

sloQuery struct {
ProjectName string
AlignmentPeriod string
PerSeriesAligner string
AliasBy string
SelectorName string
ServiceId string
SloId string
LookbackPeriod string
}

// Plugin API query data request used to generate
// a cloudMonitoringTimeSeriesList or cloudMonitoringTimeSeriesQuery
grafanaQuery struct {
DatasourceId int
RefId string
QueryType string
MetricQuery metricQuery
SloQuery sloQuery
Type string
AliasBy string `json:"aliasBy"`
TimeSeriesList *timeSeriesList `json:"timeSeriesList,omitempty"`
TimeSeriesQuery *timeSeriesQuery `json:"timeSeriesQuery,omitempty"`
// TODO: Merge SloQuery into TimeSeriesList
SloQuery *sloQuery `json:"sloQuery,omitempty"`
}
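// A hedged example of a request body in the refactored shape that unmarshals
// into grafanaQuery; the values are hypothetical, and only one of
// timeSeriesList / timeSeriesQuery is expected to be set per query:
//
//	{
//		"aliasBy": "testalias",
//		"timeSeriesList": {
//			"projectName": "my-project",
//			"metricType": "a/metric/type",
//			"view": "FULL"
//		}
//	}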

// These should reflect GCM APIs
// timeSeries.list https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list
timeSeriesList struct {
ProjectName string `json:"projectName"`
CrossSeriesReducer string `json:"crossSeriesReducer"`
AlignmentPeriod string `json:"alignmentPeriod"`
PerSeriesAligner string `json:"perSeriesAligner"`
GroupBys []string `json:"groupBys"`
Filters []string `json:"filters"`
View string `json:"view"`
// Not part of the GCM API
// TODO: Use API fields instead
MetricType string `json:"metricType"`
Preprocessor string `json:"preprocessor"`
}
// TODO: sloQuery can be specified as timeSeriesList parameters
sloQuery struct {
SelectorName string `json:"selectorName"`
ServiceId string `json:"serviceId"`
SloId string `json:"sloId"`
LookbackPeriod string `json:"lookbackPeriod"`
}

// timeSeries.query https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/query
timeSeriesQuery struct {
ProjectName string `json:"projectName"`
Query string `json:"query"`
// Not part of the GCM API, will be added to Query
GraphPeriod string `json:"graphPeriod"`
}

// Internal structs. Include computed values
// cloudMonitoringTimeSeriesList is used to build time series with a filter
cloudMonitoringTimeSeriesList struct {
refID string
aliasBy string
logger log.Logger
parameters *timeSeriesList
// TODO: Merge SloQuery into TimeSeriesList
sloQ *sloQuery
// Processed properties
params url.Values
}
// cloudMonitoringTimeSeriesQuery is used to build MQL queries
cloudMonitoringTimeSeriesQuery struct {
refID string
aliasBy string
logger log.Logger
parameters *timeSeriesQuery
// Processed properties
timeRange backend.TimeRange
IntervalMS int64
}

cloudMonitoringBucketOptions struct {

@ -45,3 +45,10 @@ func addInterval(period string, field *data.Field) error {
}
return nil
}

func toString(v interface{}) string {
if v == nil {
return ""
}
return v.(string)
}
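// A hedged usage sketch for toString on loosely-typed JSON maps; the input is
// hypothetical. Note the unchecked assertion panics on non-string values, so
// callers should pass only fields known to hold strings (or be absent):
//
//	m := map[string]interface{}{"editorMode": "mql"}
//	toString(m["editorMode"])  // "mql"
//	toString(m["graphPeriod"]) // "" (absent key -> nil -> "")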