CloudMonitoring: Ignore min and max aggregation in MQL queries (#41302)
* ignore min and max aggregations in mql queries
* refine comment
* moving continuation check to the top of the loop
* remove empty line
* lint issue
This commit is contained in:
parent
07754351b7
commit
a852ddc10d
@@ -2,12 +2,8 @@ package cloudmonitoring

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
    "math"
    "net/url"
    "reflect"
    "strconv"
    "strings"
    "testing"
    "time"
@@ -592,363 +588,6 @@ func TestCloudMonitoring(t *testing.T) {
        })
    })

    t.Run("Parse cloud monitoring response in the time series format", func(t *testing.T) {
        t.Run("when data from query aggregated to one time series", func(t *testing.T) {
            data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json")
            require.NoError(t, err)
            assert.Equal(t, 1, len(data.TimeSeries))

            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.Len(t, frames, 1)
            assert.Equal(t, "serviceruntime.googleapis.com/api/request_count", frames[0].Fields[1].Name)
            assert.Equal(t, 3, frames[0].Fields[1].Len())

            assert.Equal(t, 0.05, frames[0].Fields[1].At(0))
            assert.Equal(t, time.Unix(int64(1536670020000/1000), 0).UTC(), frames[0].Fields[0].At(0))

            assert.Equal(t, 1.05, frames[0].Fields[1].At(1))
            assert.Equal(t, time.Unix(int64(1536670080000/1000), 0).UTC(), frames[0].Fields[0].At(1))

            assert.Equal(t, 1.0666666666667, frames[0].Fields[1].At(2))
            assert.Equal(t, time.Unix(int64(1536670260000/1000), 0).UTC(), frames[0].Fields[0].At(2))
        })

        t.Run("when data from query with no aggregation", func(t *testing.T) {
            data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
            require.NoError(t, err)
            assert.Equal(t, 3, len(data.TimeSeries))
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)

            field := res.Frames[0].Fields[1]
            assert.Equal(t, 3, field.Len())
            assert.Equal(t, 9.8566497180145, field.At(0))
            assert.Equal(t, 9.7323568146676, field.At(1))
            assert.Equal(t, 9.7730520330369, field.At(2))
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1", field.Name)
            assert.Equal(t, "collector-asia-east-1", field.Labels["metric.label.instance_name"])
            assert.Equal(t, "asia-east1-a", field.Labels["resource.label.zone"])
            assert.Equal(t, "grafana-prod", field.Labels["resource.label.project_id"])

            field = res.Frames[1].Fields[1]
            assert.Equal(t, 3, field.Len())
            assert.Equal(t, 9.0238475054502, field.At(0))
            assert.Equal(t, 8.9689492364414, field.At(1))
            assert.Equal(t, 8.8210971239023, field.At(2))
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1", field.Name)
            assert.Equal(t, "collector-europe-west-1", field.Labels["metric.label.instance_name"])
            assert.Equal(t, "europe-west1-b", field.Labels["resource.label.zone"])
            assert.Equal(t, "grafana-prod", field.Labels["resource.label.project_id"])

            field = res.Frames[2].Fields[1]
            assert.Equal(t, 3, field.Len())
            assert.Equal(t, 30.829426143318, field.At(0))
            assert.Equal(t, 30.903974115849, field.At(1))
            assert.Equal(t, 30.807846801355, field.At(2))
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1", field.Name)
            assert.Equal(t, "collector-us-east-1", field.Labels["metric.label.instance_name"])
            assert.Equal(t, "us-east1-b", field.Labels["resource.label.zone"])
            assert.Equal(t, "grafana-prod", field.Labels["resource.label.project_id"])
        })

        t.Run("when data from query with no aggregation and group bys", func(t *testing.T) {
            data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
            require.NoError(t, err)
            assert.Equal(t, 3, len(data.TimeSeries))
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, GroupBys: []string{
                "metric.label.instance_name", "resource.label.zone",
            }}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)

            assert.Equal(t, 3, len(frames))
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a", frames[0].Fields[1].Name)
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b", frames[1].Fields[1].Name)
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b", frames[2].Fields[1].Name)
        })

        t.Run("when data from query with no aggregation and alias by", func(t *testing.T) {
            data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
            require.NoError(t, err)
            assert.Equal(t, 3, len(data.TimeSeries))
            res := &backend.DataResponse{}

            t.Run("and the alias pattern is for metric type, a metric label and a resource label", func(t *testing.T) {
                query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)

                assert.Equal(t, 3, len(frames))
                assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a", frames[0].Fields[1].Name)
                assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b", frames[1].Fields[1].Name)
                assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b", frames[2].Fields[1].Name)
            })

            t.Run("and the alias pattern is for metric name", func(t *testing.T) {
                query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)

                assert.Equal(t, 3, len(frames))
                assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[0].Fields[1].Name)
                assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[1].Fields[1].Name)
                assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[2].Fields[1].Name)
            })
        })

        t.Run("when data from query is distribution with exponential bounds", func(t *testing.T) {
            data, err := loadTestFile("./test-data/3-series-response-distribution-exponential.json")
            require.NoError(t, err)
            assert.Equal(t, 1, len(data.TimeSeries))
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, 11, len(frames))
            for i := 0; i < 11; i++ {
                if i == 0 {
                    assert.Equal(t, "0", frames[i].Fields[1].Name)
                } else {
                    assert.Equal(t, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10), frames[i].Fields[1].Name)
                }
                assert.Equal(t, 3, frames[i].Fields[0].Len())
            }

            assert.Equal(t, time.Unix(int64(1536668940000/1000), 0).UTC(), frames[0].Fields[0].At(0))
            assert.Equal(t, time.Unix(int64(1536669000000/1000), 0).UTC(), frames[0].Fields[0].At(1))
            assert.Equal(t, time.Unix(int64(1536669060000/1000), 0).UTC(), frames[0].Fields[0].At(2))

            assert.Equal(t, "0", frames[0].Fields[1].Name)
            assert.Equal(t, "1", frames[1].Fields[1].Name)
            assert.Equal(t, "2", frames[2].Fields[1].Name)
            assert.Equal(t, "4", frames[3].Fields[1].Name)
            assert.Equal(t, "8", frames[4].Fields[1].Name)

            assert.Equal(t, float64(1), frames[8].Fields[1].At(0))
            assert.Equal(t, float64(1), frames[9].Fields[1].At(0))
            assert.Equal(t, float64(1), frames[10].Fields[1].At(0))
            assert.Equal(t, float64(0), frames[8].Fields[1].At(1))
            assert.Equal(t, float64(0), frames[9].Fields[1].At(1))
            assert.Equal(t, float64(1), frames[10].Fields[1].At(1))
            assert.Equal(t, float64(0), frames[8].Fields[1].At(2))
            assert.Equal(t, float64(1), frames[9].Fields[1].At(2))
            assert.Equal(t, float64(0), frames[10].Fields[1].At(2))
        })

        t.Run("when data from query is distribution with explicit bounds", func(t *testing.T) {
            data, err := loadTestFile("./test-data/4-series-response-distribution-explicit.json")
            require.NoError(t, err)
            assert.Equal(t, 1, len(data.TimeSeries))
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, 33, len(frames))
            for i := 0; i < 33; i++ {
                if i == 0 {
                    assert.Equal(t, "0", frames[i].Fields[1].Name)
                }
                assert.Equal(t, 2, frames[i].Fields[1].Len())
            }

            assert.Equal(t, time.Unix(int64(1550859086000/1000), 0).UTC(), frames[0].Fields[0].At(0))
            assert.Equal(t, time.Unix(int64(1550859146000/1000), 0).UTC(), frames[0].Fields[0].At(1))

            assert.Equal(t, "0", frames[0].Fields[1].Name)
            assert.Equal(t, "0.01", frames[1].Fields[1].Name)
            assert.Equal(t, "0.05", frames[2].Fields[1].Name)
            assert.Equal(t, "0.1", frames[3].Fields[1].Name)

            assert.Equal(t, float64(381), frames[8].Fields[1].At(0))
            assert.Equal(t, float64(212), frames[9].Fields[1].At(0))
            assert.Equal(t, float64(56), frames[10].Fields[1].At(0))
            assert.Equal(t, float64(375), frames[8].Fields[1].At(1))
            assert.Equal(t, float64(213), frames[9].Fields[1].At(1))
            assert.Equal(t, float64(56), frames[10].Fields[1].At(1))
        })

        t.Run("when data from query returns metadata system labels", func(t *testing.T) {
            data, err := loadTestFile("./test-data/5-series-response-meta-data.json")
            require.NoError(t, err)
            assert.Equal(t, 3, len(data.TimeSeries))
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            require.NoError(t, err)
            assert.Equal(t, 3, len(res.Frames))

            field := res.Frames[0].Fields[1]
            assert.Equal(t, "diana-debian9", field.Labels["metadata.system_labels.name"])
            assert.Equal(t, "value1, value2", field.Labels["metadata.system_labels.test"])
            assert.Equal(t, "us-west1", field.Labels["metadata.system_labels.region"])
            assert.Equal(t, "false", field.Labels["metadata.system_labels.spot_instance"])
            assert.Equal(t, "name1", field.Labels["metadata.user_labels.name"])
            assert.Equal(t, "region1", field.Labels["metadata.user_labels.region"])

            field = res.Frames[1].Fields[1]
            assert.Equal(t, "diana-ubuntu1910", field.Labels["metadata.system_labels.name"])
            assert.Equal(t, "value1, value2, value3", field.Labels["metadata.system_labels.test"])
            assert.Equal(t, "us-west1", field.Labels["metadata.system_labels.region"])
            assert.Equal(t, "false", field.Labels["metadata.system_labels.spot_instance"])

            field = res.Frames[2].Fields[1]
            assert.Equal(t, "premium-plugin-staging", field.Labels["metadata.system_labels.name"])
            assert.Equal(t, "value1, value2, value4, value5", field.Labels["metadata.system_labels.test"])
            assert.Equal(t, "us-central1", field.Labels["metadata.system_labels.region"])
            assert.Equal(t, "true", field.Labels["metadata.system_labels.spot_instance"])
            assert.Equal(t, "name3", field.Labels["metadata.user_labels.name"])
            assert.Equal(t, "region3", field.Labels["metadata.user_labels.region"])
        })

        t.Run("when data from query returns metadata system labels and alias by is defined", func(t *testing.T) {
            data, err := loadTestFile("./test-data/5-series-response-meta-data.json")
            require.NoError(t, err)
            assert.Equal(t, 3, len(data.TimeSeries))

            t.Run("and systemlabel contains key with array of string", func(t *testing.T) {
                res := &backend.DataResponse{}
                query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test}}"}
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)
                assert.Equal(t, 3, len(frames))
                fmt.Println(frames[0].Fields[1].Name)
                assert.Equal(t, "value1, value2", frames[0].Fields[1].Name)
                assert.Equal(t, "value1, value2, value3", frames[1].Fields[1].Name)
                assert.Equal(t, "value1, value2, value4, value5", frames[2].Fields[1].Name)
            })

            t.Run("and systemlabel contains key with array of string2", func(t *testing.T) {
                res := &backend.DataResponse{}
                query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test2}}"}
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)
                assert.Equal(t, 3, len(frames))
                assert.Equal(t, "testvalue", frames[2].Fields[1].Name)
            })
        })

        t.Run("when data from query returns slo and alias by is defined", func(t *testing.T) {
            data, err := loadTestFile("./test-data/6-series-response-slo.json")
            require.NoError(t, err)
            assert.Equal(t, 1, len(data.TimeSeries))

            t.Run("and alias by is expanded", func(t *testing.T) {
                res := &backend.DataResponse{}
                query := &cloudMonitoringTimeSeriesFilter{
                    Params:      url.Values{},
                    ProjectName: "test-proj",
                    Selector:    "select_slo_compliance",
                    Service:     "test-service",
                    Slo:         "test-slo",
                    AliasBy:     "{{project}} - {{service}} - {{slo}} - {{selector}}",
                }
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)
                assert.Equal(t, "test-proj - test-service - test-slo - select_slo_compliance", frames[0].Fields[1].Name)
            })
        })

        t.Run("when data from query returns slo and alias by is not defined", func(t *testing.T) {
            data, err := loadTestFile("./test-data/6-series-response-slo.json")
            require.NoError(t, err)
            assert.Equal(t, 1, len(data.TimeSeries))

            t.Run("and alias by is expanded", func(t *testing.T) {
                res := &backend.DataResponse{}
                query := &cloudMonitoringTimeSeriesFilter{
                    Params:      url.Values{},
                    ProjectName: "test-proj",
                    Selector:    "select_slo_compliance",
                    Service:     "test-service",
                    Slo:         "test-slo",
                }
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)
                assert.Equal(t, "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")", frames[0].Fields[1].Name)
            })
        })

        t.Run("Parse cloud monitoring unit", func(t *testing.T) {
            t.Run("when mapping is found a unit should be specified on the field config", func(t *testing.T) {
                data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json")
                require.NoError(t, err)
                assert.Equal(t, 1, len(data.TimeSeries))
                res := &backend.DataResponse{}
                query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)
                assert.Equal(t, "Bps", frames[0].Fields[1].Config.Unit)
            })

            t.Run("when mapping is found a unit should be specified on the field config", func(t *testing.T) {
                data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
                require.NoError(t, err)
                assert.Equal(t, 3, len(data.TimeSeries))
                res := &backend.DataResponse{}
                query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                require.NoError(t, err)
                assert.Equal(t, "", frames[0].Fields[1].Config.Unit)
            })
        })

        t.Run("when data from query returns MQL and alias by is defined", func(t *testing.T) {
            data, err := loadTestFile("./test-data/7-series-response-mql.json")
            require.NoError(t, err)
            assert.Equal(t, 0, len(data.TimeSeries))
            assert.Equal(t, 1, len(data.TimeSeriesData))

            t.Run("and alias by is expanded", func(t *testing.T) {
                fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

                res := &backend.DataResponse{}
                query := &cloudMonitoringTimeSeriesQuery{
                    ProjectName: "test-proj",
                    Query:       "test-query",
                    AliasBy:     "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
                    timeRange: backend.TimeRange{
                        From: fromStart,
                        To:   fromStart.Add(34 * time.Minute),
                    },
                }
                err = query.parseResponse(res, data, "")
                require.NoError(t, err)
                frames := res.Frames
                assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
            })
        })
    })

    t.Run("when interpolating filter wildcards", func(t *testing.T) {
        t.Run("and wildcard is used in the beginning and the end of the word", func(t *testing.T) {
            t.Run("and there's no wildcard in the middle of the word", func(t *testing.T) {
@@ -1200,19 +839,6 @@ func TestCloudMonitoring(t *testing.T) {
    })
}

func loadTestFile(path string) (cloudMonitoringResponse, error) {
    var data cloudMonitoringResponse

    // Can ignore gosec warning G304 here since it's a test path
    // nolint:gosec
    jsonBody, err := ioutil.ReadFile(path)
    if err != nil {
        return data, err
    }
    err = json.Unmarshal(jsonBody, &data)
    return data, err
}

func getCloudMonitoringQueriesFromInterface(t *testing.T, qes []cloudMonitoringQueryExecutor) []*cloudMonitoringTimeSeriesFilter {
    t.Helper()
@@ -0,0 +1,144 @@
{
  "timeSeriesDescriptor": {
    "labelDescriptors": [
      {
        "key": "resource.project_id"
      },
      {
        "key": "resource.zone"
      },
      {
        "key": "resource.instance_id"
      },
      {
        "key": "metric.response_code_class",
        "valueType": "INT64"
      }
    ],
    "pointDescriptors": [
      {
        "key": "value.usage.min",
        "valueType": "INT64",
        "metricKind": "GAUGE",
        "unit": "By"
      },
      {
        "key": "value.usage.mean",
        "valueType": "DOUBLE",
        "metricKind": "GAUGE",
        "unit": "By"
      },
      {
        "key": "value.usage.max",
        "valueType": "INT64",
        "metricKind": "GAUGE",
        "unit": "By"
      }
    ]
  },
  "timeSeriesData": [
    {
      "labelValues": [
        {
          "stringValue": "grafana-prod"
        },
        {
          "stringValue": "asia-northeast1-c"
        },
        {
          "stringValue": "6724404429462225363"
        },
        {
          "int64Value": "200"
        }
      ],
      "pointData": [
        {
          "values": [
            {
              "int64Value": "943294256"
            },
            {
              "doubleValue": 943302441.4
            },
            {
              "int64Value": "943294256"
            }
          ],
          "timeInterval": {
            "startTime": "2021-11-03T15:25:45Z",
            "endTime": "2021-11-03T15:25:45Z"
          }
        },
        {
          "values": [
            {
              "int64Value": "943294256"
            },
            {
              "doubleValue": 943294256
            },
            {
              "int64Value": "943294256"
            }
          ],
          "timeInterval": {
            "startTime": "2021-11-03T15:20:45Z",
            "endTime": "2021-11-03T15:20:45Z"
          }
        },
        {
          "values": [
            {
              "int64Value": "943294256"
            },
            {
              "doubleValue": 943294256
            },
            {
              "int64Value": "943294256"
            }
          ],
          "timeInterval": {
            "startTime": "2021-11-03T15:15:45Z",
            "endTime": "2021-11-03T15:15:45Z"
          }
        },
        {
          "values": [
            {
              "int64Value": "943294256"
            },
            {
              "doubleValue": 943294256
            },
            {
              "int64Value": "943294256"
            }
          ],
          "timeInterval": {
            "startTime": "2021-11-03T15:10:45Z",
            "endTime": "2021-11-03T15:10:45Z"
          }
        },
        {
          "values": [
            {
              "int64Value": "943294256"
            },
            {
              "doubleValue": 843302441.9
            },
            {
              "int64Value": "943335208"
            }
          ],
          "timeInterval": {
            "startTime": "2021-11-03T15:05:45Z",
            "endTime": "2021-11-03T15:05:45Z"
          }
        }
      ]
    }
  ]
}
386 pkg/tsdb/cloudmonitoring/time_series_filter_test.go Normal file
@@ -0,0 +1,386 @@
package cloudmonitoring

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
    "math"
    "net/url"
    "strconv"
    "testing"
    "time"

    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestTimeSeriesFilter(t *testing.T) {
    t.Run("when data from query aggregated to one time series", func(t *testing.T) {
        data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json")
        require.NoError(t, err)
        assert.Equal(t, 1, len(data.TimeSeries))

        res := &backend.DataResponse{}
        query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
        err = query.parseResponse(res, data, "")
        require.NoError(t, err)
        frames := res.Frames
        require.Len(t, frames, 1)
        assert.Equal(t, "serviceruntime.googleapis.com/api/request_count", frames[0].Fields[1].Name)
        assert.Equal(t, 3, frames[0].Fields[1].Len())

        assert.Equal(t, 0.05, frames[0].Fields[1].At(0))
        assert.Equal(t, time.Unix(int64(1536670020000/1000), 0).UTC(), frames[0].Fields[0].At(0))

        assert.Equal(t, 1.05, frames[0].Fields[1].At(1))
        assert.Equal(t, time.Unix(int64(1536670080000/1000), 0).UTC(), frames[0].Fields[0].At(1))

        assert.Equal(t, 1.0666666666667, frames[0].Fields[1].At(2))
        assert.Equal(t, time.Unix(int64(1536670260000/1000), 0).UTC(), frames[0].Fields[0].At(2))
    })

    t.Run("when data from query with no aggregation", func(t *testing.T) {
        data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
        require.NoError(t, err)
        assert.Equal(t, 3, len(data.TimeSeries))
        res := &backend.DataResponse{}
        query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
        err = query.parseResponse(res, data, "")
        require.NoError(t, err)

        field := res.Frames[0].Fields[1]
        assert.Equal(t, 3, field.Len())
        assert.Equal(t, 9.8566497180145, field.At(0))
        assert.Equal(t, 9.7323568146676, field.At(1))
        assert.Equal(t, 9.7730520330369, field.At(2))
        assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1", field.Name)
        assert.Equal(t, "collector-asia-east-1", field.Labels["metric.label.instance_name"])
        assert.Equal(t, "asia-east1-a", field.Labels["resource.label.zone"])
        assert.Equal(t, "grafana-prod", field.Labels["resource.label.project_id"])

        field = res.Frames[1].Fields[1]
        assert.Equal(t, 3, field.Len())
        assert.Equal(t, 9.0238475054502, field.At(0))
        assert.Equal(t, 8.9689492364414, field.At(1))
        assert.Equal(t, 8.8210971239023, field.At(2))
        assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1", field.Name)
        assert.Equal(t, "collector-europe-west-1", field.Labels["metric.label.instance_name"])
        assert.Equal(t, "europe-west1-b", field.Labels["resource.label.zone"])
        assert.Equal(t, "grafana-prod", field.Labels["resource.label.project_id"])

        field = res.Frames[2].Fields[1]
        assert.Equal(t, 3, field.Len())
        assert.Equal(t, 30.829426143318, field.At(0))
        assert.Equal(t, 30.903974115849, field.At(1))
        assert.Equal(t, 30.807846801355, field.At(2))
        assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1", field.Name)
        assert.Equal(t, "collector-us-east-1", field.Labels["metric.label.instance_name"])
        assert.Equal(t, "us-east1-b", field.Labels["resource.label.zone"])
        assert.Equal(t, "grafana-prod", field.Labels["resource.label.project_id"])
    })

    t.Run("when data from query with no aggregation and group bys", func(t *testing.T) {
        data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
        require.NoError(t, err)
        assert.Equal(t, 3, len(data.TimeSeries))
        res := &backend.DataResponse{}
        query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, GroupBys: []string{
            "metric.label.instance_name", "resource.label.zone",
        }}
        err = query.parseResponse(res, data, "")
        require.NoError(t, err)
        frames := res.Frames
        require.NoError(t, err)

        assert.Equal(t, 3, len(frames))
        assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a", frames[0].Fields[1].Name)
        assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b", frames[1].Fields[1].Name)
        assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b", frames[2].Fields[1].Name)
    })

    t.Run("when data from query with no aggregation and alias by", func(t *testing.T) {
        data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
        require.NoError(t, err)
        assert.Equal(t, 3, len(data.TimeSeries))
        res := &backend.DataResponse{}

        t.Run("and the alias pattern is for metric type, a metric label and a resource label", func(t *testing.T) {
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)

            assert.Equal(t, 3, len(frames))
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a", frames[0].Fields[1].Name)
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b", frames[1].Fields[1].Name)
            assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b", frames[2].Fields[1].Name)
        })

        t.Run("and the alias pattern is for metric name", func(t *testing.T) {
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)

            assert.Equal(t, 3, len(frames))
            assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[0].Fields[1].Name)
            assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[1].Fields[1].Name)
            assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[2].Fields[1].Name)
        })
    })

    t.Run("when data from query is distribution with exponential bounds", func(t *testing.T) {
        data, err := loadTestFile("./test-data/3-series-response-distribution-exponential.json")
        require.NoError(t, err)
        assert.Equal(t, 1, len(data.TimeSeries))
        res := &backend.DataResponse{}
        query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
        err = query.parseResponse(res, data, "")
        require.NoError(t, err)
        frames := res.Frames
        require.NoError(t, err)
        assert.Equal(t, 11, len(frames))
        for i := 0; i < 11; i++ {
            if i == 0 {
                assert.Equal(t, "0", frames[i].Fields[1].Name)
            } else {
                assert.Equal(t, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10), frames[i].Fields[1].Name)
            }
            assert.Equal(t, 3, frames[i].Fields[0].Len())
        }

        assert.Equal(t, time.Unix(int64(1536668940000/1000), 0).UTC(), frames[0].Fields[0].At(0))
        assert.Equal(t, time.Unix(int64(1536669000000/1000), 0).UTC(), frames[0].Fields[0].At(1))
        assert.Equal(t, time.Unix(int64(1536669060000/1000), 0).UTC(), frames[0].Fields[0].At(2))

        assert.Equal(t, "0", frames[0].Fields[1].Name)
        assert.Equal(t, "1", frames[1].Fields[1].Name)
        assert.Equal(t, "2", frames[2].Fields[1].Name)
        assert.Equal(t, "4", frames[3].Fields[1].Name)
        assert.Equal(t, "8", frames[4].Fields[1].Name)

        assert.Equal(t, float64(1), frames[8].Fields[1].At(0))
        assert.Equal(t, float64(1), frames[9].Fields[1].At(0))
        assert.Equal(t, float64(1), frames[10].Fields[1].At(0))
        assert.Equal(t, float64(0), frames[8].Fields[1].At(1))
        assert.Equal(t, float64(0), frames[9].Fields[1].At(1))
        assert.Equal(t, float64(1), frames[10].Fields[1].At(1))
        assert.Equal(t, float64(0), frames[8].Fields[1].At(2))
        assert.Equal(t, float64(1), frames[9].Fields[1].At(2))
        assert.Equal(t, float64(0), frames[10].Fields[1].At(2))
    })

    t.Run("when data from query is distribution with explicit bounds", func(t *testing.T) {
        data, err := loadTestFile("./test-data/4-series-response-distribution-explicit.json")
        require.NoError(t, err)
        assert.Equal(t, 1, len(data.TimeSeries))
        res := &backend.DataResponse{}
        query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
        err = query.parseResponse(res, data, "")
        require.NoError(t, err)
        frames := res.Frames
        require.NoError(t, err)
        assert.Equal(t, 33, len(frames))
        for i := 0; i < 33; i++ {
            if i == 0 {
                assert.Equal(t, "0", frames[i].Fields[1].Name)
            }
            assert.Equal(t, 2, frames[i].Fields[1].Len())
        }

        assert.Equal(t, time.Unix(int64(1550859086000/1000), 0).UTC(), frames[0].Fields[0].At(0))
        assert.Equal(t, time.Unix(int64(1550859146000/1000), 0).UTC(), frames[0].Fields[0].At(1))

        assert.Equal(t, "0", frames[0].Fields[1].Name)
        assert.Equal(t, "0.01", frames[1].Fields[1].Name)
        assert.Equal(t, "0.05", frames[2].Fields[1].Name)
        assert.Equal(t, "0.1", frames[3].Fields[1].Name)

        assert.Equal(t, float64(381), frames[8].Fields[1].At(0))
        assert.Equal(t, float64(212), frames[9].Fields[1].At(0))
        assert.Equal(t, float64(56), frames[10].Fields[1].At(0))
        assert.Equal(t, float64(375), frames[8].Fields[1].At(1))
        assert.Equal(t, float64(213), frames[9].Fields[1].At(1))
        assert.Equal(t, float64(56), frames[10].Fields[1].At(1))
    })

    t.Run("when data from query returns metadata system labels", func(t *testing.T) {
        data, err := loadTestFile("./test-data/5-series-response-meta-data.json")
        require.NoError(t, err)
        assert.Equal(t, 3, len(data.TimeSeries))
        res := &backend.DataResponse{}
        query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"}
        err = query.parseResponse(res, data, "")
        require.NoError(t, err)
        require.NoError(t, err)
        assert.Equal(t, 3, len(res.Frames))

        field := res.Frames[0].Fields[1]
        assert.Equal(t, "diana-debian9", field.Labels["metadata.system_labels.name"])
        assert.Equal(t, "value1, value2", field.Labels["metadata.system_labels.test"])
        assert.Equal(t, "us-west1", field.Labels["metadata.system_labels.region"])
        assert.Equal(t, "false", field.Labels["metadata.system_labels.spot_instance"])
        assert.Equal(t, "name1", field.Labels["metadata.user_labels.name"])
        assert.Equal(t, "region1", field.Labels["metadata.user_labels.region"])

        field = res.Frames[1].Fields[1]
        assert.Equal(t, "diana-ubuntu1910", field.Labels["metadata.system_labels.name"])
        assert.Equal(t, "value1, value2, value3", field.Labels["metadata.system_labels.test"])
        assert.Equal(t, "us-west1", field.Labels["metadata.system_labels.region"])
        assert.Equal(t, "false", field.Labels["metadata.system_labels.spot_instance"])

        field = res.Frames[2].Fields[1]
        assert.Equal(t, "premium-plugin-staging", field.Labels["metadata.system_labels.name"])
        assert.Equal(t, "value1, value2, value4, value5", field.Labels["metadata.system_labels.test"])
        assert.Equal(t, "us-central1", field.Labels["metadata.system_labels.region"])
        assert.Equal(t, "true", field.Labels["metadata.system_labels.spot_instance"])
        assert.Equal(t, "name3", field.Labels["metadata.user_labels.name"])
        assert.Equal(t, "region3", field.Labels["metadata.user_labels.region"])
    })

    t.Run("when data from query returns metadata system labels and alias by is defined", func(t *testing.T) {
        data, err := loadTestFile("./test-data/5-series-response-meta-data.json")
        require.NoError(t, err)
        assert.Equal(t, 3, len(data.TimeSeries))

        t.Run("and systemlabel contains key with array of string", func(t *testing.T) {
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test}}"}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, 3, len(frames))
            fmt.Println(frames[0].Fields[1].Name)
            assert.Equal(t, "value1, value2", frames[0].Fields[1].Name)
            assert.Equal(t, "value1, value2, value3", frames[1].Fields[1].Name)
            assert.Equal(t, "value1, value2, value4, value5", frames[2].Fields[1].Name)
        })

        t.Run("and systemlabel contains key with array of string2", func(t *testing.T) {
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test2}}"}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, 3, len(frames))
            assert.Equal(t, "testvalue", frames[2].Fields[1].Name)
        })
    })

    t.Run("when data from query returns slo and alias by is defined", func(t *testing.T) {
        data, err := loadTestFile("./test-data/6-series-response-slo.json")
        require.NoError(t, err)
        assert.Equal(t, 1, len(data.TimeSeries))

        t.Run("and alias by is expanded", func(t *testing.T) {
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{
                Params:      url.Values{},
                ProjectName: "test-proj",
                Selector:    "select_slo_compliance",
                Service:     "test-service",
                Slo:         "test-slo",
                AliasBy:     "{{project}} - {{service}} - {{slo}} - {{selector}}",
            }
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, "test-proj - test-service - test-slo - select_slo_compliance", frames[0].Fields[1].Name)
        })
    })

    t.Run("when data from query returns slo and alias by is not defined", func(t *testing.T) {
        data, err := loadTestFile("./test-data/6-series-response-slo.json")
        require.NoError(t, err)
        assert.Equal(t, 1, len(data.TimeSeries))

        t.Run("and alias by is expanded", func(t *testing.T) {
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{
                Params:      url.Values{},
                ProjectName: "test-proj",
                Selector:    "select_slo_compliance",
                Service:     "test-service",
                Slo:         "test-slo",
            }
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")", frames[0].Fields[1].Name)
        })
    })

    t.Run("Parse cloud monitoring unit", func(t *testing.T) {
        t.Run("when mapping is found a unit should be specified on the field config", func(t *testing.T) {
            data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json")
            require.NoError(t, err)
            assert.Equal(t, 1, len(data.TimeSeries))
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, "Bps", frames[0].Fields[1].Config.Unit)
        })

        t.Run("when mapping is not found a unit should not be specified on the field config", func(t *testing.T) {
            data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
            require.NoError(t, err)
            assert.Equal(t, 3, len(data.TimeSeries))
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}}
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            require.NoError(t, err)
            assert.Equal(t, "", frames[0].Fields[1].Config.Unit)
        })
    })

    t.Run("when data from query returns MQL and alias by is defined", func(t *testing.T) {
        data, err := loadTestFile("./test-data/7-series-response-mql.json")
        require.NoError(t, err)
        assert.Equal(t, 0, len(data.TimeSeries))
        assert.Equal(t, 1, len(data.TimeSeriesData))

        t.Run("and alias by is expanded", func(t *testing.T) {
            fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesQuery{
                ProjectName: "test-proj",
                Query:       "test-query",
                AliasBy:     "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
                timeRange: backend.TimeRange{
                    From: fromStart,
                    To:   fromStart.Add(34 * time.Minute),
                },
            }
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
        })
    })
}

func loadTestFile(path string) (cloudMonitoringResponse, error) {
    var data cloudMonitoringResponse

    // Can ignore gosec warning G304 here since it's a test path
    // nolint:gosec
    jsonBody, err := ioutil.ReadFile(path)
    if err != nil {
        return data, err
    }
    err = json.Unmarshal(jsonBody, &data)
    return data, err
}
@@ -59,8 +59,6 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, r
    span.SetTag("query", timeSeriesQuery.Query)
    span.SetTag("from", req.Queries[0].TimeRange.From)
    span.SetTag("until", req.Queries[0].TimeRange.To)
    span.SetTag("datasource_id", dsInfo.id)
    span.SetTag("org_id", req.PluginContext.OrgID)

    defer span.Finish()
@@ -125,6 +123,14 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ba
    }

    for n, d := range response.TimeSeriesDescriptor.PointDescriptors {
        // If more than one point descriptor was returned, three aggregations are returned per time series - min, mean and max.
        // This is because the period for the given table is less than half the duration which is used in the graph_period MQL function.
        // See https://cloud.google.com/monitoring/mql/reference#graph_period-tabop
        // When this is the case, we'll just ignore the min and max and use the mean value in the frame.
        if len(response.TimeSeriesDescriptor.PointDescriptors) > 1 && !strings.HasSuffix(d.Key, ".mean") {
            continue
        }

        if _, ok := labels["metric.name"]; !ok {
            labels["metric.name"] = map[string]bool{}
        }
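Stripped of the surrounding parse loop, the new continuation check reduces to a predicate over the point descriptor keys. A minimal, self-contained sketch of that rule (the pointDescriptor type and the driver below are illustrative stand-ins, not the plugin's or SDK's types):

package main

import (
    "fmt"
    "strings"
)

// pointDescriptor is a hypothetical stand-in; only the Key matters here.
type pointDescriptor struct{ Key string }

// keep mirrors the continuation check above: when the response carries more
// than one point descriptor (min, mean, max), only the ".mean" series is kept.
func keep(all []pointDescriptor, d pointDescriptor) bool {
    return len(all) <= 1 || strings.HasSuffix(d.Key, ".mean")
}

func main() {
    all := []pointDescriptor{
        {Key: "value.usage.min"},
        {Key: "value.usage.mean"},
        {Key: "value.usage.max"},
    }
    for _, d := range all {
        // Prints keep=true only for value.usage.mean, the series used in the frame.
        fmt.Printf("%s keep=%v\n", d.Key, keep(all, d))
    }
}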
80 pkg/tsdb/cloudmonitoring/time_series_query_test.go Normal file
@@ -0,0 +1,80 @@
package cloudmonitoring

import (
    "testing"
    "time"

    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestTimeSeriesQuery(t *testing.T) {
    t.Run("multiple point descriptor is returned", func(t *testing.T) {
        data, err := loadTestFile("./test-data/8-series-response-mql-multiple-point-descriptors.json")
        require.NoError(t, err)
        assert.Equal(t, 0, len(data.TimeSeries))
        assert.Equal(t, 1, len(data.TimeSeriesData))
        assert.Equal(t, 3, len(data.TimeSeriesDescriptor.PointDescriptors))
        fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

        t.Run("and alias template is not specified", func(t *testing.T) {
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesQuery{
                ProjectName: "test-proj",
                Query:       "test-query",
                timeRange: backend.TimeRange{
                    From: fromStart,
                    To:   fromStart.Add(34 * time.Minute),
                },
            }
            err = query.parseResponse(res, data, "")
            frames := res.Frames
            assert.Equal(t, "value.usage.mean", frames[0].Fields[1].Name)
            assert.Equal(t, 843302441.9, frames[0].Fields[1].At(0))
        })

        t.Run("and alias template is specified", func(t *testing.T) {
            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesQuery{
                ProjectName: "test-proj",
                Query:       "test-query",
                AliasBy:     "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
                timeRange: backend.TimeRange{
                    From: fromStart,
                    To:   fromStart.Add(34 * time.Minute),
                },
            }
            err = query.parseResponse(res, data, "")
            frames := res.Frames
            assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
        })
    })
    t.Run("single point descriptor is returned", func(t *testing.T) {
        data, err := loadTestFile("./test-data/7-series-response-mql.json")
        require.NoError(t, err)
        assert.Equal(t, 0, len(data.TimeSeries))
        assert.Equal(t, 1, len(data.TimeSeriesData))
        assert.Equal(t, 1, len(data.TimeSeriesDescriptor.PointDescriptors))

        t.Run("and alias by is expanded", func(t *testing.T) {
            fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

            res := &backend.DataResponse{}
            query := &cloudMonitoringTimeSeriesQuery{
                ProjectName: "test-proj",
                Query:       "test-query",
                AliasBy:     "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}} - {{metric.label.response_code_class}}",
                timeRange: backend.TimeRange{
                    From: fromStart,
                    To:   fromStart.Add(34 * time.Minute),
                },
            }
            err = query.parseResponse(res, data, "")
            require.NoError(t, err)
            frames := res.Frames
            assert.Equal(t, 1, len(res.Frames))
            assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363 - 200", frames[0].Fields[1].Name)
        })
    })
}