Cloudwatch: refactor parsing metric data queries (#57624)

This commit is contained in:
Shirley
2022-10-27 14:10:49 +02:00
committed by GitHub
parent b4633fc516
commit 96beb3a3f1
4 changed files with 1134 additions and 1203 deletions

View File

@@ -2,11 +2,18 @@ package models
import (
"encoding/json"
"errors"
"fmt"
"math"
"net/url"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/google/uuid"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/cloudwatch/cwlog"
)
@@ -182,3 +189,261 @@ func (q *CloudWatchQuery) BuildDeepLink(startTime time.Time, endTime time.Time,
return fmt.Sprintf(`%s#metricsV2:%s`, url.String(), fragment.Encode()), nil
}
const timeSeriesQuery = "timeSeriesQuery"
var validMetricDataID = regexp.MustCompile(`^[a-z][a-zA-Z0-9_]*$`)
type metricsDataQuery struct {
Dimensions map[string]interface{} `json:"dimensions"`
Expression string `json:"expression"`
Label *string `json:"label"`
Id string `json:"id"`
MatchExact *bool `json:"matchExact"`
MetricEditorMode *MetricEditorMode `json:"metricEditorMode"`
MetricName string `json:"metricName"`
MetricQueryType MetricQueryType `json:"metricQueryType"`
Namespace string `json:"namespace"`
Period string `json:"period"`
Region string `json:"region"`
SqlExpression string `json:"sqlExpression"`
Statistic *string `json:"statistic"`
Statistics []*string `json:"statistics"`
TimezoneUTCOffset string `json:"timezoneUTCOffset"`
QueryType string `json:"type"`
Hide *bool `json:"hide"`
Alias string `json:"alias"`
}
// ParseMetricDataQueries decodes the metric data queries JSON, validates them, sets default values and returns a slice of CloudWatchQuery.
// Each CloudWatchQuery maps one-to-one to a query editor row
func ParseMetricDataQueries(dataQueries []backend.DataQuery, startTime time.Time, endTime time.Time, dynamicLabelsEnabled bool) ([]*CloudWatchQuery, error) {
var metricDataQueries = make(map[string]metricsDataQuery)
for _, query := range dataQueries {
var metricsDataQuery metricsDataQuery
err := json.Unmarshal(query.JSON, &metricsDataQuery)
if err != nil {
return nil, &QueryError{Err: err, RefID: query.RefID}
}
queryType := metricsDataQuery.QueryType
if queryType != timeSeriesQuery && queryType != "" {
continue
}
metricDataQueries[query.RefID] = metricsDataQuery
}
var result []*CloudWatchQuery
for refId, mdq := range metricDataQueries {
cwQuery := &CloudWatchQuery{
Alias: mdq.Alias,
RefId: refId,
Id: mdq.Id,
Region: mdq.Region,
Namespace: mdq.Namespace,
MetricName: mdq.MetricName,
MetricQueryType: mdq.MetricQueryType,
SqlExpression: mdq.SqlExpression,
TimezoneUTCOffset: mdq.TimezoneUTCOffset,
Expression: mdq.Expression,
}
if err := cwQuery.validateAndSetDefaults(refId, mdq, startTime, endTime); err != nil {
return nil, &QueryError{Err: err, RefID: refId}
}
cwQuery.migrateLegacyQuery(mdq, dynamicLabelsEnabled)
result = append(result, cwQuery)
}
return result, nil
}
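// exampleParseMetricDataQueries is an illustrative sketch (not used by the plugin)
// of how the parser is typically exercised: one backend.DataQuery per query editor
// row, a time range, and the dynamic-labels feature flag. The JSON body and time
// range below are assumed values for the sketch only.
func exampleParseMetricDataQueries() ([]*CloudWatchQuery, error) {
	queries := []backend.DataQuery{{
		RefID: "ref1",
		JSON: json.RawMessage(`{
			"type":"timeSeriesQuery",
			"region":"us-east-1",
			"namespace":"AWS/EC2",
			"metricName":"CPUUtilization",
			"statistic":"Average",
			"period":"300"
		}`),
	}}
	// With refId "ref1" and no explicit id, the resulting query gets Id "queryref1",
	// Period 300, MatchExact true and ReturnData true.
	return ParseMetricDataQueries(queries, time.Now().Add(-time.Hour), time.Now(), false)
}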
func (q *CloudWatchQuery) migrateLegacyQuery(query metricsDataQuery, dynamicLabelsEnabled bool) {
q.Statistic = getStatistic(query)
q.Label = getLabel(query, dynamicLabelsEnabled)
}
func (q *CloudWatchQuery) validateAndSetDefaults(refId string, metricsDataQuery metricsDataQuery, startTime, endTime time.Time) error {
if metricsDataQuery.Statistic == nil && metricsDataQuery.Statistics == nil {
return fmt.Errorf("query must have either statistic or statistics field")
}
var err error
q.Period, err = getPeriod(metricsDataQuery, startTime, endTime)
if err != nil {
return err
}
q.Dimensions, err = parseDimensions(metricsDataQuery.Dimensions)
if err != nil {
return fmt.Errorf("failed to parse dimensions: %v", err)
}
if metricsDataQuery.Id == "" {
// Why not just use refId when no id is specified in the frontend? Ids set in the editor must start
// with a lowercase letter (see validMetricDataID) and must be unique, so a short id such as a, b or c
// would likely collide with a ref id. That's why the `query` prefix is used, e.g. refId `ref1` yields id `queryref1`.
suffix := refId
if !validMetricDataID.MatchString(suffix) {
newUUID := uuid.NewString()
suffix = strings.Replace(newUUID, "-", "", -1)
}
q.Id = fmt.Sprintf("query%s", suffix)
}
q.MatchExact = true
if metricsDataQuery.MatchExact != nil {
q.MatchExact = *metricsDataQuery.MatchExact
}
q.ReturnData = true
if metricsDataQuery.Hide != nil {
q.ReturnData = !*metricsDataQuery.Hide
}
if metricsDataQuery.QueryType == "" {
// If no type is provided we assume the caller is the alerting service, which requires data to be returned.
// Note: this is sort of a hack, but the official Grafana interfaces do not carry information about
// which service called the TsdbQueryEndpoint.Query(...) function.
q.ReturnData = true
}
if metricsDataQuery.MetricEditorMode == nil && len(metricsDataQuery.Expression) > 0 {
// this should only ever happen if this is an alerting query that has not yet been migrated in the frontend
q.MetricEditorMode = MetricEditorModeRaw
} else {
if metricsDataQuery.MetricEditorMode != nil {
q.MetricEditorMode = *metricsDataQuery.MetricEditorMode
} else {
q.MetricEditorMode = MetricEditorModeBuilder
}
}
return nil
}
// getStatistic determines the value of Statistic in a CloudWatchQuery from the metricsDataQuery input.
// It migrates queries that have a `statistics` field to use the `statistic` field instead.
// If the query used more than one stat, the first stat in the slice is used as the statistic.
// Read more here https://github.com/grafana/grafana/issues/30629
func getStatistic(query metricsDataQuery) string {
// If there's no statistic property in the JSON, we know it's the legacy format and it has to be migrated
if query.Statistic == nil {
return *query.Statistics[0]
}
return *query.Statistic
}
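// exampleGetStatistic is an illustrative sketch (assumed values) of the statistics
// migration above: a legacy query that still carries a statistics slice is reduced
// to the single statistic field by taking the first entry.
func exampleGetStatistic() string {
	average, sum := "Average", "Sum"
	// ["Average", "Sum"] migrates to "Average"; the remaining stats are dropped.
	return getStatistic(metricsDataQuery{Statistics: []*string{&average, &sum}})
}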
var aliasPatterns = map[string]string{
"metric": `${PROP('MetricName')}`,
"namespace": `${PROP('Namespace')}`,
"period": `${PROP('Period')}`,
"region": `${PROP('Region')}`,
"stat": `${PROP('Stat')}`,
"label": `${LABEL}`,
}
var legacyAliasRegexp = regexp.MustCompile(`{{\s*(.+?)\s*}}`)
func getLabel(query metricsDataQuery, dynamicLabelsEnabled bool) string {
if query.Label != nil {
return *query.Label
}
if query.Alias == "" {
return ""
}
var result string
if dynamicLabelsEnabled {
fullAliasField := query.Alias
matches := legacyAliasRegexp.FindAllStringSubmatch(query.Alias, -1)
for _, groups := range matches {
fullMatch := groups[0]
subgroup := groups[1]
if dynamicLabel, ok := aliasPatterns[subgroup]; ok {
fullAliasField = strings.ReplaceAll(fullAliasField, fullMatch, dynamicLabel)
} else {
fullAliasField = strings.ReplaceAll(fullAliasField, fullMatch, fmt.Sprintf(`${PROP('Dim.%s')}`, subgroup))
}
}
result = fullAliasField
}
return result
}
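// exampleGetLabel is an illustrative sketch (assumed alias) of the alias-to-dynamic-label
// migration above: known placeholders are mapped via aliasPatterns, and any other
// placeholder is treated as a dimension name.
func exampleGetLabel() string {
	// "{{metric}} in {{region}} ({{InstanceId}})" becomes
	// "${PROP('MetricName')} in ${PROP('Region')} (${PROP('Dim.InstanceId')})".
	return getLabel(metricsDataQuery{Alias: "{{metric}} in {{region}} ({{InstanceId}})"}, true)
}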
func getPeriod(query metricsDataQuery, startTime, endTime time.Time) (int, error) {
periodString := query.Period
var period int
var err error
if strings.ToLower(periodString) == "auto" || periodString == "" {
deltaInSeconds := endTime.Sub(startTime).Seconds()
periods := getRetainedPeriods(time.Since(startTime))
datapoints := int(math.Ceil(deltaInSeconds / 2000))
period = periods[len(periods)-1]
for _, value := range periods {
if datapoints <= value {
period = value
break
}
}
} else {
period, err = strconv.Atoi(periodString)
if err != nil {
d, err := time.ParseDuration(periodString)
if err != nil {
return 0, fmt.Errorf("failed to parse period as duration: %v", err)
}
period = int(d.Seconds())
}
}
return period, nil
}
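// examplePeriodAuto is an illustrative sketch (assumed time range) of the "auto" period
// calculation above: the range is split into roughly 2000-second chunks to get a target
// datapoint count, and the smallest retained period greater than or equal to that count is chosen.
func examplePeriodAuto() (int, error) {
	end := time.Now()
	start := end.AddDate(0, 0, -2)
	// A 2-day range gives deltaInSeconds = 172800, so ceil(172800/2000) = 87 datapoints;
	// the retained periods for a recent start are [60, 300, 900, 3600, 21600, 86400] and
	// the first value >= 87 is 300 seconds.
	return getPeriod(metricsDataQuery{Period: "auto"}, start, end)
}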
func getRetainedPeriods(timeSince time.Duration) []int {
// See https://aws.amazon.com/about-aws/whats-new/2016/11/cloudwatch-extends-metrics-retention-and-new-user-interface/
if timeSince > time.Duration(455)*24*time.Hour {
return []int{21600, 86400}
} else if timeSince > time.Duration(63)*24*time.Hour {
return []int{3600, 21600, 86400}
} else if timeSince > time.Duration(15)*24*time.Hour {
return []int{300, 900, 3600, 21600, 86400}
} else {
return []int{60, 300, 900, 3600, 21600, 86400}
}
}
func parseDimensions(dimensions map[string]interface{}) (map[string][]string, error) {
parsedDimensions := make(map[string][]string)
for k, v := range dimensions {
// This is for backwards compatibility. Before 6.5, dimension values were stored as strings rather than arrays
if value, ok := v.(string); ok {
parsedDimensions[k] = []string{value}
} else if values, ok := v.([]interface{}); ok {
for _, value := range values {
parsedDimensions[k] = append(parsedDimensions[k], value.(string))
}
} else {
return nil, errors.New("unknown type as dimension value")
}
}
sortedDimensions := sortDimensions(parsedDimensions)
return sortedDimensions, nil
}
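// exampleParseDimensions is an illustrative sketch (assumed values) of the backwards-compatible
// dimension parsing above: a pre-6.5 single string value and a newer slice value both normalize
// to a map[string][]string.
func exampleParseDimensions() (map[string][]string, error) {
	// {"InstanceId":"i-123","InstanceType":["m5.large"]} parses to
	// {"InstanceId":["i-123"],"InstanceType":["m5.large"]}.
	return parseDimensions(map[string]interface{}{
		"InstanceId":   "i-123",
		"InstanceType": []interface{}{"m5.large"},
	})
}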
func sortDimensions(dimensions map[string][]string) map[string][]string {
sortedDimensions := make(map[string][]string)
var keys []string
for k := range dimensions {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
sortedDimensions[k] = dimensions[k]
}
return sortedDimensions
}

View File

@@ -1,9 +1,12 @@
package models
import (
"encoding/json"
"fmt"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -230,3 +233,869 @@ func TestCloudWatchQuery(t *testing.T) {
assert.False(t, query.IsMathExpression(), "Expected not math expression")
})
}
func TestQueryJSON(t *testing.T) {
jsonString := []byte(`{
"type": "timeSeriesQuery"
}`)
var res metricsDataQuery
err := json.Unmarshal(jsonString, &res)
require.NoError(t, err)
assert.Equal(t, "timeSeriesQuery", res.QueryType)
}
func TestRequestParser(t *testing.T) {
t.Run("legacy statistics field is migrated: migrates first stat only", func(t *testing.T) {
oldQuery := []backend.DataQuery{
{
MaxDataPoints: 0,
QueryType: "timeSeriesQuery",
Interval: 0,
RefID: "A",
JSON: json.RawMessage(`{
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"dimensions":{
"InstanceId": ["test"]
},
"statistics":["Average", "Sum"],
"period":"600",
"hide":false
}`),
},
}
migratedQueries, err := ParseMetricDataQueries(oldQuery, time.Now(), time.Now(), false)
assert.NoError(t, err)
require.Len(t, migratedQueries, 1)
require.NotNil(t, migratedQueries[0])
migratedQuery := migratedQueries[0]
assert.Equal(t, "A", migratedQuery.RefId)
assert.Equal(t, "Average", migratedQuery.Statistic)
})
t.Run("New dimensions structure", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "ref1",
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2","test3"]
},
"statistic":"Average",
"period":"600"
}`),
},
}
results, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, results, 1)
res := results[0]
require.NotNil(t, res)
assert.Equal(t, "us-east-1", res.Region)
assert.Equal(t, "ref1", res.RefId)
assert.Equal(t, "ec2", res.Namespace)
assert.Equal(t, "CPUUtilization", res.MetricName)
assert.Equal(t, "queryref1", res.Id)
assert.Empty(t, res.Expression)
assert.Equal(t, 600, res.Period)
assert.True(t, res.ReturnData)
assert.Len(t, res.Dimensions, 2)
assert.Len(t, res.Dimensions["InstanceId"], 1)
assert.Len(t, res.Dimensions["InstanceType"], 2)
assert.Equal(t, "test3", res.Dimensions["InstanceType"][1])
assert.Equal(t, "Average", res.Statistic)
})
t.Run("Old dimensions structure (backwards compatibility)", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "ref1",
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2"]
},
"statistic":"Average",
"period":"600",
"hide": false
}`),
},
}
results, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
assert.NoError(t, err)
require.Len(t, results, 1)
res := results[0]
require.NotNil(t, res)
assert.Equal(t, "us-east-1", res.Region)
assert.Equal(t, "ref1", res.RefId)
assert.Equal(t, "ec2", res.Namespace)
assert.Equal(t, "CPUUtilization", res.MetricName)
assert.Equal(t, "queryref1", res.Id)
assert.Empty(t, res.Expression)
assert.Equal(t, 600, res.Period)
assert.True(t, res.ReturnData)
assert.Len(t, res.Dimensions, 2)
assert.Len(t, res.Dimensions["InstanceId"], 1)
assert.Len(t, res.Dimensions["InstanceType"], 1)
assert.Equal(t, "test2", res.Dimensions["InstanceType"][0])
assert.Equal(t, "Average", res.Statistic)
})
t.Run("parseDimensions returns error for non-string type dimension value", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"dimensions":{
"InstanceId":3
},
"statistic":"Average"
}`),
},
}
_, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.Error(t, err)
assert.Equal(t, `error parsing query "", failed to parse dimensions: unknown type as dimension value`, err.Error())
})
}
func Test_ParseMetricDataQueries_periods(t *testing.T) {
t.Run("Period defined in the editor by the user is being used when time range is short", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2"]
},
"statistic":"Average",
"period":"900",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, 900, res[0].Period)
})
t.Run("Period is parsed correctly if not defined by user", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2"]
},
"statistic":"Average",
"hide":false,
"period":"auto"
}`),
},
}
t.Run("Time range is 5 minutes", func(t *testing.T) {
to := time.Now()
from := to.Add(-5 * time.Minute)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 60, res[0].Period)
})
t.Run("Time range is 1 day", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -1)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 60, res[0].Period)
})
t.Run("Time range is 2 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 300, res[0].Period)
})
t.Run("Time range is 7 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -7)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 900, res[0].Period)
})
t.Run("Time range is 30 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -30)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 3600, res[0].Period)
})
t.Run("Time range is 90 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -90)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 21600, res[0].Period)
})
t.Run("Time range is 1 year", func(t *testing.T) {
to := time.Now()
from := to.AddDate(-1, 0, 0)
res, err := ParseMetricDataQueries(query, from, to, false)
require.Nil(t, err)
require.Len(t, res, 1)
assert.Equal(t, 21600, res[0].Period)
})
t.Run("Time range is 2 years", func(t *testing.T) {
to := time.Now()
from := to.AddDate(-2, 0, 0)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 86400, res[0].Period)
})
t.Run("Time range is 2 days, but 16 days ago", func(t *testing.T) {
to := time.Now().AddDate(0, 0, -14)
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 300, res[0].Period)
})
t.Run("Time range is 2 days, but 90 days ago", func(t *testing.T) {
to := time.Now().AddDate(0, 0, -88)
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 3600, res[0].Period)
})
t.Run("Time range is 2 days, but 456 days ago", func(t *testing.T) {
to := time.Now().AddDate(0, 0, -454)
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 21600, res[0].Period)
})
})
t.Run("returns error if period is invalid duration", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"statistic":"Average",
"period":"invalid"
}`),
},
}
_, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.Error(t, err)
assert.Equal(t, `error parsing query "", failed to parse period as duration: time: invalid duration "invalid"`, err.Error())
})
t.Run("returns parsed duration in seconds", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"statistic":"Average",
"period":"2h45m"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
assert.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 9900, res[0].Period)
})
}
func Test_ParseMetricDataQueries_query_type_and_metric_editor_mode_and_GMD_query_api_mode(t *testing.T) {
const dummyTestEditorMode MetricEditorMode = 99
testCases := map[string]struct {
extraDataQueryJson string
expectedMetricQueryType MetricQueryType
expectedMetricEditorMode MetricEditorMode
expectedGMDApiMode GMDApiMode
}{
"no metric query type, no metric editor mode, no expression": {
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: MetricEditorModeBuilder,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"no metric query type, no metric editor mode, has expression": {
extraDataQueryJson: `"expression":"SUM(a)",`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: MetricEditorModeRaw,
expectedGMDApiMode: GMDApiModeMathExpression,
},
"no metric query type, has metric editor mode, has expression": {
extraDataQueryJson: `"expression":"SUM(a)","metricEditorMode":99,`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"no metric query type, has metric editor mode, no expression": {
extraDataQueryJson: `"metricEditorMode":99,`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"has metric query type, has metric editor mode, no expression": {
extraDataQueryJson: `"type":"timeSeriesQuery","metricEditorMode":99,`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"has metric query type, no metric editor mode, has expression": {
extraDataQueryJson: `"type":"timeSeriesQuery","expression":"SUM(a)",`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: MetricEditorModeRaw,
expectedGMDApiMode: GMDApiModeMathExpression,
},
"has metric query type, has metric editor mode, has expression": {
extraDataQueryJson: `"type":"timeSeriesQuery","metricEditorMode":99,"expression":"SUM(a)",`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(fmt.Sprintf(
`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
%s
"period":"900"
}`, tc.extraDataQueryJson),
),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, tc.expectedMetricQueryType, res[0].MetricQueryType)
assert.Equal(t, tc.expectedMetricEditorMode, res[0].MetricEditorMode)
assert.Equal(t, tc.expectedGMDApiMode, res[0].GetGMDAPIMode())
})
}
}
func Test_ParseMetricDataQueries_hide_and_ReturnData(t *testing.T) {
t.Run("default: when query type timeSeriesQuery, default ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"type":"timeSeriesQuery"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is timeSeriesQuery, and hide is true, then ReturnData is false", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"type":"timeSeriesQuery",
"hide":true
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.False(t, res[0].ReturnData)
})
t.Run("when query type is timeSeriesQuery, and hide is false, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"type":"timeSeriesQuery",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is empty, and hide is empty, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is empty, and hide is false, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"auto",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is empty, and hide is true, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"auto",
"hide":true
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
}
func Test_ParseMetricDataQueries_ID(t *testing.T) {
t.Run("ID is the string `query` appended with refId if refId is a valid MetricData ID", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "ref1",
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "ref1", res[0].RefId)
assert.Equal(t, "queryref1", res[0].Id)
})
t.Run("Valid id is generated if ID is not provided and refId is not a valid MetricData ID", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "$$",
JSON: json.RawMessage(`{
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"refId":"$$"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "$$", res[0].RefId)
assert.Regexp(t, validMetricDataID, res[0].Id)
})
}
func Test_ParseMetricDataQueries_sets_label_when_label_is_present_in_json_query(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"some alias",
"label":"some label",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), true)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "some alias", res[0].Alias) // untouched
assert.Equal(t, "some label", res[0].Label)
}
func Test_migrateAliasToDynamicLabel_single_query_preserves_old_alias_and_creates_new_label(t *testing.T) {
testCases := map[string]struct {
inputAlias string
expectedLabel string
}{
"one known alias pattern: metric": {inputAlias: "{{metric}}", expectedLabel: "${PROP('MetricName')}"},
"one known alias pattern: namespace": {inputAlias: "{{namespace}}", expectedLabel: "${PROP('Namespace')}"},
"one known alias pattern: period": {inputAlias: "{{period}}", expectedLabel: "${PROP('Period')}"},
"one known alias pattern: region": {inputAlias: "{{region}}", expectedLabel: "${PROP('Region')}"},
"one known alias pattern: stat": {inputAlias: "{{stat}}", expectedLabel: "${PROP('Stat')}"},
"one known alias pattern: label": {inputAlias: "{{label}}", expectedLabel: "${LABEL}"},
"one unknown alias pattern becomes dimension": {inputAlias: "{{any_other_word}}", expectedLabel: "${PROP('Dim.any_other_word')}"},
"one known alias pattern with spaces": {inputAlias: "{{ metric }}", expectedLabel: "${PROP('MetricName')}"},
"multiple alias patterns": {inputAlias: "some {{combination }}{{ label}} and {{metric}}", expectedLabel: "some ${PROP('Dim.combination')}${LABEL} and ${PROP('MetricName')}"},
"empty alias still migrates to empty label": {inputAlias: "", expectedLabel: ""},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
average := "Average"
hide := false
queryToMigrate := metricsDataQuery{
Region: "us-east-1",
Namespace: "ec2",
MetricName: "CPUUtilization",
Alias: tc.inputAlias,
Dimensions: map[string]interface{}{
"InstanceId": []interface{}{"test"},
},
Statistic: &average,
Period: "600",
Hide: &hide,
}
assert.Equal(t, tc.expectedLabel, getLabel(queryToMigrate, true))
})
}
}
func Test_ParseMetricDataQueries_migrate_alias_to_label(t *testing.T) {
t.Run("migrates alias to label when label does not already exist and feature toggle enabled", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: []byte(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{period}} {{any_other_word}}",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), true)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "{{period}} {{any_other_word}}", res[0].Alias)
assert.Equal(t, "${PROP('Period')} ${PROP('Dim.any_other_word')}", res[0].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[0].Dimensions)
assert.Equal(t, true, res[0].ReturnData)
assert.Equal(t, "CPUUtilization", res[0].MetricName)
assert.Equal(t, "ec2", res[0].Namespace)
assert.Equal(t, 600, res[0].Period)
assert.Equal(t, "us-east-1", res[0].Region)
assert.Equal(t, "Average", res[0].Statistic)
})
t.Run("successfully migrates alias to dynamic label for multiple queries", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "A",
JSON: json.RawMessage(`{
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{period}} {{any_other_word}}",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
{
RefID: "B",
JSON: json.RawMessage(`{
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{ label }}",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), true)
assert.NoError(t, err)
require.Len(t, res, 2)
require.NotNil(t, res[0])
assert.Equal(t, "{{period}} {{any_other_word}}", res[0].Alias)
assert.Equal(t, "${PROP('Period')} ${PROP('Dim.any_other_word')}", res[0].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[0].Dimensions)
assert.Equal(t, true, res[0].ReturnData)
assert.Equal(t, "CPUUtilization", res[0].MetricName)
assert.Equal(t, "ec2", res[0].Namespace)
assert.Equal(t, 600, res[0].Period)
assert.Equal(t, "us-east-1", res[0].Region)
assert.Equal(t, "Average", res[0].Statistic)
require.NotNil(t, res[1])
assert.Equal(t, "{{ label }}", res[1].Alias)
assert.Equal(t, "${LABEL}", res[1].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[1].Dimensions)
assert.Equal(t, true, res[1].ReturnData)
assert.Equal(t, "CPUUtilization", res[1].MetricName)
assert.Equal(t, "ec2", res[1].Namespace)
assert.Equal(t, 600, res[1].Period)
assert.Equal(t, "us-east-1", res[1].Region)
assert.Equal(t, "Average", res[1].Statistic)
})
t.Run("does not migrate alias to label", func(t *testing.T) {
testCases := map[string]struct {
labelJson string
dynamicLabelsFeatureToggleEnabled bool
expectedLabel string
}{
"when label already exists, feature toggle enabled": {
labelJson: `"label":"some label",`,
dynamicLabelsFeatureToggleEnabled: true,
expectedLabel: "some label"},
"when label does not exist, feature toggle is disabled": {
labelJson: "",
dynamicLabelsFeatureToggleEnabled: false,
expectedLabel: "",
},
"when label already exists, feature toggle is disabled": {
labelJson: `"label":"some label",`,
dynamicLabelsFeatureToggleEnabled: false,
expectedLabel: "some label"},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(fmt.Sprintf(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{period}} {{any_other_word}}",
%s
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`, tc.labelJson)),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), tc.dynamicLabelsFeatureToggleEnabled)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "{{period}} {{any_other_word}}", res[0].Alias)
assert.Equal(t, tc.expectedLabel, res[0].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[0].Dimensions)
assert.Equal(t, true, res[0].ReturnData)
assert.Equal(t, "CPUUtilization", res[0].MetricName)
assert.Equal(t, "ec2", res[0].Namespace)
assert.Equal(t, 600, res[0].Period)
assert.Equal(t, "us-east-1", res[0].Region)
assert.Equal(t, "Average", res[0].Statistic)
})
}
})
}
func Test_ParseMetricDataQueries_statistics_and_query_type_validation_and_MatchExact_initialization(t *testing.T) {
t.Run("requires statistics or statistic field", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte("{}"),
},
}, time.Now(), time.Now(), false)
assert.Error(t, err)
assert.Equal(t, `error parsing query "", query must have either statistic or statistics field`, err.Error())
assert.Nil(t, actual)
})
t.Run("ignores query types which are not timeSeriesQuery", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"type":"some other type", "statistic":"Average", "matchExact":false}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.Empty(t, actual)
})
t.Run("accepts empty query type", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"statistic":"Average"}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.NotEmpty(t, actual)
})
t.Run("sets MatchExact nil to MatchExact true", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"statistic":"Average"}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.Len(t, actual, 1)
assert.NotNil(t, actual[0])
assert.True(t, actual[0].MatchExact)
})
t.Run("sets MatchExact", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"statistic":"Average","matchExact":false}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.Len(t, actual, 1)
assert.NotNil(t, actual[0])
assert.False(t, actual[0].MatchExact)
})
}

View File

@@ -1,309 +0,0 @@
package models
import (
"encoding/json"
"errors"
"fmt"
"math"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/google/uuid"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/cloudwatch/cwlog"
)
const timeSeriesQuery = "timeSeriesQuery"
var validMetricDataID = regexp.MustCompile(`^[a-z][a-zA-Z0-9_]*$`)
type metricsDataQuery struct {
Datasource map[string]string `json:"datasource,omitempty"`
Dimensions map[string]interface{} `json:"dimensions,omitempty"`
Expression string `json:"expression,omitempty"`
Id string `json:"id,omitempty"`
Label *string `json:"label,omitempty"`
MatchExact *bool `json:"matchExact,omitempty"`
MaxDataPoints int `json:"maxDataPoints,omitempty"`
MetricEditorMode *int `json:"metricEditorMode,omitempty"`
MetricName string `json:"metricName,omitempty"`
MetricQueryType MetricQueryType `json:"metricQueryType,omitempty"`
Namespace string `json:"namespace,omitempty"`
Period string `json:"period,omitempty"`
RefId string `json:"refId,omitempty"`
Region string `json:"region,omitempty"`
SqlExpression string `json:"sqlExpression,omitempty"`
Statistic *string `json:"statistic,omitempty"`
Statistics []*string `json:"statistics,omitempty"`
TimezoneUTCOffset string `json:"timezoneUTCOffset,omitempty"`
QueryType string `json:"type,omitempty"`
Hide *bool `json:"hide,omitempty"`
Alias string `json:"alias,omitempty"`
}
// ParseMetricDataQueries decodes the metric data queries json, validates, sets default values and returns an array of CloudWatchQueries.
// The CloudWatchQuery has a 1 to 1 mapping to a query editor row
func ParseMetricDataQueries(queries []backend.DataQuery, startTime time.Time, endTime time.Time, dynamicLabelsEnabled bool) ([]*CloudWatchQuery, error) {
var result []*CloudWatchQuery
migratedQueries, err := migrateLegacyQuery(queries, dynamicLabelsEnabled)
if err != nil {
return nil, err
}
for _, query := range migratedQueries {
var metricsDataQuery metricsDataQuery
err := json.Unmarshal(query.JSON, &metricsDataQuery)
if err != nil {
return nil, &QueryError{Err: err, RefID: query.RefID}
}
queryType := metricsDataQuery.QueryType
if queryType != timeSeriesQuery && queryType != "" {
continue
}
if metricsDataQuery.MatchExact == nil {
trueBooleanValue := true
metricsDataQuery.MatchExact = &trueBooleanValue
}
refID := query.RefID
cwQuery, err := parseRequestQuery(metricsDataQuery, refID, startTime, endTime)
if err != nil {
return nil, &QueryError{Err: err, RefID: refID}
}
result = append(result, cwQuery)
}
return result, nil
}
// migrateLegacyQuery is also done in the frontend, so this should only ever be needed for alerting queries
func migrateLegacyQuery(queries []backend.DataQuery, dynamicLabelsEnabled bool) ([]*backend.DataQuery, error) {
migratedQueries := []*backend.DataQuery{}
for _, q := range queries {
query := q
var queryJson *metricsDataQuery
err := json.Unmarshal(query.JSON, &queryJson)
if err != nil {
return nil, err
}
if err := migrateStatisticsToStatistic(queryJson); err != nil {
return nil, err
}
if queryJson.Label == nil && dynamicLabelsEnabled {
migrateAliasToDynamicLabel(queryJson)
}
query.JSON, err = json.Marshal(queryJson)
if err != nil {
return nil, err
}
migratedQueries = append(migratedQueries, &query)
}
return migratedQueries, nil
}
// migrateStatisticsToStatistic migrates queries that have a `statistics` field to use the `statistic` field instead.
// If the query used more than one stat, the first stat in the slice is used in the statistic field.
// Read more here https://github.com/grafana/grafana/issues/30629
func migrateStatisticsToStatistic(queryJson *metricsDataQuery) error {
// If there's no statistic property in the JSON, we know it's the legacy format and it has to be migrated
if queryJson.Statistic == nil {
if queryJson.Statistics == nil {
return fmt.Errorf("query must have either statistic or statistics field")
}
queryJson.Statistic = queryJson.Statistics[0]
queryJson.Statistics = nil
}
return nil
}
var aliasPatterns = map[string]string{
"metric": `${PROP('MetricName')}`,
"namespace": `${PROP('Namespace')}`,
"period": `${PROP('Period')}`,
"region": `${PROP('Region')}`,
"stat": `${PROP('Stat')}`,
"label": `${LABEL}`,
}
var legacyAliasRegexp = regexp.MustCompile(`{{\s*(.+?)\s*}}`)
func migrateAliasToDynamicLabel(queryJson *metricsDataQuery) {
fullAliasField := queryJson.Alias
if fullAliasField != "" {
matches := legacyAliasRegexp.FindAllStringSubmatch(fullAliasField, -1)
for _, groups := range matches {
fullMatch := groups[0]
subgroup := groups[1]
if dynamicLabel, ok := aliasPatterns[subgroup]; ok {
fullAliasField = strings.ReplaceAll(fullAliasField, fullMatch, dynamicLabel)
} else {
fullAliasField = strings.ReplaceAll(fullAliasField, fullMatch, fmt.Sprintf(`${PROP('Dim.%s')}`, subgroup))
}
}
}
queryJson.Label = &fullAliasField
}
func parseRequestQuery(dataQuery metricsDataQuery, refId string, startTime time.Time, endTime time.Time) (*CloudWatchQuery, error) {
cwlog.Debug("Parsing request query", "query", dataQuery)
result := CloudWatchQuery{
Alias: dataQuery.Alias,
Label: "",
MatchExact: true,
Statistic: "",
ReturnData: true,
UsedExpression: "",
RefId: refId,
Id: dataQuery.Id,
Region: dataQuery.Region,
Namespace: dataQuery.Namespace,
MetricName: dataQuery.MetricName,
MetricQueryType: dataQuery.MetricQueryType,
SqlExpression: dataQuery.SqlExpression,
TimezoneUTCOffset: dataQuery.TimezoneUTCOffset,
Expression: dataQuery.Expression,
}
reNumber := regexp.MustCompile(`^\d+$`)
dimensions, err := parseDimensions(dataQuery.Dimensions)
if err != nil {
return nil, fmt.Errorf("failed to parse dimensions: %v", err)
}
result.Dimensions = dimensions
p := dataQuery.Period
var period int
if strings.ToLower(p) == "auto" || p == "" {
deltaInSeconds := endTime.Sub(startTime).Seconds()
periods := getRetainedPeriods(time.Since(startTime))
datapoints := int(math.Ceil(deltaInSeconds / 2000))
period = periods[len(periods)-1]
for _, value := range periods {
if datapoints <= value {
period = value
break
}
}
} else {
if reNumber.Match([]byte(p)) {
period, err = strconv.Atoi(p)
if err != nil {
return nil, fmt.Errorf("failed to parse period as integer: %v", err)
}
} else {
d, err := time.ParseDuration(p)
if err != nil {
return nil, fmt.Errorf("failed to parse period as duration: %v", err)
}
period = int(d.Seconds())
}
}
result.Period = period
if dataQuery.Id == "" {
// Why not just use refId when no id is specified in the frontend? Ids set in the editor must start
// with a lowercase letter (see validMetricDataID) and must be unique, so a short id such as a, b or c
// would likely collide with a ref id. That's why the `query` prefix is used.
suffix := refId
if !validMetricDataID.MatchString(suffix) {
newUUID := uuid.NewString()
suffix = strings.Replace(newUUID, "-", "", -1)
}
result.Id = fmt.Sprintf("query%s", suffix)
}
if dataQuery.Hide != nil {
result.ReturnData = !*dataQuery.Hide
}
if dataQuery.QueryType == "" {
// If no type is provided we assume the caller is the alerting service, which requires data to be returned.
// Note: this is sort of a hack, but the official Grafana interfaces do not carry information about
// which service called the TsdbQueryEndpoint.Query(...) function.
result.ReturnData = true
}
if dataQuery.MetricEditorMode == nil && len(dataQuery.Expression) > 0 {
// this should only ever happen if this is an alerting query that has not yet been migrated in the frontend
result.MetricEditorMode = MetricEditorModeRaw
} else {
if dataQuery.MetricEditorMode != nil {
result.MetricEditorMode = MetricEditorMode(*dataQuery.MetricEditorMode)
} else {
result.MetricEditorMode = MetricEditorMode(0)
}
}
if dataQuery.Statistic != nil {
result.Statistic = *dataQuery.Statistic
}
if dataQuery.MatchExact != nil {
result.MatchExact = *dataQuery.MatchExact
}
if dataQuery.Label != nil {
result.Label = *dataQuery.Label
}
return &result, nil
}
func getRetainedPeriods(timeSince time.Duration) []int {
// See https://aws.amazon.com/about-aws/whats-new/2016/11/cloudwatch-extends-metrics-retention-and-new-user-interface/
if timeSince > time.Duration(455)*24*time.Hour {
return []int{21600, 86400}
} else if timeSince > time.Duration(63)*24*time.Hour {
return []int{3600, 21600, 86400}
} else if timeSince > time.Duration(15)*24*time.Hour {
return []int{300, 900, 3600, 21600, 86400}
} else {
return []int{60, 300, 900, 3600, 21600, 86400}
}
}
func parseDimensions(dimensions map[string]interface{}) (map[string][]string, error) {
parsedDimensions := make(map[string][]string)
for k, v := range dimensions {
// This is for backwards compatibility. Before 6.5, dimension values were stored as strings rather than arrays
if value, ok := v.(string); ok {
parsedDimensions[k] = []string{value}
} else if values, ok := v.([]interface{}); ok {
for _, value := range values {
parsedDimensions[k] = append(parsedDimensions[k], value.(string))
}
} else {
return nil, errors.New("unknown type as dimension value")
}
}
sortedDimensions := sortDimensions(parsedDimensions)
return sortedDimensions, nil
}
func sortDimensions(dimensions map[string][]string) map[string][]string {
sortedDimensions := make(map[string][]string)
var keys []string
for k := range dimensions {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
sortedDimensions[k] = dimensions[k]
}
return sortedDimensions
}

View File

@@ -1,894 +0,0 @@
package models
import (
"encoding/json"
"fmt"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestQueryJSON(t *testing.T) {
jsonString := []byte(`{
"type": "timeSeriesQuery"
}`)
var res metricsDataQuery
err := json.Unmarshal(jsonString, &res)
require.NoError(t, err)
assert.Equal(t, "timeSeriesQuery", res.QueryType)
}
func TestRequestParser(t *testing.T) {
t.Run("legacy statistics field is migrated: migrates first stat only", func(t *testing.T) {
oldQuery := []backend.DataQuery{
{
MaxDataPoints: 0,
QueryType: "timeSeriesQuery",
Interval: 0,
RefID: "A",
JSON: json.RawMessage(`{
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"dimensions":{
"InstanceId": ["test"]
},
"statistics":["Average", "Sum"],
"period":"600",
"hide":false
}`),
},
}
migratedQueries, err := ParseMetricDataQueries(oldQuery, time.Now(), time.Now(), false)
assert.NoError(t, err)
require.Len(t, migratedQueries, 1)
require.NotNil(t, migratedQueries[0])
migratedQuery := migratedQueries[0]
assert.Equal(t, "A", migratedQuery.RefId)
assert.Equal(t, "Average", migratedQuery.Statistic)
})
t.Run("New dimensions structure", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "ref1",
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2","test3"]
},
"statistic":"Average",
"period":"600"
}`),
},
}
results, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, results, 1)
res := results[0]
require.NotNil(t, res)
assert.Equal(t, "us-east-1", res.Region)
assert.Equal(t, "ref1", res.RefId)
assert.Equal(t, "ec2", res.Namespace)
assert.Equal(t, "CPUUtilization", res.MetricName)
assert.Equal(t, "queryref1", res.Id)
assert.Empty(t, res.Expression)
assert.Equal(t, 600, res.Period)
assert.True(t, res.ReturnData)
assert.Len(t, res.Dimensions, 2)
assert.Len(t, res.Dimensions["InstanceId"], 1)
assert.Len(t, res.Dimensions["InstanceType"], 2)
assert.Equal(t, "test3", res.Dimensions["InstanceType"][1])
assert.Equal(t, "Average", res.Statistic)
})
t.Run("Old dimensions structure (backwards compatibility)", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "ref1",
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2"]
},
"statistic":"Average",
"period":"600",
"hide": false
}`),
},
}
results, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
assert.NoError(t, err)
require.Len(t, results, 1)
res := results[0]
require.NotNil(t, res)
assert.Equal(t, "us-east-1", res.Region)
assert.Equal(t, "ref1", res.RefId)
assert.Equal(t, "ec2", res.Namespace)
assert.Equal(t, "CPUUtilization", res.MetricName)
assert.Equal(t, "queryref1", res.Id)
assert.Empty(t, res.Expression)
assert.Equal(t, 600, res.Period)
assert.True(t, res.ReturnData)
assert.Len(t, res.Dimensions, 2)
assert.Len(t, res.Dimensions["InstanceId"], 1)
assert.Len(t, res.Dimensions["InstanceType"], 1)
assert.Equal(t, "test2", res.Dimensions["InstanceType"][0])
assert.Equal(t, "Average", res.Statistic)
})
t.Run("parseDimensions returns error for non-string type dimension value", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"dimensions":{
"InstanceId":3
},
"statistic":"Average"
}`),
},
}
_, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.Error(t, err)
assert.Equal(t, `error parsing query "", failed to parse dimensions: unknown type as dimension value`, err.Error())
})
}
func Test_ParseMetricDataQueries_periods(t *testing.T) {
t.Run("Period defined in the editor by the user is being used when time range is short", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2"]
},
"statistic":"Average",
"period":"900",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, 900, res[0].Period)
})
t.Run("Period is parsed correctly if not defined by user", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"id": "",
"expression": "",
"dimensions":{
"InstanceId":["test"],
"InstanceType":["test2"]
},
"statistic":"Average",
"hide":false,
"period":"auto"
}`),
},
}
t.Run("Time range is 5 minutes", func(t *testing.T) {
to := time.Now()
from := to.Local().Add(time.Minute * time.Duration(5))
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 60, res[0].Period)
})
t.Run("Time range is 1 day", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -1)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 60, res[0].Period)
})
t.Run("Time range is 2 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 300, res[0].Period)
})
t.Run("Time range is 7 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -7)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 900, res[0].Period)
})
t.Run("Time range is 30 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -30)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 3600, res[0].Period)
})
t.Run("Time range is 90 days", func(t *testing.T) {
to := time.Now()
from := to.AddDate(0, 0, -90)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 21600, res[0].Period)
})
t.Run("Time range is 1 year", func(t *testing.T) {
to := time.Now()
from := to.AddDate(-1, 0, 0)
res, err := ParseMetricDataQueries(query, from, to, false)
require.Nil(t, err)
require.Len(t, res, 1)
assert.Equal(t, 21600, res[0].Period)
})
t.Run("Time range is 2 years", func(t *testing.T) {
to := time.Now()
from := to.AddDate(-2, 0, 0)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 86400, res[0].Period)
})
t.Run("Time range is 2 days, but 16 days ago", func(t *testing.T) {
to := time.Now().AddDate(0, 0, -14)
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 300, res[0].Period)
})
t.Run("Time range is 2 days, but 90 days ago", func(t *testing.T) {
to := time.Now().AddDate(0, 0, -88)
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 3600, res[0].Period)
})
t.Run("Time range is 2 days, but 456 days ago", func(t *testing.T) {
to := time.Now().AddDate(0, 0, -454)
from := to.AddDate(0, 0, -2)
res, err := ParseMetricDataQueries(query, from, to, false)
require.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 21600, res[0].Period)
})
})
t.Run("returns error if period is invalid duration", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"statistic":"Average",
"period":"invalid"
}`),
},
}
_, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.Error(t, err)
assert.Equal(t, `error parsing query "", failed to parse period as duration: time: invalid duration "invalid"`, err.Error())
})
t.Run("returns parsed duration in seconds", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"statistic":"Average",
"period":"2h45m"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
assert.NoError(t, err)
require.Len(t, res, 1)
assert.Equal(t, 9900, res[0].Period)
})
}
func Test_ParseMetricDataQueries_query_type_and_metric_editor_mode_and_GMD_query_api_mode(t *testing.T) {
const dummyTestEditorMode MetricEditorMode = 99
testCases := map[string]struct {
extraDataQueryJson string
expectedMetricQueryType MetricQueryType
expectedMetricEditorMode MetricEditorMode
expectedGMDApiMode GMDApiMode
}{
"no metric query type, no metric editor mode, no expression": {
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: MetricEditorModeBuilder,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"no metric query type, no metric editor mode, has expression": {
extraDataQueryJson: `"expression":"SUM(a)",`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: MetricEditorModeRaw,
expectedGMDApiMode: GMDApiModeMathExpression,
},
"no metric query type, has metric editor mode, has expression": {
extraDataQueryJson: `"expression":"SUM(a)","metricEditorMode":99,`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"no metric query type, has metric editor mode, no expression": {
extraDataQueryJson: `"metricEditorMode":99,`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"has metric query type, has metric editor mode, no expression": {
extraDataQueryJson: `"type":"timeSeriesQuery","metricEditorMode":99,`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
"has metric query type, no metric editor mode, has expression": {
extraDataQueryJson: `"type":"timeSeriesQuery","expression":"SUM(a)",`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: MetricEditorModeRaw,
expectedGMDApiMode: GMDApiModeMathExpression,
},
"has metric query type, has metric editor mode, has expression": {
extraDataQueryJson: `"type":"timeSeriesQuery","metricEditorMode":99,"expression":"SUM(a)",`,
expectedMetricQueryType: MetricQueryTypeSearch,
expectedMetricEditorMode: dummyTestEditorMode,
expectedGMDApiMode: GMDApiModeMetricStat,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(fmt.Sprintf(
`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
%s
"period":"900"
}`, tc.extraDataQueryJson),
),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, tc.expectedMetricQueryType, res[0].MetricQueryType)
assert.Equal(t, tc.expectedMetricEditorMode, res[0].MetricEditorMode)
assert.Equal(t, tc.expectedGMDApiMode, res[0].GetGMDAPIMode())
})
}
}
func Test_ParseMetricDataQueries_hide_and_ReturnData(t *testing.T) {
t.Run("default: when query type timeSeriesQuery, default ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"type":"timeSeriesQuery"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is timeSeriesQuery, and hide is true, then ReturnData is false", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"type":"timeSeriesQuery",
"hide":true
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.False(t, res[0].ReturnData)
})
t.Run("when query type is timeSeriesQuery, and hide is false, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"type":"timeSeriesQuery",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is empty, and hide is empty, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is empty, and hide is false, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"auto",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
t.Run("when query type is empty, and hide is true, then ReturnData is true", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"auto",
"hide":true
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
require.True(t, res[0].ReturnData)
})
}
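// The tests below cover query ID assignment: when no id is supplied and the
// refId is a valid MetricData id, the id becomes "query" + refId; otherwise a
// new id matching validMetricDataID is generated.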
func Test_ParseMetricDataQueries_ID(t *testing.T) {
t.Run("ID is the string `query` appended with refId if refId is a valid MetricData ID", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "ref1",
JSON: json.RawMessage(`{
"refId":"ref1",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "ref1", res[0].RefId)
assert.Equal(t, "queryref1", res[0].Id)
})
t.Run("Valid id is generated if ID is not provided and refId is not a valid MetricData ID", func(t *testing.T) {
query := []backend.DataQuery{
{
RefID: "$$",
JSON: json.RawMessage(`{
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"statistic":"Average",
"period":"900",
"refId":"$$"
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), false)
require.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "$$", res[0].RefId)
assert.Regexp(t, validMetricDataID, res[0].Id)
})
}
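// When the JSON query already carries a "label", it is used as-is and the
// legacy "alias" field is left untouched.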
func Test_ParseMetricDataQueries_sets_label_when_label_is_present_in_json_query(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"some alias",
"label":"some label",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), true)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "some alias", res[0].Alias) // untouched
assert.Equal(t, "some label", res[0].Label)
}
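// migrateAliasToDynamicLabel translates legacy {{...}} alias patterns into
// dynamic label expressions, e.g. {{metric}} -> ${PROP('MetricName')} and any
// unknown token -> ${PROP('Dim.<token>')}; the original alias is preserved.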
func Test_migrateAliasToDynamicLabel_single_query_preserves_old_alias_and_creates_new_label(t *testing.T) {
testCases := map[string]struct {
inputAlias string
expectedLabel string
}{
"one known alias pattern: metric": {inputAlias: "{{metric}}", expectedLabel: "${PROP('MetricName')}"},
"one known alias pattern: namespace": {inputAlias: "{{namespace}}", expectedLabel: "${PROP('Namespace')}"},
"one known alias pattern: period": {inputAlias: "{{period}}", expectedLabel: "${PROP('Period')}"},
"one known alias pattern: region": {inputAlias: "{{region}}", expectedLabel: "${PROP('Region')}"},
"one known alias pattern: stat": {inputAlias: "{{stat}}", expectedLabel: "${PROP('Stat')}"},
"one known alias pattern: label": {inputAlias: "{{label}}", expectedLabel: "${LABEL}"},
"one unknown alias pattern becomes dimension": {inputAlias: "{{any_other_word}}", expectedLabel: "${PROP('Dim.any_other_word')}"},
"one known alias pattern with spaces": {inputAlias: "{{ metric }}", expectedLabel: "${PROP('MetricName')}"},
"multiple alias patterns": {inputAlias: "some {{combination }}{{ label}} and {{metric}}", expectedLabel: "some ${PROP('Dim.combination')}${LABEL} and ${PROP('MetricName')}"},
"empty alias still migrates to empty label": {inputAlias: "", expectedLabel: ""},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
average := "Average"
falseFlag := false // avoid shadowing the predeclared identifier false
queryToMigrate := metricsDataQuery{
Region: "us-east-1",
Namespace: "ec2",
MetricName: "CPUUtilization",
Alias: tc.inputAlias,
Dimensions: map[string]interface{}{
"InstanceId": []interface{}{"test"},
},
Statistic: &average,
Period: "600",
Hide: &falseFlag,
}
migrateAliasToDynamicLabel(&queryToMigrate)
expected := metricsDataQuery{
Alias: tc.inputAlias,
Dimensions: map[string]interface{}{
"InstanceId": []interface{}{"test"},
},
Hide: &falseFlag,
Label: &tc.expectedLabel,
MetricName: "CPUUtilization",
Namespace: "ec2",
Period: "600",
Region: "us-east-1",
Statistic: &average,
}
assert.Equal(t, expected, queryToMigrate)
})
}
}
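// ParseMetricDataQueries only performs the alias-to-label migration when the
// query has no label of its own and the dynamic-labels feature toggle is
// enabled; the cases below exercise both the positive and negative paths.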
func Test_ParseMetricDataQueries_migrate_alias_to_label(t *testing.T) {
t.Run("migrates alias to label when label does not already exist and feature toggle enabled", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: []byte(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{period}} {{any_other_word}}",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), true)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "{{period}} {{any_other_word}}", res[0].Alias)
assert.Equal(t, "${PROP('Period')} ${PROP('Dim.any_other_word')}", res[0].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[0].Dimensions)
assert.Equal(t, true, res[0].ReturnData)
assert.Equal(t, "CPUUtilization", res[0].MetricName)
assert.Equal(t, "ec2", res[0].Namespace)
assert.Equal(t, 600, res[0].Period)
assert.Equal(t, "us-east-1", res[0].Region)
assert.Equal(t, "Average", res[0].Statistic)
})
t.Run("successfully migrates alias to dynamic label for multiple queries", func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{period}} {{any_other_word}}",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
{
JSON: json.RawMessage(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{ label }}",
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), true)
assert.NoError(t, err)
require.Len(t, res, 2)
require.NotNil(t, res[0])
assert.Equal(t, "{{period}} {{any_other_word}}", res[0].Alias)
assert.Equal(t, "${PROP('Period')} ${PROP('Dim.any_other_word')}", res[0].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[0].Dimensions)
assert.Equal(t, true, res[0].ReturnData)
assert.Equal(t, "CPUUtilization", res[0].MetricName)
assert.Equal(t, "ec2", res[0].Namespace)
assert.Equal(t, 600, res[0].Period)
assert.Equal(t, "us-east-1", res[0].Region)
assert.Equal(t, "Average", res[0].Statistic)
require.NotNil(t, res[1])
assert.Equal(t, "{{ label }}", res[1].Alias)
assert.Equal(t, "${LABEL}", res[1].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[1].Dimensions)
assert.Equal(t, true, res[1].ReturnData)
assert.Equal(t, "CPUUtilization", res[1].MetricName)
assert.Equal(t, "ec2", res[1].Namespace)
assert.Equal(t, 600, res[1].Period)
assert.Equal(t, "us-east-1", res[1].Region)
assert.Equal(t, "Average", res[1].Statistic)
})
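// Negative cases: migration is skipped when a label already exists or when
// the dynamic-labels feature toggle is disabled.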
t.Run("does not migrate alias to label", func(t *testing.T) {
testCases := map[string]struct {
labelJson string
dynamicLabelsFeatureToggleEnabled bool
expectedLabel string
}{
"when label already exists, feature toggle enabled": {
labelJson: `"label":"some label",`,
dynamicLabelsFeatureToggleEnabled: true,
expectedLabel: "some label"},
"when label does not exist, feature toggle is disabled": {
labelJson: "",
dynamicLabelsFeatureToggleEnabled: false,
expectedLabel: "",
},
"when label already exists, feature toggle is disabled": {
labelJson: `"label":"some label",`,
dynamicLabelsFeatureToggleEnabled: false,
expectedLabel: "some label"},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
query := []backend.DataQuery{
{
JSON: json.RawMessage(fmt.Sprintf(`{
"refId":"A",
"region":"us-east-1",
"namespace":"ec2",
"metricName":"CPUUtilization",
"alias":"{{period}} {{any_other_word}}",
%s
"dimensions":{"InstanceId":["test"]},
"statistic":"Average",
"period":"600",
"hide":false
}`, tc.labelJson)),
},
}
res, err := ParseMetricDataQueries(query, time.Now(), time.Now(), tc.dynamicLabelsFeatureToggleEnabled)
assert.NoError(t, err)
require.Len(t, res, 1)
require.NotNil(t, res[0])
assert.Equal(t, "{{period}} {{any_other_word}}", res[0].Alias)
assert.Equal(t, tc.expectedLabel, res[0].Label)
assert.Equal(t, map[string][]string{"InstanceId": {"test"}}, res[0].Dimensions)
assert.Equal(t, true, res[0].ReturnData)
assert.Equal(t, "CPUUtilization", res[0].MetricName)
assert.Equal(t, "ec2", res[0].Namespace)
assert.Equal(t, 600, res[0].Period)
assert.Equal(t, "us-east-1", res[0].Region)
assert.Equal(t, "Average", res[0].Statistic)
})
}
})
}
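// Validation behaviour: a query must carry either "statistic" or
// "statistics", non-timeSeriesQuery types are skipped, an empty type is
// accepted, and MatchExact defaults to true when not set.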
func Test_ParseMetricDataQueries_statistics_and_query_type_validation_and_MatchExact_initialization(t *testing.T) {
t.Run("requires statistics or statistic field", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte("{}"),
},
}, time.Now(), time.Now(), false)
assert.Error(t, err)
assert.Equal(t, "query must have either statistic or statistics field", err.Error())
assert.Nil(t, actual)
})
t.Run("ignores query types which are not timeSeriesQuery", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"type":"some other type", "statistic":"Average", "matchExact":false}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.Empty(t, actual)
})
t.Run("accepts empty query type", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"statistic":"Average"}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.NotEmpty(t, actual)
})
t.Run("sets MatchExact nil to MatchExact true", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"statistic":"Average"}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.Len(t, actual, 1)
assert.NotNil(t, actual[0])
assert.True(t, actual[0].MatchExact)
})
t.Run("sets MatchExact", func(t *testing.T) {
actual, err := ParseMetricDataQueries(
[]backend.DataQuery{
{
JSON: []byte(`{"statistic":"Average","matchExact":false}`),
},
}, time.Now(), time.Now(), false)
assert.NoError(t, err)
assert.Len(t, actual, 1)
assert.NotNil(t, actual[0])
assert.False(t, actual[0].MatchExact)
})
}