Cloudwatch: Migrate queries that use multiple stats to one query per stat (#36925)

* migrate queries that use multiple stats - squash commits

* fix typo
This commit is contained in:
Erik Sundell 2021-09-08 16:06:43 +02:00 committed by GitHub
parent ae9343f8ae
commit 5e38b02f94
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
34 changed files with 2304 additions and 1493 deletions

View File

@ -21,7 +21,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, model *
namespace := model.Get("namespace").MustString("")
metricName := model.Get("metricName").MustString("")
dimensions := model.Get("dimensions").MustMap()
statistics := parseStatistics(model)
statistic := model.Get("statistic").MustString()
period := int64(model.Get("period").MustInt(0))
if period == 0 && !usePrefixMatch {
period = 300
@ -45,9 +45,9 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, model *
if err != nil {
return nil, errutil.Wrap("failed to call cloudwatch:DescribeAlarms", err)
}
alarmNames = filterAlarms(resp, namespace, metricName, dimensions, statistics, period)
alarmNames = filterAlarms(resp, namespace, metricName, dimensions, statistic, period)
} else {
if region == "" || namespace == "" || metricName == "" || len(statistics) == 0 {
if region == "" || namespace == "" || metricName == "" || statistic == "" {
return result, errors.New("invalid annotations query")
}
@ -64,21 +64,19 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, model *
}
}
}
for _, s := range statistics {
params := &cloudwatch.DescribeAlarmsForMetricInput{
Namespace: aws.String(namespace),
MetricName: aws.String(metricName),
Dimensions: qd,
Statistic: aws.String(s),
Period: aws.Int64(period),
}
resp, err := cli.DescribeAlarmsForMetric(params)
if err != nil {
return nil, errutil.Wrap("failed to call cloudwatch:DescribeAlarmsForMetric", err)
}
for _, alarm := range resp.MetricAlarms {
alarmNames = append(alarmNames, alarm.AlarmName)
}
params := &cloudwatch.DescribeAlarmsForMetricInput{
Namespace: aws.String(namespace),
MetricName: aws.String(metricName),
Dimensions: qd,
Statistic: aws.String(statistic),
Period: aws.Int64(period),
}
resp, err := cli.DescribeAlarmsForMetric(params)
if err != nil {
return nil, errutil.Wrap("failed to call cloudwatch:DescribeAlarmsForMetric", err)
}
for _, alarm := range resp.MetricAlarms {
alarmNames = append(alarmNames, alarm.AlarmName)
}
}
@ -133,7 +131,7 @@ func transformAnnotationToTable(annotations []map[string]string, query backend.D
}
func filterAlarms(alarms *cloudwatch.DescribeAlarmsOutput, namespace string, metricName string,
dimensions map[string]interface{}, statistics []string, period int64) []*string {
dimensions map[string]interface{}, statistic string, period int64) []*string {
alarmNames := make([]*string, 0)
for _, alarm := range alarms.MetricAlarms {
@ -144,33 +142,24 @@ func filterAlarms(alarms *cloudwatch.DescribeAlarmsOutput, namespace string, met
continue
}
match := true
matchDimension := true
if len(dimensions) != 0 {
if len(alarm.Dimensions) != len(dimensions) {
match = false
matchDimension = false
} else {
for _, d := range alarm.Dimensions {
if _, ok := dimensions[*d.Name]; !ok {
match = false
matchDimension = false
}
}
}
}
if !match {
if !matchDimension {
continue
}
if len(statistics) != 0 {
found := false
for _, s := range statistics {
if *alarm.Statistic == s {
found = true
break
}
}
if !found {
continue
}
if *alarm.Statistic != statistic {
continue
}
if period != 0 && *alarm.Period != period {

View File

@ -1,24 +1,27 @@
package cloudwatch
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"time"
)
type cloudWatchQuery struct {
RefId string
Region string
Id string
Namespace string
MetricName string
Stats string
Expression string
ReturnData bool
Dimensions map[string][]string
Period int
Alias string
MatchExact bool
UsedExpression string
RequestExceededMaxLimit bool
RefId string
Region string
Id string
Namespace string
MetricName string
Statistic string
Expression string
ReturnData bool
Dimensions map[string][]string
Period int
Alias string
MatchExact bool
UsedExpression string
}
func (q *cloudWatchQuery) isMathExpression() bool {
@ -69,3 +72,51 @@ func (q *cloudWatchQuery) isMultiValuedDimensionExpression() bool {
return false
}
// buildDeepLink builds a link from this query to the corresponding metric graph in the AWS
// CloudWatch console. Math expressions cannot be represented in the console, so an empty
// link is returned for them. Search expressions are linked via the raw used expression;
// plain metric stat queries via namespace/metric/dimensions plus stat and period.
func (q *cloudWatchQuery) buildDeepLink(startTime time.Time, endTime time.Time) (string, error) {
	if q.isMathExpression() {
		return "", nil
	}

	link := &cloudWatchLink{
		Title:   q.RefId,
		View:    "timeSeries",
		Stacked: false,
		Region:  q.Region,
		Start:   startTime.UTC().Format(time.RFC3339),
		End:     endTime.UTC().Format(time.RFC3339),
	}

	if q.isSearchExpression() {
		link.Metrics = []interface{}{&metricExpression{Expression: q.UsedExpression}}
	} else {
		metricStat := []interface{}{q.Namespace, q.MetricName}
		// NOTE(review): map iteration order is random, so dimension order in the link is
		// not stable between calls; presumably the console accepts any order — confirm.
		for dimensionKey, dimensionValues := range q.Dimensions {
			// Only the first value of each dimension is carried into the console link.
			metricStat = append(metricStat, dimensionKey, dimensionValues[0])
		}
		metricStat = append(metricStat, &metricStatMeta{
			Stat:   q.Statistic,
			Period: q.Period,
		})
		link.Metrics = []interface{}{metricStat}
	}

	linkProps, err := json.Marshal(link)
	if err != nil {
		return "", fmt.Errorf("could not marshal link: %w", err)
	}

	// Renamed from `url` to avoid shadowing the net/url package inside this scope.
	consoleURL, err := url.Parse(fmt.Sprintf(`https://%s.console.aws.amazon.com/cloudwatch/deeplink.js`, q.Region))
	if err != nil {
		// Wrap the underlying error instead of discarding it.
		return "", fmt.Errorf("unable to parse CloudWatch console deep link: %w", err)
	}

	fragment := consoleURL.Query()
	fragment.Set("graph", string(linkProps))

	query := consoleURL.Query()
	query.Set("region", q.Region)
	consoleURL.RawQuery = query.Encode()

	return fmt.Sprintf(`%s#metricsV2:%s`, consoleURL.String(), fragment.Encode()), nil
}

View File

@ -12,7 +12,7 @@ func TestCloudWatchQuery(t *testing.T) {
RefId: "A",
Region: "us-east-1",
Expression: "SEARCH(someexpression)",
Stats: "Average",
Statistic: "Average",
Period: 300,
Id: "id1",
}
@ -26,7 +26,7 @@ func TestCloudWatchQuery(t *testing.T) {
RefId: "A",
Region: "us-east-1",
Expression: "",
Stats: "Average",
Statistic: "Average",
Period: 300,
Id: "id1",
MatchExact: true,
@ -44,7 +44,7 @@ func TestCloudWatchQuery(t *testing.T) {
RefId: "A",
Region: "us-east-1",
Expression: "",
Stats: "Average",
Statistic: "Average",
Period: 300,
Id: "id1",
Dimensions: map[string][]string{
@ -61,7 +61,7 @@ func TestCloudWatchQuery(t *testing.T) {
RefId: "A",
Region: "us-east-1",
Expression: "",
Stats: "Average",
Statistic: "Average",
Period: 300,
Id: "id1",
Dimensions: map[string][]string{
@ -79,7 +79,7 @@ func TestCloudWatchQuery(t *testing.T) {
RefId: "A",
Region: "us-east-1",
Expression: "",
Stats: "Average",
Statistic: "Average",
Period: 300,
Id: "id1",
Dimensions: map[string][]string{
@ -97,7 +97,7 @@ func TestCloudWatchQuery(t *testing.T) {
RefId: "A",
Region: "us-east-1",
Expression: "",
Stats: "Average",
Statistic: "Average",
Period: 300,
Id: "id1",
MatchExact: false,
@ -123,7 +123,7 @@ func TestCloudWatchQuery(t *testing.T) {
RefId: "A",
Region: "us-east-1",
Expression: "",
Stats: "Average",
Statistic: "Average",
Period: 300,
Id: "id1",
MatchExact: false,

View File

@ -8,7 +8,7 @@ import (
)
func (e *cloudWatchExecutor) buildMetricDataInput(startTime time.Time, endTime time.Time,
queries map[string]*cloudWatchQuery) (*cloudwatch.GetMetricDataInput, error) {
queries []*cloudWatchQuery) (*cloudwatch.GetMetricDataInput, error) {
metricDataInput := &cloudwatch.GetMetricDataInput{
StartTime: aws.Time(startTime),
EndTime: aws.Time(endTime),

View File

@ -20,7 +20,7 @@ func (e *cloudWatchExecutor) buildMetricDataQuery(query *cloudWatchQuery) (*clou
mdq.Expression = aws.String(query.Expression)
} else {
if query.isSearchExpression() {
mdq.Expression = aws.String(buildSearchExpression(query, query.Stats))
mdq.Expression = aws.String(buildSearchExpression(query, query.Statistic))
} else {
mdq.MetricStat = &cloudwatch.MetricStat{
Metric: &cloudwatch.Metric{
@ -37,7 +37,7 @@ func (e *cloudWatchExecutor) buildMetricDataQuery(query *cloudWatchQuery) (*clou
Value: aws.String(values[0]),
})
}
mdq.MetricStat.Stat = aws.String(query.Stats)
mdq.MetricStat.Stat = aws.String(query.Statistic)
}
}

View File

@ -0,0 +1,49 @@
package cloudwatch
import "github.com/aws/aws-sdk-go/service/cloudwatch"
// queryRowResponse represents the GetMetricData response for a query row in the query editor.
type queryRowResponse struct {
	ID                      string   // GetMetricData query id this row's results belong to
	RequestExceededMaxLimit bool     // set by the caller when CloudWatch reports the metrics limit was exceeded
	PartialData             bool     // set by the caller when results came back incomplete
	Labels                  []string // labels in the order their results were first added
	HasArithmeticError      bool     // true once addArithmeticError has been called
	ArithmeticErrorMessage  string   // message recorded by addArithmeticError
	Metrics                 map[string]*cloudwatch.MetricDataResult // results keyed by label
	StatusCode              string   // status code of the most recently added result
}
// newQueryRowResponse returns an empty queryRowResponse for the given query id,
// ready to have metric data results added to it.
func newQueryRowResponse(id string) queryRowResponse {
	// Boolean and string fields rely on their zero values; only the id and the
	// non-nil collections need explicit initialization.
	return queryRowResponse{
		ID:      id,
		Labels:  []string{},
		Metrics: map[string]*cloudwatch.MetricDataResult{},
	}
}
// addMetricDataResult registers a new metric data result under its label and
// records its status code as the row's current status.
func (q *queryRowResponse) addMetricDataResult(mdr *cloudwatch.MetricDataResult) {
	label := *mdr.Label
	q.Labels = append(q.Labels, label)
	q.Metrics[label] = mdr
	// StatusCode always reflects the most recently added result.
	q.StatusCode = *mdr.StatusCode
}
// appendTimeSeries merges the timestamps and values of a partial metric data result
// into the result already stored under the same label, creating an empty entry first
// if the label has not been seen yet.
func (q *queryRowResponse) appendTimeSeries(mdr *cloudwatch.MetricDataResult) {
	if _, exists := q.Metrics[*mdr.Label]; !exists {
		// NOTE(review): the placeholder has no Id/Label set — presumably only
		// Timestamps/Values/StatusCode are read downstream; confirm.
		q.Metrics[*mdr.Label] = &cloudwatch.MetricDataResult{}
	}
	metric := q.Metrics[*mdr.Label]
	metric.Timestamps = append(metric.Timestamps, mdr.Timestamps...)
	metric.Values = append(metric.Values, mdr.Values...)
	// StatusCode always reflects the most recently appended result.
	q.StatusCode = *mdr.StatusCode
}
// addArithmeticError flags the row as having an arithmetic error and stores its message.
func (q *queryRowResponse) addArithmeticError(message *string) {
	q.HasArithmeticError = true
	q.ArithmeticErrorMessage = *message
}

View File

@ -1,236 +0,0 @@
package cloudwatch
import (
"encoding/json"
"fmt"
"net/url"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// transformRequestQueriesToCloudWatchQueries returns a map of queries with query id as key.
// In the case a request query has more than one statistic defined, one cloudWatchQuery will
// be created for each statistic. If the query doesn't have an Id defined by the user, we'll
// give it an id with format `query[RefId]`. In the case the incoming query had more than one
// stat, it will get an id like `query[RefId]_[StatName]`, e.g. queryC_Average.
func (e *cloudWatchExecutor) transformRequestQueriesToCloudWatchQueries(requestQueries []*requestQuery) (
	map[string]*cloudWatchQuery, error) {
	plog.Debug("Transforming CloudWatch request queries")
	cloudwatchQueries := make(map[string]*cloudWatchQuery)
	for _, requestQuery := range requestQueries {
		// Fan out: one cloudWatchQuery per statistic of the request query.
		for _, stat := range requestQuery.Statistics {
			id := requestQuery.Id
			if id == "" {
				// No user-defined id: derive one from the ref id.
				id = fmt.Sprintf("query%s", requestQuery.RefId)
			}
			if len(requestQuery.Statistics) > 1 {
				// Suffix with the stat name so each per-stat query gets a unique id.
				// Dots (as in percentile stats like "p46.32") are replaced with
				// underscores, presumably to keep the id valid — see the percentile case.
				id = fmt.Sprintf("%s_%v", id, strings.ReplaceAll(*stat, ".", "_"))
			}

			if _, ok := cloudwatchQueries[id]; ok {
				return nil, fmt.Errorf("error in query %q - query ID %q is not unique", requestQuery.RefId, id)
			}

			query := &cloudWatchQuery{
				Id:         id,
				RefId:      requestQuery.RefId,
				Region:     requestQuery.Region,
				Namespace:  requestQuery.Namespace,
				MetricName: requestQuery.MetricName,
				Dimensions: requestQuery.Dimensions,
				Stats:      *stat,
				Period:     requestQuery.Period,
				Alias:      requestQuery.Alias,
				Expression: requestQuery.Expression,
				ReturnData: requestQuery.ReturnData,
				MatchExact: requestQuery.MatchExact,
			}
			cloudwatchQueries[id] = query
		}
	}

	return cloudwatchQueries, nil
}
// transformQueryResponsesToQueryResult groups the CloudWatch responses by ref ID and turns
// each group into a backend.DataResponse: frames are collected and sorted by name, warning
// notices are attached when results were truncated, and each frame gets the executed-queries
// string plus a deep link to the CloudWatch console.
func (e *cloudWatchExecutor) transformQueryResponsesToQueryResult(cloudwatchResponses []*cloudwatchResponse, requestQueries []*requestQuery, startTime time.Time, endTime time.Time) (map[string]*backend.DataResponse, error) {
	responsesByRefID := make(map[string][]*cloudwatchResponse)
	refIDs := sort.StringSlice{}
	for _, res := range cloudwatchResponses {
		refIDs = append(refIDs, res.RefId)
		responsesByRefID[res.RefId] = append(responsesByRefID[res.RefId], res)
	}
	// Ensure stable results
	refIDs.Sort()

	results := make(map[string]*backend.DataResponse)
	for _, refID := range refIDs {
		responses := responsesByRefID[refID]
		queryResult := backend.DataResponse{}
		frames := make(data.Frames, 0, len(responses))
		requestExceededMaxLimit := false
		partialData := false
		var executedQueries []executedQuery

		for _, response := range responses {
			frames = append(frames, response.DataFrames...)
			requestExceededMaxLimit = requestExceededMaxLimit || response.RequestExceededMaxLimit
			partialData = partialData || response.PartialData
			executedQueries = append(executedQueries, executedQuery{
				Expression: response.Expression,
				ID:         response.Id,
				Period:     response.Period,
			})
		}

		// Attach warning notices once, after all frames have been collected. The previous
		// version appended them inside the loop, which duplicated the notice for every
		// remaining response once a flag was set, and could index frames[0] before any
		// frame existed.
		if len(frames) > 0 {
			if requestExceededMaxLimit {
				frames[0].AppendNotices(data.Notice{
					Severity: data.NoticeSeverityWarning,
					Text:     "cloudwatch GetMetricData error: Maximum number of allowed metrics exceeded. Your search may have been limited",
				})
			}
			if partialData {
				frames[0].AppendNotices(data.Notice{
					Severity: data.NoticeSeverityWarning,
					Text:     "cloudwatch GetMetricData error: Too many datapoints requested - your search has been limited. Please try to reduce the time range",
				})
			}
		}

		sort.Slice(frames, func(i, j int) bool {
			return frames[i].Name < frames[j].Name
		})

		eq, err := json.Marshal(executedQueries)
		if err != nil {
			return nil, fmt.Errorf("could not marshal executedString struct: %w", err)
		}
		link, err := buildDeepLink(refID, requestQueries, executedQueries, startTime, endTime)
		if err != nil {
			return nil, fmt.Errorf("could not build deep link: %w", err)
		}

		// createDataLinks wraps the console link in the field-level data link structure.
		createDataLinks := func(link string) []data.DataLink {
			return []data.DataLink{{
				Title:       "View in CloudWatch console",
				TargetBlank: true,
				URL:         link,
			}}
		}

		for _, frame := range frames {
			if frame.Meta != nil {
				frame.Meta.ExecutedQueryString = string(eq)
			} else {
				frame.Meta = &data.FrameMeta{
					ExecutedQueryString: string(eq),
				}
			}

			if link == "" || len(frame.Fields) < 2 {
				continue
			}

			// The value field (index 1) carries the console deep link.
			if frame.Fields[1].Config == nil {
				frame.Fields[1].Config = &data.FieldConfig{}
			}
			frame.Fields[1].Config.Links = createDataLinks(link)
		}

		queryResult.Frames = frames
		results[refID] = &queryResult
	}

	return results, nil
}
// buildDeepLink generates a deep link from Grafana to the CloudWatch console. The link params are based on
// metric(s) for a given query row in the Query Editor.
func buildDeepLink(refID string, requestQueries []*requestQuery, executedQueries []executedQuery, startTime time.Time,
	endTime time.Time) (string, error) {
	// Math expressions cannot be represented in the console, so no link is built for them.
	if isMathExpression(executedQueries) {
		return "", nil
	}

	// Find the request query belonging to this ref ID.
	// NOTE(review): if no query matches, requestQuery stays zero-valued and the link is
	// built with an empty region ("https://.console..."); confirm callers guarantee a match.
	requestQuery := &requestQuery{}
	for _, rq := range requestQueries {
		if rq.RefId == refID {
			requestQuery = rq
			break
		}
	}

	metricItems := []interface{}{}
	cloudWatchLinkProps := &cloudWatchLink{
		Title:   refID,
		View:    "timeSeries",
		Stacked: false,
		Region:  requestQuery.Region,
		Start:   startTime.UTC().Format(time.RFC3339),
		End:     endTime.UTC().Format(time.RFC3339),
	}

	// Prefer linking by search expression when any executed query contains one.
	expressions := []interface{}{}
	for _, meta := range executedQueries {
		if strings.Contains(meta.Expression, "SEARCH(") {
			expressions = append(expressions, &metricExpression{Expression: meta.Expression})
		}
	}

	if len(expressions) != 0 {
		cloudWatchLinkProps.Metrics = expressions
	} else {
		// No search expressions: build one metric stat entry per statistic.
		for _, stat := range requestQuery.Statistics {
			metricStat := []interface{}{requestQuery.Namespace, requestQuery.MetricName}
			for dimensionKey, dimensionValues := range requestQuery.Dimensions {
				// Only the first value of each dimension is carried into the console link.
				metricStat = append(metricStat, dimensionKey, dimensionValues[0])
			}
			metricStat = append(metricStat, &metricStatMeta{
				Stat:   *stat,
				Period: requestQuery.Period,
			})
			metricItems = append(metricItems, metricStat)
		}
		cloudWatchLinkProps.Metrics = metricItems
	}

	linkProps, err := json.Marshal(cloudWatchLinkProps)
	if err != nil {
		return "", fmt.Errorf("could not marshal link: %w", err)
	}

	url, err := url.Parse(fmt.Sprintf(`https://%s.console.aws.amazon.com/cloudwatch/deeplink.js`, requestQuery.Region))
	if err != nil {
		return "", fmt.Errorf("unable to parse CloudWatch console deep link")
	}

	// The graph definition goes into the URL fragment; the empty key encodes to "=<props>",
	// which combined with the literal "graph" below yields "graph=<props>".
	fragment := url.Query()
	fragment.Set("", string(linkProps))

	q := url.Query()
	q.Set("region", requestQuery.Region)
	url.RawQuery = q.Encode()

	link := fmt.Sprintf(`%s#metricsV2:graph%s`, url.String(), fragment.Encode())

	return link, nil
}
// isMathExpression reports whether the executed queries represent a math expression.
// A SEARCH( expression anywhere disqualifies the whole set immediately; otherwise any
// non-empty expression marks the set as a math expression.
func isMathExpression(executedQueries []executedQuery) bool {
	hasMath := false
	for _, eq := range executedQueries {
		switch {
		case strings.Contains(eq.Expression, "SEARCH("):
			return false
		case eq.Expression != "":
			hasMath = true
		}
	}
	return hasMath
}

View File

@ -1,249 +0,0 @@
package cloudwatch
import (
"net/url"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestQueryTransformer covers the requestQuery -> cloudWatchQuery fan-out (one query per
// statistic), id generation and uniqueness, and deep-link construction.
func TestQueryTransformer(t *testing.T) {
	executor := newExecutor(nil, nil, &setting.Cfg{}, fakeSessionCache{})
	t.Run("One cloudwatchQuery is generated when its request query has one stat", func(t *testing.T) {
		requestQueries := []*requestQuery{
			{
				RefId:      "D",
				Region:     "us-east-1",
				Namespace:  "ec2",
				MetricName: "CPUUtilization",
				Statistics: aws.StringSlice([]string{"Average"}),
				Period:     600,
				Id:         "",
			},
		}

		res, err := executor.transformRequestQueriesToCloudWatchQueries(requestQueries)
		require.NoError(t, err)
		assert.Len(t, res, 1)
	})

	t.Run("Two cloudwatchQuery is generated when there's two stats", func(t *testing.T) {
		requestQueries := []*requestQuery{
			{
				RefId:      "D",
				Region:     "us-east-1",
				Namespace:  "ec2",
				MetricName: "CPUUtilization",
				Statistics: aws.StringSlice([]string{"Average", "Sum"}),
				Period:     600,
				Id:         "",
			},
		}

		res, err := executor.transformRequestQueriesToCloudWatchQueries(requestQueries)
		require.NoError(t, err)
		assert.Len(t, res, 2)
	})
	t.Run("id is given by user that will be used in the cloudwatch query", func(t *testing.T) {
		requestQueries := []*requestQuery{
			{
				RefId:      "D",
				Region:     "us-east-1",
				Namespace:  "ec2",
				MetricName: "CPUUtilization",
				Statistics: aws.StringSlice([]string{"Average"}),
				Period:     600,
				Id:         "myid",
			},
		}

		res, err := executor.transformRequestQueriesToCloudWatchQueries(requestQueries)
		require.Nil(t, err)
		assert.Equal(t, len(res), 1)
		assert.Contains(t, res, "myid")
	})

	t.Run("ID is not given by user", func(t *testing.T) {
		t.Run("ID will be generated based on ref ID if query only has one stat", func(t *testing.T) {
			requestQueries := []*requestQuery{
				{
					RefId:      "D",
					Region:     "us-east-1",
					Namespace:  "ec2",
					MetricName: "CPUUtilization",
					Statistics: aws.StringSlice([]string{"Average"}),
					Period:     600,
					Id:         "",
				},
			}

			res, err := executor.transformRequestQueriesToCloudWatchQueries(requestQueries)
			require.NoError(t, err)
			assert.Len(t, res, 1)
			assert.Contains(t, res, "queryD")
		})

		t.Run("ID will be generated based on ref and stat name if query has two stats", func(t *testing.T) {
			requestQueries := []*requestQuery{
				{
					RefId:      "D",
					Region:     "us-east-1",
					Namespace:  "ec2",
					MetricName: "CPUUtilization",
					Statistics: aws.StringSlice([]string{"Average", "Sum"}),
					Period:     600,
					Id:         "",
				},
			}

			res, err := executor.transformRequestQueriesToCloudWatchQueries(requestQueries)
			require.NoError(t, err)
			assert.Len(t, res, 2)
			assert.Contains(t, res, "queryD_Sum")
			assert.Contains(t, res, "queryD_Average")
		})
	})

	t.Run("dot should be removed when query has more than one stat and one of them is a percentile", func(t *testing.T) {
		requestQueries := []*requestQuery{
			{
				RefId:      "D",
				Region:     "us-east-1",
				Namespace:  "ec2",
				MetricName: "CPUUtilization",
				Statistics: aws.StringSlice([]string{"Average", "p46.32"}),
				Period:     600,
				Id:         "",
			},
		}

		res, err := executor.transformRequestQueriesToCloudWatchQueries(requestQueries)
		require.NoError(t, err)
		assert.Len(t, res, 2)
		// The dot in "p46.32" is replaced with an underscore in the generated id.
		assert.Contains(t, res, "queryD_p46_32")
	})

	t.Run("should return an error if two queries have the same id", func(t *testing.T) {
		requestQueries := []*requestQuery{
			{
				RefId:      "D",
				Region:     "us-east-1",
				Namespace:  "ec2",
				MetricName: "CPUUtilization",
				Statistics: aws.StringSlice([]string{"Average", "p46.32"}),
				Period:     600,
				Id:         "myId",
			},
			{
				RefId:      "E",
				Region:     "us-east-1",
				Namespace:  "ec2",
				MetricName: "CPUUtilization",
				Statistics: aws.StringSlice([]string{"Average", "p46.32"}),
				Period:     600,
				Id:         "myId",
			},
		}

		res, err := executor.transformRequestQueriesToCloudWatchQueries(requestQueries)
		require.Nil(t, res)
		assert.Error(t, err)
	})

	// Shared fixture for the deep-link sub-tests below; buildDeepLink is called with
	// refID "E", which selects the second request query (two stats).
	requestQueries := []*requestQuery{
		{
			RefId:      "D",
			Region:     "us-east-1",
			Namespace:  "ec2",
			MetricName: "CPUUtilization",
			Statistics: aws.StringSlice([]string{"Sum"}),
			Period:     600,
			Id:         "myId",
		},
		{
			RefId:      "E",
			Region:     "us-east-1",
			Namespace:  "ec2",
			MetricName: "CPUUtilization",
			Statistics: aws.StringSlice([]string{"Average", "p46.32"}),
			Period:     600,
			Id:         "myId",
		},
	}

	t.Run("A deep link that reference two metric stat metrics is created based on a request query with two stats", func(t *testing.T) {
		start, err := time.Parse(time.RFC3339, "2018-03-15T13:00:00Z")
		require.NoError(t, err)
		end, err := time.Parse(time.RFC3339, "2018-03-18T13:34:00Z")
		require.NoError(t, err)

		executedQueries := []executedQuery{{
			Expression: ``,
			ID:         "D",
			Period:     600,
		}}

		link, err := buildDeepLink("E", requestQueries, executedQueries, start, end)
		require.NoError(t, err)

		parsedURL, err := url.Parse(link)
		require.NoError(t, err)

		decodedLink, err := url.PathUnescape(parsedURL.String())
		require.NoError(t, err)

		expected := `https://us-east-1.console.aws.amazon.com/cloudwatch/deeplink.js?region=us-east-1#metricsV2:graph={"view":"timeSeries","stacked":false,"title":"E","start":"2018-03-15T13:00:00Z","end":"2018-03-18T13:34:00Z","region":"us-east-1","metrics":[["ec2","CPUUtilization",{"stat":"Average","period":600}],["ec2","CPUUtilization",{"stat":"p46.32","period":600}]]}`
		assert.Equal(t, expected, decodedLink)
	})

	t.Run("A deep link that reference an expression based metric is created based on a request query with one stat", func(t *testing.T) {
		start, err := time.Parse(time.RFC3339, "2018-03-15T13:00:00Z")
		require.NoError(t, err)
		end, err := time.Parse(time.RFC3339, "2018-03-18T13:34:00Z")
		require.NoError(t, err)

		executedQueries := []executedQuery{{
			Expression: `REMOVE_EMPTY(SEARCH('Namespace="AWS/EC2" MetricName="CPUUtilization"', 'Sum', 600))`,
			ID:         "D",
			Period:     600,
		}}

		link, err := buildDeepLink("E", requestQueries, executedQueries, start, end)
		require.NoError(t, err)

		parsedURL, err := url.Parse(link)
		require.NoError(t, err)

		decodedLink, err := url.PathUnescape(parsedURL.String())
		require.NoError(t, err)

		expected := `https://us-east-1.console.aws.amazon.com/cloudwatch/deeplink.js?region=us-east-1#metricsV2:graph={"view":"timeSeries","stacked":false,"title":"E","start":"2018-03-15T13:00:00Z","end":"2018-03-18T13:34:00Z","region":"us-east-1","metrics":[{"expression":"REMOVE_EMPTY(SEARCH('Namespace=\"AWS/EC2\"+MetricName=\"CPUUtilization\"',+'Sum',+600))"}]}`
		assert.Equal(t, expected, decodedLink)
	})

	t.Run("A deep link is not built in case any of the executedQueries are math expressions", func(t *testing.T) {
		start, err := time.Parse(time.RFC3339, "2018-03-15T13:00:00Z")
		require.NoError(t, err)
		end, err := time.Parse(time.RFC3339, "2018-03-18T13:34:00Z")
		require.NoError(t, err)

		executedQueries := []executedQuery{{
			Expression: `a * 2`,
			ID:         "D",
			Period:     600,
		}}

		link, err := buildDeepLink("E", requestQueries, executedQueries, start, end)
		require.NoError(t, err)

		parsedURL, err := url.Parse(link)
		require.NoError(t, err)

		decodedLink, err := url.PathUnescape(parsedURL.String())
		require.NoError(t, err)

		assert.Equal(t, "", decodedLink)
	})
}

View File

@ -10,15 +10,19 @@ import (
"strings"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
)
// Parses the json queries and returns a requestQuery. The requestQuery has a 1 to 1 mapping to a query editor row
func (e *cloudWatchExecutor) parseQueries(queries []backend.DataQuery, startTime time.Time, endTime time.Time) (map[string][]*requestQuery, error) {
requestQueries := make(map[string][]*requestQuery)
for _, query := range queries {
// parseQueries parses the json queries and returns a map of cloudWatchQueries by region. The cloudWatchQuery has a 1 to 1 mapping to a query editor row
func (e *cloudWatchExecutor) parseQueries(queries []backend.DataQuery, startTime time.Time, endTime time.Time) (map[string][]*cloudWatchQuery, error) {
requestQueries := make(map[string][]*cloudWatchQuery)
migratedQueries, err := migrateLegacyQuery(queries, startTime, endTime)
if err != nil {
return nil, err
}
for _, query := range migratedQueries {
model, err := simplejson.NewJson(query.JSON)
if err != nil {
return nil, &queryError{err: err, RefID: query.RefID}
@ -36,7 +40,7 @@ func (e *cloudWatchExecutor) parseQueries(queries []backend.DataQuery, startTime
}
if _, exist := requestQueries[query.Region]; !exist {
requestQueries[query.Region] = make([]*requestQuery, 0)
requestQueries[query.Region] = []*cloudWatchQuery{}
}
requestQueries[query.Region] = append(requestQueries[query.Region], query)
}
@ -44,7 +48,41 @@ func (e *cloudWatchExecutor) parseQueries(queries []backend.DataQuery, startTime
return requestQueries, nil
}
func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time, endTime time.Time) (*requestQuery, error) {
// migrateLegacyQuery migrates queries that have a `statistics` field to use the `statistic`
// field instead. This migration is also done in the frontend, so this should only ever be
// needed for alerting queries. In case the query used more than one stat, the first stat in
// the slice will be used in the statistic field.
// Read more here https://github.com/grafana/grafana/issues/30629
//
// NOTE: startTime and endTime are currently unused; they are kept for interface
// compatibility with callers.
func migrateLegacyQuery(queries []backend.DataQuery, startTime time.Time, endTime time.Time) ([]*backend.DataQuery, error) {
	migratedQueries := []*backend.DataQuery{}
	for _, q := range queries {
		// Copy the loop variable so the pointer appended below is stable and the
		// original slice element is never mutated.
		query := q
		model, err := simplejson.NewJson(query.JSON)
		if err != nil {
			return nil, err
		}

		_, err = model.Get("statistic").String()
		// If there's not a statistic property in the json, we know it's the legacy format and then it has to be migrated
		if err != nil {
			stats, err := model.Get("statistics").StringArray()
			if err != nil {
				return nil, fmt.Errorf("query must have either statistic or statistics field")
			}
			// Guard against an empty statistics array: StringArray() returns no error
			// for "[]", and stats[0] would panic below.
			if len(stats) == 0 {
				return nil, fmt.Errorf("query must have either statistic or statistics field")
			}

			model.Del("statistics")
			model.Set("statistic", stats[0])
			query.JSON, err = model.MarshalJSON()
			if err != nil {
				return nil, err
			}
		}

		migratedQueries = append(migratedQueries, &query)
	}

	return migratedQueries, nil
}
func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time, endTime time.Time) (*cloudWatchQuery, error) {
plog.Debug("Parsing request query", "query", model)
reNumber := regexp.MustCompile(`^\d+$`)
region, err := model.Get("region").String()
@ -63,7 +101,11 @@ func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time
if err != nil {
return nil, fmt.Errorf("failed to parse dimensions: %v", err)
}
statistics := parseStatistics(model)
statistic, err := model.Get("statistic").String()
if err != nil {
return nil, fmt.Errorf("failed to parse statistic: %v", err)
}
p := model.Get("period").MustString("")
var period int
@ -94,6 +136,12 @@ func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time
}
id := model.Get("id").MustString("")
if id == "" {
// Why not just use refId if id is not specified in the frontend? When specifying an id in the editor,
// and alphabetical must be used. The id must be unique, so if an id like for example a, b or c would be used,
// it would likely collide with some ref id. That's why the `query` prefix is used.
id = fmt.Sprintf("query%s", refId)
}
expression := model.Get("expression").MustString("")
alias := model.Get("alias").MustString()
returnData := !model.Get("hide").MustBool(false)
@ -107,19 +155,20 @@ func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time
matchExact := model.Get("matchExact").MustBool(true)
return &requestQuery{
RefId: refId,
Region: region,
Namespace: namespace,
MetricName: metricName,
Dimensions: dimensions,
Statistics: aws.StringSlice(statistics),
Period: period,
Alias: alias,
Id: id,
Expression: expression,
ReturnData: returnData,
MatchExact: matchExact,
return &cloudWatchQuery{
RefId: refId,
Region: region,
Id: id,
Namespace: namespace,
MetricName: metricName,
Statistic: statistic,
Expression: expression,
ReturnData: returnData,
Dimensions: dimensions,
Period: period,
Alias: alias,
MatchExact: matchExact,
UsedExpression: "",
}, nil
}
@ -136,15 +185,6 @@ func getRetainedPeriods(timeSince time.Duration) []int {
}
}
// parseStatistics extracts the legacy "statistics" array from the query model as a
// string slice (nil when the property is absent or empty).
func parseStatistics(model *simplejson.Json) []string {
	var stats []string
	for _, raw := range model.Get("statistics").MustArray() {
		stats = append(stats, raw.(string))
	}
	return stats
}
func parseDimensions(model *simplejson.Json) (map[string][]string, error) {
parsedDimensions := make(map[string][]string)
for k, v := range model.Get("dimensions").MustMap() {

View File

@ -4,14 +4,51 @@ import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestRequestParser(t *testing.T) {
timeRange := plugins.NewDataTimeRange("now-1h", "now-2h")
t.Run("Query migration ", func(t *testing.T) {
t.Run("legacy statistics field is migrated", func(t *testing.T) {
startTime := time.Now()
endTime := startTime.Add(2 * time.Hour)
oldQuery := &backend.DataQuery{
MaxDataPoints: 0,
QueryType: "timeSeriesQuery",
Interval: 0,
}
oldQuery.RefID = "A"
oldQuery.JSON = []byte(`{
"region": "us-east-1",
"namespace": "ec2",
"metricName": "CPUUtilization",
"dimensions": {
"InstanceId": ["test"]
},
"statistics": ["Average", "Sum"],
"period": "600",
"hide": false
}`)
migratedQueries, err := migrateLegacyQuery([]backend.DataQuery{*oldQuery}, startTime, endTime)
require.NoError(t, err)
assert.Equal(t, 1, len(migratedQueries))
migratedQuery := migratedQueries[0]
assert.Equal(t, "A", migratedQuery.RefID)
model, err := simplejson.NewJson(migratedQuery.JSON)
require.NoError(t, err)
assert.Equal(t, "Average", model.Get("statistic").MustString())
res, err := model.Get("statistic").Array()
assert.Error(t, err)
assert.Nil(t, res)
})
})
timeRange := tsdb.NewTimeRange("now-1h", "now-2h")
from, err := timeRange.ParseFrom()
require.NoError(t, err)
to, err := timeRange.ParseTo()
@ -29,9 +66,9 @@ func TestRequestParser(t *testing.T) {
"InstanceId": []interface{}{"test"},
"InstanceType": []interface{}{"test2", "test3"},
},
"statistics": []interface{}{"Average"},
"period": "600",
"hide": false,
"statistic": "Average",
"period": "600",
"hide": false,
})
res, err := parseRequestQuery(query, "ref1", from, to)
@ -40,7 +77,7 @@ func TestRequestParser(t *testing.T) {
assert.Equal(t, "ref1", res.RefId)
assert.Equal(t, "ec2", res.Namespace)
assert.Equal(t, "CPUUtilization", res.MetricName)
assert.Empty(t, res.Id)
assert.Equal(t, "queryref1", res.Id)
assert.Empty(t, res.Expression)
assert.Equal(t, 600, res.Period)
assert.True(t, res.ReturnData)
@ -48,8 +85,7 @@ func TestRequestParser(t *testing.T) {
assert.Len(t, res.Dimensions["InstanceId"], 1)
assert.Len(t, res.Dimensions["InstanceType"], 2)
assert.Equal(t, "test3", res.Dimensions["InstanceType"][1])
assert.Len(t, res.Statistics, 1)
assert.Equal(t, "Average", *res.Statistics[0])
assert.Equal(t, "Average", res.Statistic)
})
t.Run("Old dimensions structure (backwards compatibility)", func(t *testing.T) {
@ -64,9 +100,9 @@ func TestRequestParser(t *testing.T) {
"InstanceId": "test",
"InstanceType": "test2",
},
"statistics": []interface{}{"Average"},
"period": "600",
"hide": false,
"statistic": "Average",
"period": "600",
"hide": false,
})
res, err := parseRequestQuery(query, "ref1", from, to)
@ -75,7 +111,7 @@ func TestRequestParser(t *testing.T) {
assert.Equal(t, "ref1", res.RefId)
assert.Equal(t, "ec2", res.Namespace)
assert.Equal(t, "CPUUtilization", res.MetricName)
assert.Empty(t, res.Id)
assert.Equal(t, "queryref1", res.Id)
assert.Empty(t, res.Expression)
assert.Equal(t, 600, res.Period)
assert.True(t, res.ReturnData)
@ -83,7 +119,7 @@ func TestRequestParser(t *testing.T) {
assert.Len(t, res.Dimensions["InstanceId"], 1)
assert.Len(t, res.Dimensions["InstanceType"], 1)
assert.Equal(t, "test2", res.Dimensions["InstanceType"][0])
assert.Equal(t, "Average", *res.Statistics[0])
assert.Equal(t, "Average", res.Statistic)
})
t.Run("Period defined in the editor by the user is being used when time range is short", func(t *testing.T) {
@ -98,11 +134,11 @@ func TestRequestParser(t *testing.T) {
"InstanceId": "test",
"InstanceType": "test2",
},
"statistics": []interface{}{"Average"},
"hide": false,
"statistic": "Average",
"hide": false,
})
query.Set("period", "900")
timeRange := plugins.NewDataTimeRange("now-1h", "now-2h")
timeRange := tsdb.NewTimeRange("now-1h", "now-2h")
from, err := timeRange.ParseFrom()
require.NoError(t, err)
to, err := timeRange.ParseTo()
@ -125,9 +161,9 @@ func TestRequestParser(t *testing.T) {
"InstanceId": "test",
"InstanceType": "test2",
},
"statistics": []interface{}{"Average"},
"hide": false,
"period": "auto",
"statistic": "Average",
"hide": false,
"period": "auto",
})
t.Run("Time range is 5 minutes", func(t *testing.T) {

View File

@ -8,86 +8,119 @@ import (
"time"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
)
func (e *cloudWatchExecutor) parseResponse(metricDataOutputs []*cloudwatch.GetMetricDataOutput,
queries map[string]*cloudWatchQuery) ([]*cloudwatchResponse, error) {
// Map from result ID -> label -> result
mdrs := make(map[string]map[string]*cloudwatch.MetricDataResult)
labels := map[string][]string{}
for _, mdo := range metricDataOutputs {
requestExceededMaxLimit := false
for _, message := range mdo.Messages {
if *message.Code == "MaxMetricsExceeded" {
requestExceededMaxLimit = true
}
}
for _, r := range mdo.MetricDataResults {
id := *r.Id
label := *r.Label
if _, exists := mdrs[id]; !exists {
mdrs[id] = make(map[string]*cloudwatch.MetricDataResult)
mdrs[id][label] = r
labels[id] = append(labels[id], label)
} else if _, exists := mdrs[id][label]; !exists {
mdrs[id][label] = r
labels[id] = append(labels[id], label)
} else {
mdr := mdrs[id][label]
mdr.Timestamps = append(mdr.Timestamps, r.Timestamps...)
mdr.Values = append(mdr.Values, r.Values...)
if *r.StatusCode == "Complete" {
mdr.StatusCode = r.StatusCode
}
}
queries[id].RequestExceededMaxLimit = requestExceededMaxLimit
}
func (e *cloudWatchExecutor) parseResponse(startTime time.Time, endTime time.Time, metricDataOutputs []*cloudwatch.GetMetricDataOutput,
queries []*cloudWatchQuery) ([]*responseWrapper, error) {
aggregatedResponse := aggregateResponse(metricDataOutputs)
queriesById := map[string]*cloudWatchQuery{}
for _, query := range queries {
queriesById[query.Id] = query
}
cloudWatchResponses := make([]*cloudwatchResponse, 0, len(mdrs))
for id, lr := range mdrs {
query := queries[id]
frames, partialData, err := parseMetricResults(lr, labels[id], query)
results := []*responseWrapper{}
for id, response := range aggregatedResponse {
queryRow := queriesById[id]
dataRes := backend.DataResponse{}
if response.HasArithmeticError {
dataRes.Error = fmt.Errorf("ArithmeticError in query %q: %s", queryRow.RefId, response.ArithmeticErrorMessage)
}
var err error
dataRes.Frames, err = buildDataFrames(startTime, endTime, response, queryRow)
if err != nil {
return nil, err
}
response := &cloudwatchResponse{
DataFrames: frames,
Period: query.Period,
Expression: query.UsedExpression,
RefId: query.RefId,
Id: query.Id,
RequestExceededMaxLimit: query.RequestExceededMaxLimit,
PartialData: partialData,
}
cloudWatchResponses = append(cloudWatchResponses, response)
results = append(results, &responseWrapper{
DataResponse: &dataRes,
RefId: queryRow.RefId,
})
}
return cloudWatchResponses, nil
return results, nil
}
func parseMetricResults(results map[string]*cloudwatch.MetricDataResult, labels []string,
query *cloudWatchQuery) (data.Frames, bool, error) {
partialData := false
frames := data.Frames{}
for _, label := range labels {
result := results[label]
if *result.StatusCode != "Complete" {
partialData = true
}
for _, message := range result.Messages {
if *message.Code == "ArithmeticError" {
return nil, false, fmt.Errorf("ArithmeticError in query %q: %s", query.RefId, *message.Value)
// aggregateResponse merges one or more GetMetricData outputs into a single
// queryRowResponse per metric data query ID.
//
// The same query ID (and even the same label) can appear in several outputs
// (the fixture data shows a NextToken field, so these presumably correspond to
// paginated calls); results for an already-seen label are appended to the
// existing time series instead of replacing it. Output-level
// "MaxMetricsExceeded" messages and result-level "ArithmeticError" messages
// are folded into the aggregated response.
func aggregateResponse(getMetricDataOutputs []*cloudwatch.GetMetricDataOutput) map[string]queryRowResponse {
	responseByID := make(map[string]queryRowResponse)
	for _, gmdo := range getMetricDataOutputs {
		// An output-level message marks the whole request as having exceeded
		// the metrics limit; the flag is applied to every result in this output.
		requestExceededMaxLimit := false
		for _, message := range gmdo.Messages {
			if *message.Code == "MaxMetricsExceeded" {
				requestExceededMaxLimit = true
			}
		}
		for _, r := range gmdo.MetricDataResults {
			id := *r.Id
			label := *r.Label
			// Start from a fresh response, but reuse the aggregate already
			// built for this ID from earlier outputs when one exists.
			response := newQueryRowResponse(id)
			if _, exists := responseByID[id]; exists {
				response = responseByID[id]
			}
			for _, message := range r.Messages {
				if *message.Code == "ArithmeticError" {
					response.addArithmeticError(message.Value)
				}
			}
			// First occurrence of this label: record the whole result.
			// Repeat occurrence: append its points to the existing series.
			if _, exists := response.Metrics[label]; !exists {
				response.addMetricDataResult(r)
			} else {
				response.appendTimeSeries(r)
			}
			// Once exceeded, the flag stays set across all outputs.
			response.RequestExceededMaxLimit = response.RequestExceededMaxLimit || requestExceededMaxLimit
			responseByID[id] = response
		}
	}
	return responseByID
}
// getLabels resolves the query's dimensions against the label CloudWatch
// returned. A single concrete dimension value is used as-is; for wildcard or
// multi-valued dimensions the returned label (or the matching candidate
// value) is used instead. Dimension names are processed in sorted order so
// the result is deterministic.
func getLabels(cloudwatchLabel string, query *cloudWatchQuery) data.Labels {
	names := make([]string, 0, len(query.Dimensions))
	for name := range query.Dimensions {
		names = append(names, name)
	}
	sort.Strings(names)

	result := data.Labels{}
	for _, name := range names {
		candidates := query.Dimensions[name]
		if len(candidates) == 1 && candidates[0] != "*" {
			// Exactly one concrete value: no expansion needed.
			result[name] = candidates[0]
			continue
		}
		for _, candidate := range candidates {
			switch {
			case candidate == cloudwatchLabel || candidate == "*":
				result[name] = cloudwatchLabel
			case strings.Contains(cloudwatchLabel, candidate):
				result[name] = candidate
			}
		}
	}
	return result
}
func buildDataFrames(startTime time.Time, endTime time.Time, aggregatedResponse queryRowResponse,
query *cloudWatchQuery) (data.Frames, error) {
frames := data.Frames{}
for _, label := range aggregatedResponse.Labels {
metric := aggregatedResponse.Metrics[label]
deepLink, err := query.buildDeepLink(startTime, endTime)
if err != nil {
return nil, err
}
// In case a multi-valued dimension is used and the cloudwatch query yields no values, create one empty time
// series for each dimension value. Use that dimension value to expand the alias field
if len(result.Values) == 0 && query.isMultiValuedDimensionExpression() {
if len(metric.Values) == 0 && query.isMultiValuedDimensionExpression() {
series := 0
multiValuedDimension := ""
for key, values := range query.Dimensions {
@ -98,18 +131,18 @@ func parseMetricResults(results map[string]*cloudwatch.MetricDataResult, labels
}
for _, value := range query.Dimensions[multiValuedDimension] {
tags := map[string]string{multiValuedDimension: value}
labels := map[string]string{multiValuedDimension: value}
for key, values := range query.Dimensions {
if key != multiValuedDimension && len(values) > 0 {
tags[key] = values[0]
labels[key] = values[0]
}
}
timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []*time.Time{})
valueField := data.NewField(data.TimeSeriesValueFieldName, tags, []*float64{})
valueField := data.NewField(data.TimeSeriesValueFieldName, labels, []*float64{})
frameName := formatAlias(query, query.Stats, tags, label)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: frameName})
frameName := formatAlias(query, query.Statistic, labels, label)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: frameName, Links: createDataLinks(deepLink)})
emptyFrame := data.Frame{
Name: frameName,
@ -118,66 +151,63 @@ func parseMetricResults(results map[string]*cloudwatch.MetricDataResult, labels
valueField,
},
RefID: query.RefId,
Meta: createMeta(query),
}
frames = append(frames, &emptyFrame)
}
} else {
dims := make([]string, 0, len(query.Dimensions))
for k := range query.Dimensions {
dims = append(dims, k)
}
sort.Strings(dims)
tags := data.Labels{}
for _, dim := range dims {
values := query.Dimensions[dim]
if len(values) == 1 && values[0] != "*" {
tags[dim] = values[0]
} else {
for _, value := range values {
if value == label || value == "*" {
tags[dim] = label
} else if strings.Contains(label, value) {
tags[dim] = value
}
}
}
}
timestamps := []*time.Time{}
points := []*float64{}
for j, t := range result.Timestamps {
if j > 0 {
expectedTimestamp := result.Timestamps[j-1].Add(time.Duration(query.Period) * time.Second)
if expectedTimestamp.Before(*t) {
timestamps = append(timestamps, &expectedTimestamp)
points = append(points, nil)
}
}
val := result.Values[j]
timestamps = append(timestamps, t)
points = append(points, val)
}
timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, timestamps)
valueField := data.NewField(data.TimeSeriesValueFieldName, tags, points)
frameName := formatAlias(query, query.Stats, tags, label)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: frameName})
frame := data.Frame{
Name: frameName,
Fields: []*data.Field{
timeField,
valueField,
},
RefID: query.RefId,
}
frames = append(frames, &frame)
continue
}
labels := getLabels(label, query)
timestamps := []*time.Time{}
points := []*float64{}
for j, t := range metric.Timestamps {
if j > 0 {
expectedTimestamp := metric.Timestamps[j-1].Add(time.Duration(query.Period) * time.Second)
if expectedTimestamp.Before(*t) {
timestamps = append(timestamps, &expectedTimestamp)
points = append(points, nil)
}
}
val := metric.Values[j]
timestamps = append(timestamps, t)
points = append(points, val)
}
timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, timestamps)
valueField := data.NewField(data.TimeSeriesValueFieldName, labels, points)
frameName := formatAlias(query, query.Statistic, labels, label)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: frameName, Links: createDataLinks(deepLink)})
frame := data.Frame{
Name: frameName,
Fields: []*data.Field{
timeField,
valueField,
},
RefID: query.RefId,
Meta: createMeta(query),
}
if aggregatedResponse.RequestExceededMaxLimit {
frame.AppendNotices(data.Notice{
Severity: data.NoticeSeverityWarning,
Text: "cloudwatch GetMetricData error: Maximum number of allowed metrics exceeded. Your search may have been limited",
})
}
if aggregatedResponse.StatusCode != "Complete" {
frame.AppendNotices(data.Notice{
Severity: data.NoticeSeverityWarning,
Text: "cloudwatch GetMetricData error: Too many datapoints requested - your search has been limited. Please try to reduce the time range",
})
}
frames = append(frames, &frame)
}
return frames, partialData, nil
return frames, nil
}
func formatAlias(query *cloudWatchQuery, stat string, dimensions map[string]string, label string) string {
@ -231,3 +261,25 @@ func formatAlias(query *cloudWatchQuery, stat string, dimensions map[string]stri
return string(result)
}
// createDataLinks wraps a CloudWatch console deep link in a one-element data
// link slice; an empty link yields an empty (non-nil) slice.
func createDataLinks(link string) []data.DataLink {
	if link == "" {
		return []data.DataLink{}
	}
	return []data.DataLink{{
		Title:       "View in CloudWatch console",
		TargetBlank: true,
		URL:         link,
	}}
}
// createMeta builds frame metadata carrying the expression that was actually
// executed plus the query's period and ID as custom fields.
func createMeta(query *cloudWatchQuery) *data.FrameMeta {
	custom := map[string]interface{}{
		"period": query.Period,
		"id":     query.Id,
	}
	return &data.FrameMeta{
		ExecutedQueryString: query.UsedExpression,
		Custom:              simplejson.NewFromAny(custom),
	}
}

View File

@ -1,6 +1,8 @@
package cloudwatch
import (
"encoding/json"
"io/ioutil"
"testing"
"time"
@ -10,40 +12,86 @@ import (
"github.com/stretchr/testify/require"
)
// loadGetMetricDataOutputsFromFile reads the JSON fixture at
// ./test-data/multiple-outputs.json and decodes it into a slice of
// GetMetricData outputs for use in tests.
func loadGetMetricDataOutputsFromFile() ([]*cloudwatch.GetMetricDataOutput, error) {
	jsonBody, err := ioutil.ReadFile("./test-data/multiple-outputs.json")
	if err != nil {
		return nil, err
	}
	var outputs []*cloudwatch.GetMetricDataOutput
	err = json.Unmarshal(jsonBody, &outputs)
	return outputs, err
}
func TestCloudWatchResponseParser(t *testing.T) {
startTime := time.Now()
endTime := startTime.Add(2 * time.Hour)
t.Run("when aggregating response", func(t *testing.T) {
getMetricDataOutputs, err := loadGetMetricDataOutputsFromFile()
require.NoError(t, err)
aggregatedResponse := aggregateResponse(getMetricDataOutputs)
t.Run("response for id a", func(t *testing.T) {
idA := "a"
t.Run("should have two labels", func(t *testing.T) {
assert.Len(t, aggregatedResponse[idA].Labels, 2)
assert.Len(t, aggregatedResponse[idA].Metrics, 2)
})
t.Run("should have points for label1 taken from both getMetricDataOutputs", func(t *testing.T) {
assert.Len(t, aggregatedResponse[idA].Metrics["label1"].Values, 10)
})
t.Run("should have statuscode 'Complete'", func(t *testing.T) {
assert.Equal(t, "Complete", aggregatedResponse[idA].StatusCode)
})
t.Run("should have exceeded request limit", func(t *testing.T) {
assert.True(t, aggregatedResponse[idA].RequestExceededMaxLimit)
})
})
t.Run("response for id b", func(t *testing.T) {
idB := "b"
t.Run("should have statuscode is 'Partial'", func(t *testing.T) {
assert.Equal(t, "Partial", aggregatedResponse[idB].StatusCode)
})
t.Run("should have an arithmetic error and an error message", func(t *testing.T) {
assert.True(t, aggregatedResponse[idB].HasArithmeticError)
assert.Equal(t, "One or more data-points have been dropped due to non-numeric values (NaN, -Infinite, +Infinite)", aggregatedResponse[idB].ArithmeticErrorMessage)
})
})
})
t.Run("Expand dimension value using exact match", func(t *testing.T) {
timestamp := time.Unix(0, 0)
labels := []string{"lb1", "lb2"}
mdrs := map[string]*cloudwatch.MetricDataResult{
"lb1": {
Id: aws.String("id1"),
Label: aws.String("lb1"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
response := &queryRowResponse{
Labels: []string{"lb1", "lb2"},
Metrics: map[string]*cloudwatch.MetricDataResult{
"lb1": {
Id: aws.String("id1"),
Label: aws.String("lb1"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
"lb2": {
Id: aws.String("id2"),
Label: aws.String("lb2"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
StatusCode: aws.String("Complete"),
},
"lb2": {
Id: aws.String("id2"),
Label: aws.String("lb2"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
}
@ -56,15 +104,14 @@ func TestCloudWatchResponseParser(t *testing.T) {
"LoadBalancer": {"lb1", "lb2"},
"TargetGroup": {"tg"},
},
Stats: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
Statistic: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
}
frames, partialData, err := parseMetricResults(mdrs, labels, query)
frames, err := buildDataFrames(startTime, endTime, *response, query)
require.NoError(t, err)
frame1 := frames[0]
assert.False(t, partialData)
assert.Equal(t, "lb1 Expanded", frame1.Name)
assert.Equal(t, "lb1", frame1.Fields[1].Labels["LoadBalancer"])
@ -75,39 +122,40 @@ func TestCloudWatchResponseParser(t *testing.T) {
t.Run("Expand dimension value using substring", func(t *testing.T) {
timestamp := time.Unix(0, 0)
labels := []string{"lb1 Sum", "lb2 Average"}
mdrs := map[string]*cloudwatch.MetricDataResult{
"lb1 Sum": {
Id: aws.String("id1"),
Label: aws.String("lb1 Sum"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
response := &queryRowResponse{
Labels: []string{"lb1 Sum", "lb2 Average"},
Metrics: map[string]*cloudwatch.MetricDataResult{
"lb1 Sum": {
Id: aws.String("id1"),
Label: aws.String("lb1 Sum"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
"lb2 Average": {
Id: aws.String("id2"),
Label: aws.String("lb2 Average"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
StatusCode: aws.String("Complete"),
},
"lb2 Average": {
Id: aws.String("id2"),
Label: aws.String("lb2 Average"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
}
}}
query := &cloudWatchQuery{
RefId: "refId1",
@ -118,15 +166,14 @@ func TestCloudWatchResponseParser(t *testing.T) {
"LoadBalancer": {"lb1", "lb2"},
"TargetGroup": {"tg"},
},
Stats: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
Statistic: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
}
frames, partialData, err := parseMetricResults(mdrs, labels, query)
frames, err := buildDataFrames(startTime, endTime, *response, query)
require.NoError(t, err)
frame1 := frames[0]
assert.False(t, partialData)
assert.Equal(t, "lb1 Expanded", frame1.Name)
assert.Equal(t, "lb1", frame1.Fields[1].Labels["LoadBalancer"])
@ -137,37 +184,39 @@ func TestCloudWatchResponseParser(t *testing.T) {
t.Run("Expand dimension value using wildcard", func(t *testing.T) {
timestamp := time.Unix(0, 0)
labels := []string{"lb3", "lb4"}
mdrs := map[string]*cloudwatch.MetricDataResult{
"lb3": {
Id: aws.String("lb3"),
Label: aws.String("lb3"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
response := &queryRowResponse{
Labels: []string{"lb3", "lb4"},
Metrics: map[string]*cloudwatch.MetricDataResult{
"lb3": {
Id: aws.String("lb3"),
Label: aws.String("lb3"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
"lb4": {
Id: aws.String("lb4"),
Label: aws.String("lb4"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
StatusCode: aws.String("Complete"),
},
"lb4": {
Id: aws.String("lb4"),
Label: aws.String("lb4"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
}
@ -180,35 +229,35 @@ func TestCloudWatchResponseParser(t *testing.T) {
"LoadBalancer": {"*"},
"TargetGroup": {"tg"},
},
Stats: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
Statistic: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
}
frames, partialData, err := parseMetricResults(mdrs, labels, query)
frames, err := buildDataFrames(startTime, endTime, *response, query)
require.NoError(t, err)
assert.False(t, partialData)
assert.Equal(t, "lb3 Expanded", frames[0].Name)
assert.Equal(t, "lb4 Expanded", frames[1].Name)
})
t.Run("Expand dimension value when no values are returned and a multi-valued template variable is used", func(t *testing.T) {
timestamp := time.Unix(0, 0)
labels := []string{"lb3"}
mdrs := map[string]*cloudwatch.MetricDataResult{
"lb3": {
Id: aws.String("lb3"),
Label: aws.String("lb3"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
response := &queryRowResponse{
Labels: []string{"lb3"},
Metrics: map[string]*cloudwatch.MetricDataResult{
"lb3": {
Id: aws.String("lb3"),
Label: aws.String("lb3"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{},
StatusCode: aws.String("Complete"),
},
Values: []*float64{},
StatusCode: aws.String("Complete"),
},
}
query := &cloudWatchQuery{
RefId: "refId1",
Region: "us-east-1",
@ -217,14 +266,13 @@ func TestCloudWatchResponseParser(t *testing.T) {
Dimensions: map[string][]string{
"LoadBalancer": {"lb1", "lb2"},
},
Stats: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
Statistic: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded",
}
frames, partialData, err := parseMetricResults(mdrs, labels, query)
frames, err := buildDataFrames(startTime, endTime, *response, query)
require.NoError(t, err)
assert.False(t, partialData)
assert.Len(t, frames, 2)
assert.Equal(t, "lb1 Expanded", frames[0].Name)
assert.Equal(t, "lb2 Expanded", frames[1].Name)
@ -232,18 +280,20 @@ func TestCloudWatchResponseParser(t *testing.T) {
t.Run("Expand dimension value when no values are returned and a multi-valued template variable and two single-valued dimensions are used", func(t *testing.T) {
timestamp := time.Unix(0, 0)
labels := []string{"lb3"}
mdrs := map[string]*cloudwatch.MetricDataResult{
"lb3": {
Id: aws.String("lb3"),
Label: aws.String("lb3"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
response := &queryRowResponse{
Labels: []string{"lb3"},
Metrics: map[string]*cloudwatch.MetricDataResult{
"lb3": {
Id: aws.String("lb3"),
Label: aws.String("lb3"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{},
StatusCode: aws.String("Complete"),
},
Values: []*float64{},
StatusCode: aws.String("Complete"),
},
}
@ -257,14 +307,13 @@ func TestCloudWatchResponseParser(t *testing.T) {
"InstanceType": {"micro"},
"Resource": {"res"},
},
Stats: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded {{InstanceType}} - {{Resource}}",
Statistic: "Average",
Period: 60,
Alias: "{{LoadBalancer}} Expanded {{InstanceType}} - {{Resource}}",
}
frames, partialData, err := parseMetricResults(mdrs, labels, query)
frames, err := buildDataFrames(startTime, endTime, *response, query)
require.NoError(t, err)
assert.False(t, partialData)
assert.Len(t, frames, 2)
assert.Equal(t, "lb1 Expanded micro - res", frames[0].Name)
assert.Equal(t, "lb2 Expanded micro - res", frames[1].Name)
@ -272,22 +321,24 @@ func TestCloudWatchResponseParser(t *testing.T) {
t.Run("Parse cloudwatch response", func(t *testing.T) {
timestamp := time.Unix(0, 0)
labels := []string{"lb"}
mdrs := map[string]*cloudwatch.MetricDataResult{
"lb": {
Id: aws.String("id1"),
Label: aws.String("lb"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
response := &queryRowResponse{
Labels: []string{"lb"},
Metrics: map[string]*cloudwatch.MetricDataResult{
"lb": {
Id: aws.String("id1"),
Label: aws.String("lb"),
Timestamps: []*time.Time{
aws.Time(timestamp),
aws.Time(timestamp.Add(60 * time.Second)),
aws.Time(timestamp.Add(180 * time.Second)),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
Values: []*float64{
aws.Float64(10),
aws.Float64(20),
aws.Float64(30),
},
StatusCode: aws.String("Complete"),
},
}
@ -300,15 +351,14 @@ func TestCloudWatchResponseParser(t *testing.T) {
"LoadBalancer": {"lb"},
"TargetGroup": {"tg"},
},
Stats: "Average",
Period: 60,
Alias: "{{namespace}}_{{metric}}_{{stat}}",
Statistic: "Average",
Period: 60,
Alias: "{{namespace}}_{{metric}}_{{stat}}",
}
frames, partialData, err := parseMetricResults(mdrs, labels, query)
frames, err := buildDataFrames(startTime, endTime, *response, query)
require.NoError(t, err)
frame := frames[0]
assert.False(t, partialData)
assert.Equal(t, "AWS/ApplicationELB_TargetResponseTime_Average", frame.Name)
assert.Equal(t, "Time", frame.Fields[0].Name)
assert.Equal(t, "lb", frame.Fields[1].Labels["LoadBalancer"])

View File

@ -0,0 +1,96 @@
[
{
"Messages": null,
"MetricDataResults": [
{
"Id": "a",
"Label": "label1",
"Messages": null,
"StatusCode": "Complete",
"Timestamps": [
"2021-01-15T19:44:00Z",
"2021-01-15T19:59:00Z",
"2021-01-15T20:14:00Z",
"2021-01-15T20:29:00Z",
"2021-01-15T20:44:00Z"
],
"Values": [
0.1333395078879982,
0.244268469636633,
0.15574387947267768,
0.14447563659125626,
0.15519743138527173
]
},
{
"Id": "a",
"Label": "label2",
"Messages": null,
"StatusCode": "Complete",
"Timestamps": [
"2021-01-15T19:44:00Z"
],
"Values": [
0.1333395078879982
]
},
{
"Id": "b",
"Label": "label2",
"Messages": null,
"StatusCode": "Complete",
"Timestamps": [
"2021-01-15T19:44:00Z"
],
"Values": [
0.1333395078879982
]
}
],
"NextToken": null
},
{
"Messages": [
{ "Code": "", "Value": null },
{ "Code": "MaxMetricsExceeded", "Value": null }
],
"MetricDataResults": [
{
"Id": "a",
"Label": "label1",
"Messages": null,
"StatusCode": "Complete",
"Timestamps": [
"2021-01-15T19:44:00Z",
"2021-01-15T19:59:00Z",
"2021-01-15T20:14:00Z",
"2021-01-15T20:29:00Z",
"2021-01-15T20:44:00Z"
],
"Values": [
0.1333395078879982,
0.244268469636633,
0.15574387947267768,
0.14447563659125626,
0.15519743138527173
]
},
{
"Id": "b",
"Label": "label2",
"Messages": [{
"Code": "ArithmeticError",
"Value": "One or more data-points have been dropped due to non-numeric values (NaN, -Infinite, +Infinite)"
}],
"StatusCode": "Partial",
"Timestamps": [
"2021-01-15T19:44:00Z"
],
"Values": [
0.1333395078879982
]
}
],
"NextToken": null
}
]

View File

@ -21,7 +21,6 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
if len(req.Queries) == 0 {
return nil, fmt.Errorf("request contains no queries")
}
// startTime and endTime are always the same for all queries
startTime := req.Queries[0].TimeRange.From
endTime := req.Queries[0].TimeRange.To
@ -62,12 +61,7 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
return err
}
queries, err := e.transformRequestQueriesToCloudWatchQueries(requestQueries)
if err != nil {
return err
}
metricDataInput, err := e.buildMetricDataInput(startTime, endTime, queries)
metricDataInput, err := e.buildMetricDataInput(startTime, endTime, requestQueries)
if err != nil {
return err
}
@ -77,22 +71,15 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
return err
}
responses, err := e.parseResponse(mdo, queries)
res, err := e.parseResponse(startTime, endTime, mdo, requestQueries)
if err != nil {
return err
}
res, err := e.transformQueryResponsesToQueryResult(responses, requestQueries, startTime, endTime)
if err != nil {
return err
for _, responseWrapper := range res {
resultChan <- responseWrapper
}
for refID, queryRes := range res {
resultChan <- &responseWrapper{
DataResponse: queryRes,
RefId: refID,
}
}
return nil
})
}

View File

@ -2,37 +2,8 @@ package cloudwatch
import (
"fmt"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// requestQuery is the parsed, pre-transformation form of one CloudWatch
// metrics query as received from the frontend request payload.
type requestQuery struct {
	RefId              string              // query reference ID, unique within the request
	Region             string              // AWS region to run the query against
	Id                 string              // metric data query ID (may be empty; tests show one is generated from the RefId)
	Namespace          string              // CloudWatch metric namespace, e.g. "AWS/EC2"
	MetricName         string              // CloudWatch metric name, e.g. "CPUUtilization"
	Statistics         []*string           // requested statistics, e.g. "Average" (legacy multi-stat list)
	QueryType          string              // NOTE(review): semantics not visible in this chunk — confirm against parser
	Expression         string              // metric math expression, if any
	ReturnData         bool                // whether results for this query are returned to the caller
	Dimensions         map[string][]string // dimension name -> candidate values ("*" acts as a wildcard)
	ExtendedStatistics []*string           // presumably percentile-style statistics (tests use values like "p12.21") — confirm
	Period             int                 // period in seconds
	Alias              string              // alias pattern used to name result series, e.g. "{{LoadBalancer}} Expanded"
	MatchExact         bool                // whether dimension matching is exact
}
// cloudwatchResponse is the per-query result produced by the response parser
// (legacy shape, prior to the responseWrapper-based flow in this file).
type cloudwatchResponse struct {
	DataFrames              data.Frames // parsed time-series frames
	Id                      string      // metric data query ID this response belongs to
	RefId                   string      // reference ID of the originating query
	Expression              string      // expression actually used for the request (populated from UsedExpression)
	RequestExceededMaxLimit bool        // true if the request hit the CloudWatch metrics-per-call limit
	PartialData             bool        // true if any result's status code was not "Complete"
	Period                  int         // query period in seconds
}
type queryError struct {
err error
RefID string
@ -42,11 +13,6 @@ func (e *queryError) Error() string {
return fmt.Sprintf("error parsing query %q, %s", e.RefID, e.err)
}
type executedQuery struct {
Expression, ID string
Period int
}
type cloudWatchLink struct {
View string `json:"view"`
Stacked bool `json:"stacked"`

View File

@ -1452,6 +1452,115 @@ describe('DashboardModel', () => {
`);
});
});
describe('migrating legacy CloudWatch queries', () => {
let model: any;
let panelTargets: any;
beforeEach(() => {
model = new DashboardModel({
annotations: {
list: [
{
actionPrefix: '',
alarmNamePrefix: '',
alias: '',
dimensions: {
InstanceId: 'i-123',
},
enable: true,
expression: '',
iconColor: 'red',
id: '',
matchExact: true,
metricName: 'CPUUtilization',
name: 'test',
namespace: 'AWS/EC2',
period: '',
prefixMatching: false,
region: 'us-east-2',
statistics: ['Minimum', 'Sum'],
},
],
},
panels: [
{
gridPos: {
h: 8,
w: 12,
x: 0,
y: 0,
},
id: 4,
options: {
legend: {
calcs: [],
displayMode: 'list',
placement: 'bottom',
},
tooltipOptions: {
mode: 'single',
},
},
targets: [
{
alias: '',
dimensions: {
InstanceId: 'i-123',
},
expression: '',
id: '',
matchExact: true,
metricName: 'CPUUtilization',
namespace: 'AWS/EC2',
period: '',
refId: 'A',
region: 'default',
statistics: ['Average', 'Minimum', 'p12.21'],
},
{
alias: '',
dimensions: {
InstanceId: 'i-123',
},
expression: '',
hide: false,
id: '',
matchExact: true,
metricName: 'CPUUtilization',
namespace: 'AWS/EC2',
period: '',
refId: 'B',
region: 'us-east-2',
statistics: ['Sum'],
},
],
title: 'Panel Title',
type: 'timeseries',
},
],
});
panelTargets = model.panels[0].targets;
});
it('multiple stats query should have been split into three', () => {
expect(panelTargets.length).toBe(4);
});
it('new stats query should get the right statistic', () => {
expect(panelTargets[0].statistic).toBe('Average');
expect(panelTargets[1].statistic).toBe('Sum');
expect(panelTargets[2].statistic).toBe('Minimum');
expect(panelTargets[3].statistic).toBe('p12.21');
});
it('new stats queries should be put in the end of the array', () => {
expect(panelTargets[0].refId).toBe('A');
expect(panelTargets[1].refId).toBe('B');
expect(panelTargets[2].refId).toBe('C');
expect(panelTargets[3].refId).toBe('D');
});
});
});
function createRow(options: any, panelDescriptions: any[]) {

View File

@ -20,6 +20,8 @@ import {
ValueMapping,
getActiveThreshold,
DataTransformerConfig,
AnnotationQuery,
DataQuery,
} from '@grafana/data';
// Constants
import {
@ -39,6 +41,11 @@ import { plugin as gaugePanelPlugin } from 'app/plugins/panel/gauge/module';
import { getStandardFieldConfigs, getStandardOptionEditors } from '@grafana/ui';
import { labelsToFieldsTransformer } from '../../../../../packages/grafana-data/src/transformations/transformers/labelsToFields';
import { mergeTransformer } from '../../../../../packages/grafana-data/src/transformations/transformers/merge';
import {
migrateMultipleStatsMetricsQuery,
migrateMultipleStatsAnnotationQuery,
} from 'app/plugins/datasource/cloudwatch/migrations';
import { CloudWatchMetricsQuery, CloudWatchAnnotationQuery } from 'app/plugins/datasource/cloudwatch/types';
standardEditorsRegistry.setInit(getStandardOptionEditors);
standardFieldConfigEditorRegistry.setInit(getStandardFieldConfigs);
@ -695,6 +702,31 @@ export class DashboardMigrator {
}
}
// Migrates metric queries and/or annotation queries that use more than one statistic.
// E.g. query.statistics = ['Max', 'Min'] is migrated to two queries - query1.statistic = 'Max'
// and query2.statistic = 'Min'. Queries created during migration are appended to the array.
migrateCloudWatchQueries() {
  for (const panel of this.dashboard.panels) {
    for (const target of panel.targets) {
      if (!isLegacyCloudWatchQuery(target)) {
        continue;
      }
      // The migration helper receives a snapshot of the current targets and
      // returns the extra single-statistic queries to append.
      const newQueries = migrateMultipleStatsMetricsQuery(target, [...panel.targets]);
      panel.targets.push(...newQueries);
    }
  }

  for (const annotation of this.dashboard.annotations.list) {
    if (isLegacyCloudWatchAnnotationQuery(annotation)) {
      this.dashboard.annotations.list.push(...migrateMultipleStatsAnnotationQuery(annotation));
    }
  }
}
upgradeToGridLayout(old: any) {
let yPos = 0;
const widthFactor = GRID_COLUMN_COUNT / 12;
@ -1010,6 +1042,25 @@ function upgradeValueMappingsForPanel(panel: PanelModel) {
return panel;
}
// Type guard: detects a legacy CloudWatch metrics query, i.e. one that still
// carries the deprecated `statistics` array (pre-migration shape).
function isLegacyCloudWatchQuery(target: DataQuery): target is CloudWatchMetricsQuery {
  // Use Object.prototype.hasOwnProperty.call so the check is safe even for
  // objects that lack or shadow their own `hasOwnProperty` (e.g. data parsed
  // from JSON and merged over a null-prototype object).
  const hasOwn = (prop: string) => Object.prototype.hasOwnProperty.call(target, prop);
  return hasOwn('dimensions') && hasOwn('namespace') && hasOwn('region') && hasOwn('statistics');
}
// Type guard: detects a legacy CloudWatch annotation query — same shape as a
// legacy metrics query (deprecated `statistics` array) plus `prefixMatching`.
function isLegacyCloudWatchAnnotationQuery(target: AnnotationQuery<DataQuery>): target is CloudWatchAnnotationQuery {
  // Object.prototype.hasOwnProperty.call: robust against objects that lack or
  // shadow their own `hasOwnProperty` (no-prototype-builtins).
  const hasOwn = (prop: string) => Object.prototype.hasOwnProperty.call(target, prop);
  return (
    hasOwn('dimensions') && hasOwn('namespace') && hasOwn('region') && hasOwn('prefixMatching') && hasOwn('statistics')
  );
}
function upgradeValueMappings(oldMappings: any, thresholds?: ThresholdsConfig): ValueMapping[] | undefined {
if (!oldMappings) {
return undefined;

View File

@ -1016,6 +1016,7 @@ export class DashboardModel {
private updateSchema(old: any) {
const migrator = new DashboardMigrator(this);
migrator.updateSchema(old);
migrator.migrateCloudWatchQueries();
}
resetOriginalTime() {

View File

@ -1,5 +1,5 @@
import { defaultsDeep } from 'lodash';
import { AnnotationQuery } from './types';
import { CloudWatchAnnotationQuery } from './types';
export class CloudWatchAnnotationsQueryCtrl {
static templateUrl = 'partials/annotations.editor.html';
@ -17,7 +17,7 @@ export class CloudWatchAnnotationsQueryCtrl {
region: 'default',
id: '',
alias: '',
statistics: ['Average'],
statistic: 'Average',
matchExact: true,
prefixMatching: false,
actionPrefix: '',
@ -27,7 +27,7 @@ export class CloudWatchAnnotationsQueryCtrl {
this.onChange = this.onChange.bind(this);
}
onChange(query: AnnotationQuery) {
onChange(query: CloudWatchAnnotationQuery) {
Object.assign(this.annotation, query);
}
}

View File

@ -2,14 +2,14 @@ import React, { ChangeEvent } from 'react';
import { LegacyForms } from '@grafana/ui';
const { Switch } = LegacyForms;
import { PanelData } from '@grafana/data';
import { AnnotationQuery } from '../types';
import { CloudWatchAnnotationQuery } from '../types';
import { CloudWatchDatasource } from '../datasource';
import { QueryField, PanelQueryEditor } from './';
export type Props = {
query: AnnotationQuery;
query: CloudWatchAnnotationQuery;
datasource: CloudWatchDatasource;
onChange: (value: AnnotationQuery) => void;
onChange: (value: CloudWatchAnnotationQuery) => void;
data?: PanelData;
};
@ -20,7 +20,7 @@ export function AnnotationQueryEditor(props: React.PropsWithChildren<Props>) {
<>
<PanelQueryEditor
{...props}
onChange={(editorQuery: AnnotationQuery) => onChange({ ...query, ...editorQuery })}
onChange={(editorQuery: CloudWatchAnnotationQuery) => onChange({ ...query, ...editorQuery })}
onRunQuery={() => {}}
history={[]}
></PanelQueryEditor>

View File

@ -1,10 +1,9 @@
import React, { PureComponent, ChangeEvent } from 'react';
import { isEmpty } from 'lodash';
import { ExploreQueryFieldProps } from '@grafana/data';
import { ExploreQueryFieldProps, PanelData } from '@grafana/data';
import { LegacyForms, ValidationEvents, EventsWithValidation, Icon } from '@grafana/ui';
const { Input, Switch } = LegacyForms;
import { CloudWatchQuery, CloudWatchMetricsQuery, CloudWatchJsonData } from '../types';
import { CloudWatchQuery, CloudWatchMetricsQuery, CloudWatchJsonData, ExecutedQueryPreview } from '../types';
import { CloudWatchDatasource } from '../datasource';
import { QueryField, Alias, MetricsQueryFieldsEditor } from './';
@ -31,7 +30,7 @@ export const normalizeQuery = ({
region,
id,
alias,
statistics,
statistic,
period,
...rest
}: CloudWatchMetricsQuery): CloudWatchMetricsQuery => {
@ -43,7 +42,7 @@ export const normalizeQuery = ({
region: region || 'default',
id: id || '',
alias: alias || '',
statistics: isEmpty(statistics) ? ['Average'] : statistics,
statistic: statistic ?? 'Average',
period: period || '',
...rest,
};
@ -65,55 +64,65 @@ export class MetricsQueryEditor extends PureComponent<Props, State> {
onRunQuery();
}
getExecutedQueryPreview(data?: PanelData): ExecutedQueryPreview {
if (!(data?.series.length && data?.series[0].meta?.custom)) {
return {
executedQuery: '',
period: '',
id: '',
};
}
return {
executedQuery: data?.series[0].meta.executedQueryString ?? '',
period: data.series[0].meta.custom['period'],
id: data.series[0].meta.custom['id'],
};
}
render() {
const { data, onRunQuery } = this.props;
const metricsQuery = this.props.query as CloudWatchMetricsQuery;
const { showMeta } = this.state;
const query = normalizeQuery(metricsQuery);
const executedQueries =
data && data.series.length && data.series[0].meta && data.state === 'Done'
? data.series[0].meta.executedQueryString
: null;
const executedQueryPreview = this.getExecutedQueryPreview(data);
return (
<>
<MetricsQueryFieldsEditor {...{ ...this.props, query }}></MetricsQueryFieldsEditor>
{query.statistics.length <= 1 && (
<div className="gf-form-inline">
<div className="gf-form">
<QueryField
label="Id"
tooltip="Id can include numbers, letters, and underscore, and must start with a lowercase letter."
>
<Input
className="gf-form-input width-8"
onBlur={onRunQuery}
onChange={(event: ChangeEvent<HTMLInputElement>) =>
this.onChange({ ...metricsQuery, id: event.target.value })
}
validationEvents={idValidationEvents}
value={query.id}
/>
</QueryField>
</div>
<div className="gf-form gf-form--grow">
<QueryField
className="gf-form--grow"
label="Expression"
tooltip="Optionally you can add an expression here. Please note that if a math expression that is referencing other queries is being used, it will not be possible to create an alert rule based on this query"
>
<Input
className="gf-form-input"
onBlur={onRunQuery}
value={query.expression || ''}
onChange={(event: ChangeEvent<HTMLInputElement>) =>
this.onChange({ ...metricsQuery, expression: event.target.value })
}
/>
</QueryField>
</div>
<div className="gf-form-inline">
<div className="gf-form">
<QueryField
label="Id"
tooltip="Id can include numbers, letters, and underscore, and must start with a lowercase letter."
>
<Input
className="gf-form-input width-8"
onBlur={onRunQuery}
onChange={(event: ChangeEvent<HTMLInputElement>) =>
this.onChange({ ...metricsQuery, id: event.target.value })
}
validationEvents={idValidationEvents}
value={query.id}
/>
</QueryField>
</div>
)}
<div className="gf-form gf-form--grow">
<QueryField
className="gf-form--grow"
label="Expression"
tooltip="Optionally you can add an expression here. Please note that if a math expression that is referencing other queries is being used, it will not be possible to create an alert rule based on this query"
>
<Input
className="gf-form-input"
onBlur={onRunQuery}
value={query.expression || ''}
onChange={(event: ChangeEvent<HTMLInputElement>) =>
this.onChange({ ...metricsQuery, expression: event.target.value })
}
/>
</QueryField>
</div>
</div>
<div className="gf-form-inline">
<div className="gf-form">
<QueryField label="Period" tooltip="Minimum interval between points in seconds">
@ -153,21 +162,20 @@ export class MetricsQueryEditor extends PureComponent<Props, State> {
<label className="gf-form-label">
<a
onClick={() =>
executedQueries &&
executedQueryPreview &&
this.setState({
showMeta: !showMeta,
})
}
>
<Icon name={showMeta && executedQueries ? 'angle-down' : 'angle-right'} />{' '}
{showMeta && executedQueries ? 'Hide' : 'Show'} Query Preview
<Icon name={showMeta ? 'angle-down' : 'angle-right'} /> {showMeta ? 'Hide' : 'Show'} Query Preview
</a>
</label>
</div>
<div className="gf-form gf-form--grow">
<div className="gf-form-label gf-form-label--grow" />
</div>
{showMeta && executedQueries && (
{showMeta && (
<table className="filter-table form-inline">
<thead>
<tr>
@ -178,13 +186,11 @@ export class MetricsQueryEditor extends PureComponent<Props, State> {
</tr>
</thead>
<tbody>
{JSON.parse(executedQueries).map(({ ID, Expression, Period }: any) => (
<tr key={ID}>
<td>{ID}</td>
<td>{Expression}</td>
<td>{Period}</td>
</tr>
))}
<tr>
<td>{executedQueryPreview.id}</td>
<td>{executedQueryPreview.executedQuery}</td>
<td>{executedQueryPreview.period}</td>
</tr>
</tbody>
</table>
)}

View File

@ -3,7 +3,7 @@ import { SelectableValue } from '@grafana/data';
import { Segment, SegmentAsync } from '@grafana/ui';
import { CloudWatchMetricsQuery, SelectableStrings } from '../types';
import { CloudWatchDatasource } from '../datasource';
import { Dimensions, QueryInlineField, Stats } from '.';
import { Dimensions, QueryInlineField } from '.';
export type Props = {
query: CloudWatchMetricsQuery;
@ -120,12 +120,25 @@ export function MetricsQueryFieldsEditor({
/>
</QueryInlineField>
<QueryInlineField label="Stats">
<Stats
stats={datasource.standardStatistics.map(toOption)}
values={metricsQuery.statistics}
onChange={(statistics) => onQueryChange({ ...metricsQuery, statistics })}
variableOptionGroup={variableOptionGroup}
<QueryInlineField label="Statistic">
<Segment
allowCustomValue
value={query.statistic}
options={[
...datasource.standardStatistics.filter((s) => s !== query.statistic).map(toOption),
variableOptionGroup,
]}
onChange={({ value: statistic }) => {
if (
!datasource.standardStatistics.includes(statistic) &&
!/^p\d{2}(?:\.\d{1,2})?$/.test(statistic) &&
!statistic.startsWith('$')
) {
return;
}
onQueryChange({ ...metricsQuery, statistic });
}}
/>
</QueryInlineField>

View File

@ -1,21 +0,0 @@
import React from 'react';
import { render, screen } from '@testing-library/react';
import { Stats } from './Stats';
const toOption = (value: any) => ({ label: value, value });
describe('Stats', () => {
it('should render component', () => {
render(
<Stats
data-testid="stats"
values={['Average', 'Minimum']}
variableOptionGroup={{ label: 'templateVar', value: 'templateVar' }}
onChange={() => {}}
stats={['Average', 'Maximum', 'Minimum', 'Sum', 'SampleCount'].map(toOption)}
/>
);
expect(screen.getByText('Average')).toBeInTheDocument();
expect(screen.getByText('Minimum')).toBeInTheDocument();
});
});

View File

@ -1,45 +0,0 @@
import React, { FunctionComponent } from 'react';
import { SelectableStrings } from '../types';
import { SelectableValue } from '@grafana/data';
import { Segment, Icon } from '@grafana/ui';
export interface Props {
values: string[];
onChange: (values: string[]) => void;
variableOptionGroup: SelectableValue<string>;
stats: SelectableStrings;
}
const removeText = '-- remove stat --';
const removeOption: SelectableValue<string> = { label: removeText, value: removeText };
export const Stats: FunctionComponent<Props> = ({ stats, values, onChange, variableOptionGroup }) => (
<>
{values &&
values.map((value, index) => (
<Segment
allowCustomValue
key={value + index}
value={value}
options={[removeOption, ...stats, variableOptionGroup]}
onChange={({ value }) =>
onChange(
value === removeText
? values.filter((_, i) => i !== index)
: values.map((v, i) => (i === index ? value! : v))
)
}
/>
))}
<Segment
Component={
<a className="gf-form-label query-part">
<Icon name="plus" />
</a>
}
allowCustomValue
onChange={({ value }) => onChange([...values, value!])}
options={[...stats.filter(({ value }) => !values.includes(value!)), variableOptionGroup]}
/>
</>
);

View File

@ -1,3 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`QueryEditor should render component 1`] = `null`;

View File

@ -1,4 +1,3 @@
export { Stats } from './Stats';
export { Dimensions } from './Dimensions';
export { QueryInlineField, QueryField } from './Forms';
export { Alias } from './Alias';

View File

@ -263,8 +263,7 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
const validMetricsQueries = metricQueries
.filter(
(item) =>
(!!item.region && !!item.namespace && !!item.metricName && !isEmpty(item.statistics)) ||
item.expression?.length > 0
(!!item.region && !!item.namespace && !!item.metricName && !!item.statistic) || item.expression?.length > 0
)
.map(
(item: CloudWatchMetricsQuery): MetricQuery => {
@ -272,25 +271,11 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
item.namespace = this.replace(item.namespace, options.scopedVars, true, 'namespace');
item.metricName = this.replace(item.metricName, options.scopedVars, true, 'metric name');
item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars);
item.statistics = item.statistics.map((stat) => this.replace(stat, options.scopedVars, true, 'statistics'));
item.statistic = this.templateSrv.replace(item.statistic, options.scopedVars);
item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting
item.id = this.templateSrv.replace(item.id, options.scopedVars);
item.expression = this.templateSrv.replace(item.expression, options.scopedVars);
// valid ExtendedStatistics is like p90.00, check the pattern
const hasInvalidStatistics = item.statistics.some((s) => {
if (s.indexOf('p') === 0) {
const matches = /^p\d{2}(?:\.\d{1,2})?$/.exec(s);
return !matches || matches[0] !== s;
}
return false;
});
if (hasInvalidStatistics) {
throw { message: 'Invalid extended statistics' };
}
return {
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
@ -558,22 +543,32 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
};
}),
catchError((err) => {
if (/^Throttling:.*/.test(err.data.message)) {
const isFrameError = err.data.results;
// Error is not frame specific
if (!isFrameError && err.data && err.data.message === 'Metric request error' && err.data.error) {
err.message = err.data.error;
return throwError(() => err);
}
// The error is either for a specific frame or for all the frames
const results: Array<{ error?: string }> = Object.values(err.data.results);
const firstErrorResult = results.find((r) => r.error);
if (firstErrorResult) {
err.message = firstErrorResult.error;
}
if (results.some((r) => r.error && /^Throttling:.*/.test(r.error))) {
const failedRedIds = Object.keys(err.data.results);
const regionsAffected = Object.values(request.queries).reduce(
(res: string[], { refId, region }) =>
(refId && !failedRedIds.includes(refId)) || res.includes(region) ? res : [...res, region],
[]
) as string[];
regionsAffected.forEach((region) => this.debouncedAlert(this.datasourceName, this.getActualRegion(region)));
}
if (err.data && err.data.message === 'Metric request error' && err.data.error) {
err.data.message = err.data.error;
}
return throwError(err);
return throwError(() => err);
})
);
}
@ -827,7 +822,7 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
annotationQuery(options: any) {
const annotation = options.annotation;
const statistics = annotation.statistics.map((s: any) => this.templateSrv.replace(s));
const statistic = this.templateSrv.replace(annotation.statistic);
const defaultPeriod = annotation.prefixMatching ? '' : '300';
let period = annotation.period || defaultPeriod;
period = parseInt(period, 10);
@ -837,7 +832,7 @@ export class CloudWatchDatasource extends DataSourceWithBackend<CloudWatchQuery,
namespace: this.templateSrv.replace(annotation.namespace),
metricName: this.templateSrv.replace(annotation.metricName),
dimensions: this.convertDimensionFormat(annotation.dimensions, {}),
statistics: statistics,
statistic: statistic,
period: period,
actionPrefix: annotation.actionPrefix || '',
alarmNamePrefix: annotation.alarmNamePrefix || '',

View File

@ -0,0 +1,118 @@
import { DataQuery } from '@grafana/data';
import { migrateMultipleStatsAnnotationQuery, migrateMultipleStatsMetricsQuery } from './migrations';
import { CloudWatchAnnotationQuery, CloudWatchMetricsQuery } from './types';
describe('migration', () => {
  describe('migrateMultipleStatsMetricsQuery', () => {
    const queryToMigrate = {
      statistics: ['Average', 'Sum', 'Maximum'],
      refId: 'A',
    };
    const panelQueries: DataQuery[] = [
      { ...queryToMigrate },
      {
        refId: 'B',
      },
    ];
    const newQueries = migrateMultipleStatsMetricsQuery(queryToMigrate as CloudWatchMetricsQuery, panelQueries);
    const newMetricQueries = newQueries as CloudWatchMetricsQuery[];

    it('should create one new query for each stat', () => {
      expect(newQueries.length).toBe(2);
    });

    it('should assign new queries the right stats', () => {
      // The original keeps 'Average'; the remaining stats become new queries.
      expect(newMetricQueries[0].statistic).toBe('Sum');
      expect(newMetricQueries[1].statistic).toBe('Maximum');
    });

    it('should assign new queries the right ref id', () => {
      // 'A' and 'B' are taken by the existing panel queries.
      expect(newQueries[0].refId).toBe('C');
      expect(newQueries[1].refId).toBe('D');
    });

    it('should not have statistics prop anymore', () => {
      expect(queryToMigrate).not.toHaveProperty('statistics');
      expect(newQueries[0]).not.toHaveProperty('statistics');
      expect(newQueries[1]).not.toHaveProperty('statistics');
    });
  });

  describe('migrateMultipleStatsMetricsQuery with only one stat', () => {
    const queryToMigrate = {
      statistics: ['Average'],
      refId: 'A',
    } as CloudWatchMetricsQuery;
    const panelQueries: DataQuery[] = [
      { ...queryToMigrate },
      {
        refId: 'B',
      },
    ];
    const newQueries = migrateMultipleStatsMetricsQuery(queryToMigrate as CloudWatchMetricsQuery, panelQueries);

    it('should not create any new queries', () => {
      expect(newQueries.length).toBe(0);
    });

    it('should have the right stats', () => {
      expect(queryToMigrate.statistic).toBe('Average');
    });

    it('should not have statistics prop anymore', () => {
      expect(queryToMigrate).not.toHaveProperty('statistics');
    });
  });

  describe('migrateMultipleStatsAnnotationQuery', () => {
    const annotationToMigrate = {
      statistics: ['p23.23', 'SampleCount'],
      name: 'Test annotation',
    };
    const newAnnotations = migrateMultipleStatsAnnotationQuery(annotationToMigrate as CloudWatchAnnotationQuery);
    const newCloudWatchAnnotations = newAnnotations as CloudWatchAnnotationQuery[];

    it('should create one new annotation for each stat', () => {
      expect(newAnnotations.length).toBe(1);
    });

    it('should assign new queries the right stats', () => {
      expect(newCloudWatchAnnotations[0].statistic).toBe('SampleCount');
    });

    it('should assign new queries the right ref id', () => {
      expect(newAnnotations[0].name).toBe('Test annotation - SampleCount');
    });

    it('should not have statistics prop anymore', () => {
      expect(newCloudWatchAnnotations[0]).not.toHaveProperty('statistics');
    });

    it('should migrate original query correctly', () => {
      expect(annotationToMigrate).not.toHaveProperty('statistics');
      expect(annotationToMigrate.name).toBe('Test annotation - p23.23');
    });

    describe('migrateMultipleStatsAnnotationQuery with only one stat', () => {
      const annotationToMigrate = {
        statistics: ['p23.23'],
        name: 'Test annotation',
      } as CloudWatchAnnotationQuery;
      const newAnnotations = migrateMultipleStatsAnnotationQuery(annotationToMigrate as CloudWatchAnnotationQuery);

      it('should not create new annotations', () => {
        expect(newAnnotations.length).toBe(0);
      });

      it('should not change the name', () => {
        expect(annotationToMigrate.name).toBe('Test annotation');
      });

      it('should use statistics prop and remove statistics prop', () => {
        expect(annotationToMigrate.statistic).toEqual('p23.23');
        expect(annotationToMigrate).not.toHaveProperty('statistics');
      });
    });
  });
});

View File

@ -0,0 +1,44 @@
import { AnnotationQuery, DataQuery } from '@grafana/data';
import { getNextRefIdChar } from 'app/core/utils/query';
import { CloudWatchAnnotationQuery, CloudWatchMetricsQuery } from './types';
// Splits a legacy CloudWatch metrics query that holds several statistics into
// one query per statistic. The original query keeps the first statistic; a new
// query is created for every remaining statistic, given a fresh refId, and
// appended to `panelQueries` (so subsequent refId generation sees it). The
// deprecated `statistics` property is removed from every query involved.
// Returns only the newly created queries.
export function migrateMultipleStatsMetricsQuery(
  query: CloudWatchMetricsQuery,
  panelQueries: DataQuery[]
): DataQuery[] {
  const newQueries: CloudWatchMetricsQuery[] = [];
  if (query?.statistics && query.statistics.length) {
    query.statistic = query.statistics[0];
    // slice(1) (non-mutating) rather than splice(1): the input array should not
    // be truncated while we are still deriving queries from it; the deprecated
    // prop is deleted below anyway, so the observable result is the same.
    for (const stat of query.statistics.slice(1)) {
      newQueries.push({ ...query, statistic: stat });
    }
  }
  for (const newTarget of newQueries) {
    newTarget.refId = getNextRefIdChar(panelQueries);
    delete newTarget.statistics;
    panelQueries.push(newTarget);
  }
  delete query.statistics;
  return newQueries;
}
// Splits a legacy CloudWatch annotation query with several statistics into one
// annotation per statistic. The original annotation keeps the first statistic;
// each remaining statistic becomes a new annotation whose name gets the
// statistic as a suffix. If any annotations were created, the original's name
// is suffixed as well. The deprecated `statistics` property is removed.
// Returns only the newly created annotations.
export function migrateMultipleStatsAnnotationQuery(
  annotationQuery: CloudWatchAnnotationQuery
): Array<AnnotationQuery<DataQuery>> {
  const newAnnotations: CloudWatchAnnotationQuery[] = [];

  if (annotationQuery?.statistics && annotationQuery.statistics.length) {
    // Hoisted out of the loop (loop-invariant); `base` drops the deprecated
    // `statistics` prop and the name, which are set per-annotation below.
    const { statistics, name, ...base } = annotationQuery;
    // slice(1) (non-mutating) instead of splice(1): same result, no surprise
    // truncation of the caller's array while iterating.
    for (const stat of statistics.slice(1)) {
      newAnnotations.push({ ...base, statistic: stat, name: `${name} - ${stat}` });
    }
    annotationQuery.statistic = statistics[0];
    // Only change the name of the original if new annotations have been created
    if (newAnnotations.length !== 0) {
      annotationQuery.name = `${annotationQuery.name} - ${annotationQuery.statistic}`;
    }
    delete annotationQuery.statistics;
  }

  return newAnnotations as Array<AnnotationQuery<DataQuery>>;
}

View File

@ -1,4 +1,3 @@
import './query_parameter_ctrl';
import { DataSourcePlugin } from '@grafana/data';
import { ConfigEditor } from './components/ConfigEditor';
import { CloudWatchDatasource } from './datasource';

View File

@ -1,241 +0,0 @@
import angular from 'angular';
import coreModule from 'app/core/core_module';
import { each, flatten, isEmpty, map, reduce } from 'lodash';
import { TemplateSrv } from '@grafana/runtime';
export class CloudWatchQueryParameterCtrl {
/** @ngInject */
constructor($scope: any, templateSrv: TemplateSrv, uiSegmentSrv: any) {
$scope.init = () => {
const target = $scope.target;
target.namespace = target.namespace || '';
target.metricName = target.metricName || '';
target.statistics = target.statistics || ['Average'];
target.dimensions = target.dimensions || {};
target.period = target.period || '';
target.region = target.region || 'default';
target.id = target.id || '';
target.expression = target.expression || '';
$scope.regionSegment = uiSegmentSrv.getSegmentForValue($scope.target.region, 'select region');
$scope.namespaceSegment = uiSegmentSrv.getSegmentForValue($scope.target.namespace, 'select namespace');
$scope.metricSegment = uiSegmentSrv.getSegmentForValue($scope.target.metricName, 'select metric');
$scope.dimSegments = reduce(
$scope.target.dimensions,
(memo, value, key) => {
memo.push(uiSegmentSrv.newKey(key));
memo.push(uiSegmentSrv.newOperator('='));
memo.push(uiSegmentSrv.newKeyValue(value));
return memo;
},
[] as any
);
$scope.statSegments = map($scope.target.statistics, (stat) => {
return uiSegmentSrv.getSegmentForValue(stat);
});
$scope.ensurePlusButton($scope.statSegments);
$scope.ensurePlusButton($scope.dimSegments);
$scope.removeDimSegment = uiSegmentSrv.newSegment({
fake: true,
value: '-- remove dimension --',
});
$scope.removeStatSegment = uiSegmentSrv.newSegment({
fake: true,
value: '-- remove stat --',
});
if (isEmpty($scope.target.region)) {
$scope.target.region = 'default';
}
if (!$scope.onChange) {
$scope.onChange = () => {};
}
};
$scope.getStatSegments = () => {
return Promise.resolve(
flatten([
angular.copy($scope.removeStatSegment),
map($scope.datasource.standardStatistics, (s) => {
return uiSegmentSrv.getSegmentForValue(s);
}),
uiSegmentSrv.getSegmentForValue('pNN.NN'),
])
);
};
$scope.statSegmentChanged = (segment: any, index: number) => {
if (segment.value === $scope.removeStatSegment.value) {
$scope.statSegments.splice(index, 1);
} else {
segment.type = 'value';
}
$scope.target.statistics = reduce(
$scope.statSegments,
(memo, seg) => {
if (!seg.fake) {
memo.push(seg.value);
}
return memo;
},
[] as any
);
$scope.ensurePlusButton($scope.statSegments);
$scope.onChange();
};
$scope.ensurePlusButton = (segments: any) => {
const count = segments.length;
const lastSegment = segments[Math.max(count - 1, 0)];
if (!lastSegment || lastSegment.type !== 'plus-button') {
segments.push(uiSegmentSrv.newPlusButton());
}
};
$scope.getDimSegments = (segment: any, $index: number) => {
if (segment.type === 'operator') {
return Promise.resolve([]);
}
const target = $scope.target;
let query = Promise.resolve([] as any[]);
if (segment.type === 'key' || segment.type === 'plus-button') {
query = $scope.datasource.getDimensionKeys($scope.target.namespace, $scope.target.region);
} else if (segment.type === 'value') {
const dimensionKey = $scope.dimSegments[$index - 2].value;
delete target.dimensions[dimensionKey];
query = $scope.datasource.getDimensionValues(
target.region,
target.namespace,
target.metricName,
dimensionKey,
target.dimensions
);
}
return query.then($scope.transformToSegments(true)).then((results) => {
if (segment.type === 'key') {
results.splice(0, 0, angular.copy($scope.removeDimSegment));
}
return results;
});
};
$scope.dimSegmentChanged = (segment: any, index: number) => {
$scope.dimSegments[index] = segment;
if (segment.value === $scope.removeDimSegment.value) {
$scope.dimSegments.splice(index, 3);
} else if (segment.type === 'plus-button') {
$scope.dimSegments.push(uiSegmentSrv.newOperator('='));
$scope.dimSegments.push(uiSegmentSrv.newFake('select dimension value', 'value', 'query-segment-value'));
segment.type = 'key';
segment.cssClass = 'query-segment-key';
}
$scope.syncDimSegmentsWithModel();
$scope.ensurePlusButton($scope.dimSegments);
$scope.onChange();
};
$scope.syncDimSegmentsWithModel = () => {
const dims: any = {};
const length = $scope.dimSegments.length;
for (let i = 0; i < length - 2; i += 3) {
const keySegment = $scope.dimSegments[i];
const valueSegment = $scope.dimSegments[i + 2];
if (!valueSegment.fake) {
dims[keySegment.value] = valueSegment.value;
}
}
$scope.target.dimensions = dims;
};
$scope.getRegions = () => {
return $scope.datasource
.metricFindQuery('regions()')
.then((results: any) => {
results.unshift({ text: 'default' });
return results;
})
.then($scope.transformToSegments(true));
};
$scope.getNamespaces = () => {
return $scope.datasource.metricFindQuery('namespaces()').then($scope.transformToSegments(true));
};
$scope.getMetrics = () => {
return $scope.datasource
.metricFindQuery('metrics(' + $scope.target.namespace + ',' + $scope.target.region + ')')
.then($scope.transformToSegments(true));
};
$scope.regionChanged = () => {
$scope.target.region = $scope.regionSegment.value;
$scope.onChange();
};
$scope.namespaceChanged = () => {
$scope.target.namespace = $scope.namespaceSegment.value;
$scope.onChange();
};
$scope.metricChanged = () => {
$scope.target.metricName = $scope.metricSegment.value;
$scope.onChange();
};
$scope.transformToSegments = (addTemplateVars: any) => {
return (results: any) => {
const segments = map(results, (segment) => {
return uiSegmentSrv.newSegment({
value: segment.text,
expandable: segment.expandable,
});
});
if (addTemplateVars) {
each(templateSrv.getVariables(), (variable) => {
segments.unshift(
uiSegmentSrv.newSegment({
type: 'template',
value: '$' + variable.name,
expandable: true,
})
);
});
}
return segments;
};
};
$scope.init();
}
}
export function cloudWatchQueryParameter() {
return {
templateUrl: 'public/app/plugins/datasource/cloudwatch/partials/query.parameter.html',
controller: CloudWatchQueryParameterCtrl,
restrict: 'E',
scope: {
target: '=',
datasource: '=',
onChange: '&',
},
};
}
coreModule.directive('cloudwatchQueryParameter', cloudWatchQueryParameter);

View File

@ -2,7 +2,6 @@ import { interval, lastValueFrom, of, throwError } from 'rxjs';
import {
DataFrame,
DataQueryErrorType,
DataQueryRequest,
DataQueryResponse,
DataSourceInstanceSettings,
dateMath,
@ -17,7 +16,6 @@ import {
CloudWatchLogsQuery,
CloudWatchLogsQueryStatus,
CloudWatchMetricsQuery,
CloudWatchQuery,
LogAction,
} from '../types';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
@ -378,7 +376,7 @@ describe('CloudWatchDatasource', () => {
dimensions: {
InstanceId: 'i-12345678',
},
statistics: ['Average'],
statistic: 'Average',
period: '300',
},
],
@ -419,7 +417,7 @@ describe('CloudWatchDatasource', () => {
namespace: query.targets[0].namespace,
metricName: query.targets[0].metricName,
dimensions: { InstanceId: ['i-12345678'] },
statistics: query.targets[0].statistics,
statistic: query.targets[0].statistic,
period: query.targets[0].period,
}),
])
@ -457,7 +455,7 @@ describe('CloudWatchDatasource', () => {
dimensions: {
InstanceId: 'i-12345678',
},
statistics: ['Average'],
statistic: 'Average',
period: '[[period]]',
},
],
@ -470,30 +468,6 @@ describe('CloudWatchDatasource', () => {
});
});
it.each(['pNN.NN', 'p9', 'p99.', 'p99.999'])('should cancel query for invalid extended statistics (%s)', (stat) => {
const { ds } = getTestContext({ response });
const query: DataQueryRequest<CloudWatchQuery> = ({
range: defaultTimeRange,
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'AWS/EC2',
metricName: 'CPUUtilization',
dimensions: {
InstanceId: 'i-12345678',
},
statistics: [stat],
period: '60s',
},
],
} as unknown) as DataQueryRequest<CloudWatchQuery>;
expect(ds.query.bind(ds, query)).toThrow(/Invalid extended statistics/);
});
it('should return series list', async () => {
const { ds } = getTestContext({ response });
@ -512,7 +486,7 @@ describe('CloudWatchDatasource', () => {
dimensions: {
InstanceId: 'i-12345678',
},
statistics: ['Average'],
statistic: 'Average',
period: '300',
expression: '',
};
@ -645,7 +619,7 @@ describe('CloudWatchDatasource', () => {
dimensions: {
InstanceId: 'i-12345678',
},
statistics: ['Average'],
statistic: 'Average',
period: '300s',
},
],
@ -704,7 +678,7 @@ describe('CloudWatchDatasource', () => {
[`$${variableName}`]: `$${variableName}`,
},
matchExact: false,
statistics: [],
statistic: '',
};
ds.interpolateVariablesInQueries([logQuery], {});
@ -715,7 +689,7 @@ describe('CloudWatchDatasource', () => {
});
});
describe('When performing CloudWatch query for extended statistics', () => {
describe('When performing CloudWatch query for extended statistic', () => {
const query: any = {
range: defaultTimeRange,
rangeRaw: { from: 1483228800, to: 1483232400 },
@ -730,7 +704,7 @@ describe('CloudWatchDatasource', () => {
LoadBalancer: 'lb',
TargetGroup: 'tg',
},
statistics: ['p90.00'],
statistic: 'p90.00',
period: '300s',
},
],
@ -856,7 +830,7 @@ describe('CloudWatchDatasource', () => {
dimensions: {
dim2: '[[var2]]',
},
statistics: ['Average'],
statistic: 'Average',
period: '300s',
},
],
@ -884,7 +858,7 @@ describe('CloudWatchDatasource', () => {
dim2: '[[var2]]',
dim3: '[[var3]]',
},
statistics: ['Average'],
statistic: 'Average',
period: '300s',
},
],
@ -918,7 +892,7 @@ describe('CloudWatchDatasource', () => {
dim3: '[[var3]]',
dim4: '[[var4]]',
},
statistics: ['Average'],
statistic: 'Average',
period: '300s',
},
],
@ -948,7 +922,7 @@ describe('CloudWatchDatasource', () => {
dim2: '[[var2]]',
dim3: '[[var3]]',
},
statistics: ['Average'],
statistic: 'Average',
period: '300',
},
],

View File

@ -11,7 +11,11 @@ export interface CloudWatchMetricsQuery extends DataQuery {
metricName: string;
dimensions: { [key: string]: string | string[] };
statistics: string[];
statistic: string;
/**
* @deprecated use statistic
*/
statistics?: string[];
period: string;
alias: string;
matchExact: boolean;
@ -49,7 +53,10 @@ export type CloudWatchQuery = CloudWatchMetricsQuery | CloudWatchLogsQuery;
export const isCloudWatchLogsQuery = (cloudwatchQuery: CloudWatchQuery): cloudwatchQuery is CloudWatchLogsQuery =>
(cloudwatchQuery as CloudWatchLogsQuery).queryMode === 'Logs';
export interface AnnotationQuery extends CloudWatchMetricsQuery {
export interface CloudWatchAnnotationQuery extends CloudWatchMetricsQuery {
enable: boolean;
name: string;
iconColor: string;
prefixMatching: boolean;
actionPrefix: string;
alarmNamePrefix: string;
@ -320,17 +327,8 @@ export interface MetricQuery {
// IntervalMs int64
// }
export interface CloudWatchMetricsAnnotation {
namespace: string;
metricName: string;
expression: string;
dimensions: {};
region: string;
export interface ExecutedQueryPreview {
id: string;
alias: string;
statistics: string[];
matchExact: true;
prefixMatching: false;
actionPrefix: string;
alarmNamePrefix: string;
executedQuery: string;
period: string;
}