CloudWatch: Fix duplicated time series (#35433)

* make sure queries are only run once (see the sketch below)

* test aliases

* use correct dates
Erik Sundell, 2021-06-10 10:23:17 +02:00, committed by GitHub
parent e3afb63e62
commit eff2410bae
4 changed files with 193 additions and 97 deletions
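The bullets in the commit message describe the fix at a high level: the old code looped over every query in the request and executed the whole batch once per iteration, which duplicated the returned time series. Below is a minimal, self-contained sketch of the control flow the commit moves to; it is not the actual Grafana CloudWatch plugin code, and miniQuery, miniResponse, responseWrapper (as used here), and executeOnce are invented names for illustration. The idea: read the shared time range once from the first query, validate it, run the batch a single time, and key every result by the RefID carried in a wrapper.

package main

import (
    "errors"
    "fmt"
    "time"
)

// miniQuery and miniResponse are stand-ins for backend.DataQuery and
// backend.DataResponse; they are not the real plugin SDK types.
type miniQuery struct {
    RefID    string
    From, To time.Time
}

type miniResponse struct {
    Series string
}

// responseWrapper mirrors the idea introduced by the commit: pair every
// response with the RefID it belongs to.
type responseWrapper struct {
    RefID string
    Resp  miniResponse
}

// executeOnce runs the whole batch of queries exactly one time, instead of
// once per query as the buggy outer loop did.
func executeOnce(queries []miniQuery) (map[string]miniResponse, error) {
    if len(queries) == 0 {
        return nil, errors.New("request contains no queries")
    }

    // The time range is assumed to be identical for all queries, so it is
    // read from the first one only.
    startTime, endTime := queries[0].From, queries[0].To
    if !startTime.Before(endTime) {
        return nil, fmt.Errorf("invalid time range: start time must be before end time")
    }

    results := make(chan responseWrapper, len(queries))
    for _, q := range queries {
        // Stand-in for the real per-region CloudWatch request.
        results <- responseWrapper{RefID: q.RefID, Resp: miniResponse{Series: q.RefID + "-series"}}
    }
    close(results)

    // Responses are keyed by the RefID carried in the wrapper, not by a
    // loop variable from an enclosing loop.
    out := make(map[string]miniResponse, len(queries))
    for r := range results {
        out[r.RefID] = r.Resp
    }
    return out, nil
}

func main() {
    now := time.Now()
    res, err := executeOnce([]miniQuery{
        {RefID: "A", From: now.Add(-2 * time.Hour), To: now.Add(-time.Hour)},
        {RefID: "B", From: now.Add(-2 * time.Hour), To: now.Add(-time.Hour)},
    })
    if err != nil {
        panic(err)
    }
    fmt.Println(res["A"].Series, res["B"].Series)
}

The real implementation, shown in the time_series_query hunks below, additionally fans the work out per region inside an errgroup and streams responseWrapper values over a buffered channel.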


@@ -15,9 +15,9 @@ import (
)
// Parses the json queries and returns a requestQuery. The requestQuery has a 1 to 1 mapping to a query editor row
-func (e *cloudWatchExecutor) parseQueries(req *backend.QueryDataRequest, startTime time.Time, endTime time.Time) (map[string][]*requestQuery, error) {
+func (e *cloudWatchExecutor) parseQueries(queries []backend.DataQuery, startTime time.Time, endTime time.Time) (map[string][]*requestQuery, error) {
requestQueries := make(map[string][]*requestQuery)
-for _, query := range req.Queries {
+for _, query := range queries {
model, err := simplejson.NewJson(query.JSON)
if err != nil {
return nil, &queryError{err: err, RefID: query.RefID}


@@ -51,12 +51,17 @@ func (m FakeCWLogsClient) GetLogGroupFieldsWithContext(ctx context.Context, inpu
type FakeCWClient struct {
cloudwatchiface.CloudWatchAPI
+cloudwatch.GetMetricDataOutput
Metrics []*cloudwatch.Metric
MetricsPerPage int
}
+func (c FakeCWClient) GetMetricDataWithContext(aws.Context, *cloudwatch.GetMetricDataInput, ...request.Option) (*cloudwatch.GetMetricDataOutput, error) {
+return &c.GetMetricDataOutput, nil
+}
func (c FakeCWClient) ListMetricsPages(input *cloudwatch.ListMetricsInput, fn func(*cloudwatch.ListMetricsOutput, bool) bool) error {
if c.MetricsPerPage == 0 {
c.MetricsPerPage = 1000

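In the tests further down, this fake is injected through the package-level NewCWClient variable: the original constructor is saved, replaced with a closure that returns the fake, and restored via t.Cleanup. The sketch below illustrates that seam pattern in isolation; it is only an assumption-level miniature, and Client, realClient, fakeClient, and newClient are invented stand-ins for the real cloudwatchiface types and constructor.

package seam

import "testing"

// Client is a stand-in for cloudwatchiface.CloudWatchAPI in this sketch.
type Client interface {
    Fetch() string
}

type realClient struct{}

func (realClient) Fetch() string { return "real data" }

type fakeClient struct{ canned string }

func (f fakeClient) Fetch() string { return f.canned }

// newClient is the package-level seam: production code calls it, tests
// overwrite it. It mirrors how NewCWClient is swapped in the test diff below.
var newClient = func() Client { return realClient{} }

func TestUsesFakeClient(t *testing.T) {
    orig := newClient
    t.Cleanup(func() { newClient = orig }) // restore the real constructor afterwards

    newClient = func() Client { return fakeClient{canned: "stubbed"} }

    if got := newClient().Fetch(); got != "stubbed" {
        t.Fatalf("expected stubbed response, got %q", got)
    }
}

Keeping the override and its t.Cleanup restore next to each other, as the diffed test does, ensures later tests see the real constructor again.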

@@ -5,24 +5,31 @@ import (
"fmt"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/log"
"golang.org/x/sync/errgroup"
)
+type responseWrapper struct {
+DataResponse *backend.DataResponse
+RefId string
+}
func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
plog.Debug("Executing time series query")
resp := backend.NewQueryDataResponse()
-for _, q := range req.Queries {
-startTime := q.TimeRange.From
-endTime := q.TimeRange.To
+if len(req.Queries) == 0 {
+return nil, fmt.Errorf("request contains no queries")
+}
+// startTime and endTime are always the same for all queries
+startTime := req.Queries[0].TimeRange.From
+endTime := req.Queries[0].TimeRange.To
if !startTime.Before(endTime) {
return nil, fmt.Errorf("invalid time range: start time must be before end time")
}
-requestQueriesByRegion, err := e.parseQueries(req, startTime, endTime)
+requestQueriesByRegion, err := e.parseQueries(req.Queries, startTime, endTime)
if err != nil {
return nil, err
}
@@ -31,7 +38,7 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
return backend.NewQueryDataResponse(), nil
}
-resultChan := make(chan *backend.DataResponse, len(req.Queries))
+resultChan := make(chan *responseWrapper, len(req.Queries))
eg, ectx := errgroup.WithContext(ctx)
for r, q := range requestQueriesByRegion {
requestQueries := q
@@ -41,8 +48,10 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
if err := recover(); err != nil {
plog.Error("Execute Get Metric Data Query Panic", "error", err, "stack", log.Stack(1))
if theErr, ok := err.(error); ok {
-resultChan <- &backend.DataResponse{
+resultChan <- &responseWrapper{
+DataResponse: &backend.DataResponse{
Error: theErr,
+},
}
}
}
@@ -55,13 +64,7 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
queries, err := e.transformRequestQueriesToCloudWatchQueries(requestQueries)
if err != nil {
-for _, query := range requestQueries {
-resultChan <- &backend.DataResponse{
-Frames: data.Frames{data.NewFrame(query.RefId)},
-Error: err,
-}
-}
-return nil
+return err
}
metricDataInput, err := e.buildMetricDataInput(startTime, endTime, queries)
@@ -69,43 +72,26 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
return err
}
-cloudwatchResponses := make([]*cloudwatchResponse, 0)
mdo, err := e.executeRequest(ectx, client, metricDataInput)
if err != nil {
-for _, query := range requestQueries {
-resultChan <- &backend.DataResponse{
-Frames: data.Frames{data.NewFrame(query.RefId)},
-Error: err,
-}
-}
-return nil
+return err
}
responses, err := e.parseResponse(mdo, queries)
if err != nil {
-for _, query := range requestQueries {
-resultChan <- &backend.DataResponse{
-Frames: data.Frames{data.NewFrame(query.RefId)},
-Error: err,
-}
-}
-return nil
+return err
}
-cloudwatchResponses = append(cloudwatchResponses, responses...)
-res, err := e.transformQueryResponsesToQueryResult(cloudwatchResponses, requestQueries, startTime, endTime)
+res, err := e.transformQueryResponsesToQueryResult(responses, requestQueries, startTime, endTime)
if err != nil {
-for _, query := range requestQueries {
-resultChan <- &backend.DataResponse{
-Frames: data.Frames{data.NewFrame(query.RefId)},
-Error: err,
-}
-}
-return nil
+return err
}
-for _, queryRes := range res {
-resultChan <- queryRes
+for refID, queryRes := range res {
+resultChan <- &responseWrapper{
+DataResponse: queryRes,
+RefId: refID,
+}
}
return nil
})
@@ -117,8 +103,7 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, req *ba
close(resultChan)
for result := range resultChan {
-resp.Responses[q.RefID] = *result
-}
+resp.Responses[result.RefId] = *result.DataResponse
}
return resp, nil


@@ -2,18 +2,124 @@ package cloudwatch
import (
"context"
"encoding/json"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/aws/aws-sdk-go/service/cloudwatch/cloudwatchiface"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestTimeSeriesQuery(t *testing.T) {
executor := newExecutor(nil, nil, newTestConfig(), fakeSessionCache{})
now := time.Now()
origNewCWClient := NewCWClient
t.Cleanup(func() {
NewCWClient = origNewCWClient
})
var cwClient FakeCWClient
NewCWClient = func(sess *session.Session) cloudwatchiface.CloudWatchAPI {
return cwClient
}
t.Run("Custom metrics", func(t *testing.T) {
cwClient = FakeCWClient{
CloudWatchAPI: nil,
GetMetricDataOutput: cloudwatch.GetMetricDataOutput{
NextToken: nil,
Messages: []*cloudwatch.MessageData{},
MetricDataResults: []*cloudwatch.MetricDataResult{
{
StatusCode: aws.String("Complete"), Id: aws.String("a"), Label: aws.String("NetworkOut"), Values: []*float64{aws.Float64(1.0)}, Timestamps: []*time.Time{&now},
},
{
StatusCode: aws.String("Complete"), Id: aws.String("b"), Label: aws.String("NetworkIn"), Values: []*float64{aws.Float64(1.0)}, Timestamps: []*time.Time{&now},
},
},
},
}
im := datasource.NewInstanceManager(func(s backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
return datasourceInfo{}, nil
})
executor := newExecutor(nil, im, newTestConfig(), fakeSessionCache{})
resp, err := executor.QueryData(context.Background(), &backend.QueryDataRequest{
PluginContext: backend.PluginContext{
DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{},
},
Queries: []backend.DataQuery{
{
RefID: "A",
TimeRange: backend.TimeRange{
From: now.Add(time.Hour * -2),
To: now.Add(time.Hour * -1),
},
JSON: json.RawMessage(`{
"type": "timeSeriesQuery",
"subtype": "metrics",
"namespace": "AWS/EC2",
"metricName": "NetworkOut",
"expression": "",
"dimensions": {
"InstanceId": "i-00645d91ed77d87ac"
},
"region": "us-east-2",
"id": "a",
"alias": "NetworkOut",
"statistics": [
"Maximum"
],
"period": "300",
"hide": false,
"matchExact": true,
"refId": "A"
}`),
},
{
RefID: "B",
TimeRange: backend.TimeRange{
From: now.Add(time.Hour * -2),
To: now.Add(time.Hour * -1),
},
JSON: json.RawMessage(`{
"type": "timeSeriesQuery",
"subtype": "metrics",
"namespace": "AWS/EC2",
"metricName": "NetworkIn",
"expression": "",
"dimensions": {
"InstanceId": "i-00645d91ed77d87ac"
},
"region": "us-east-2",
"id": "b",
"alias": "NetworkIn",
"statistics": [
"Maximum"
],
"period": "300",
"matchExact": true,
"refId": "B"
}`),
},
},
})
require.NoError(t, err)
assert.Equal(t, "NetworkOut", resp.Responses["A"].Frames[0].Name)
assert.Equal(t, "NetworkIn", resp.Responses["B"].Frames[0].Name)
})
t.Run("End time before start time should result in error", func(t *testing.T) {
_, err := executor.executeTimeSeriesQuery(context.TODO(), &backend.QueryDataRequest{Queries: []backend.DataQuery{{TimeRange: backend.TimeRange{
From: now.Add(time.Hour * -1),