From 776283985336a7d5f30b1d8070c90f22e46c4fbb Mon Sep 17 00:00:00 2001 From: Arve Knudsen Date: Tue, 8 Sep 2020 13:35:17 +0200 Subject: [PATCH] Convert CloudWatch to use dataframes (#26702) * CloudWatch: Convert metrics queries to return data frames Signed-off-by: Arve Knudsen --- .../cloudwatch/get_metric_data_executor.go | 4 +- .../get_metric_data_executor_test.go | 2 + pkg/tsdb/cloudwatch/log_query.go | 8 +- pkg/tsdb/cloudwatch/query_transformer.go | 22 +++-- pkg/tsdb/cloudwatch/request_parser.go | 7 +- pkg/tsdb/cloudwatch/response_parser.go | 93 ++++++++++++------- pkg/tsdb/cloudwatch/response_parser_test.go | 83 +++++++++-------- pkg/tsdb/cloudwatch/time_series_query.go | 6 +- pkg/tsdb/cloudwatch/types.go | 11 +-- pkg/tsdb/models.go | 2 +- 10 files changed, 134 insertions(+), 104 deletions(-) diff --git a/pkg/tsdb/cloudwatch/get_metric_data_executor.go b/pkg/tsdb/cloudwatch/get_metric_data_executor.go index 4ca62c46e76..c81688b40da 100644 --- a/pkg/tsdb/cloudwatch/get_metric_data_executor.go +++ b/pkg/tsdb/cloudwatch/get_metric_data_executor.go @@ -5,10 +5,12 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/cloudwatch" + "github.com/aws/aws-sdk-go/service/cloudwatch/cloudwatchiface" "github.com/grafana/grafana/pkg/infra/metrics" ) -func (e *cloudWatchExecutor) executeRequest(ctx context.Context, client cloudWatchClient, metricDataInput *cloudwatch.GetMetricDataInput) ([]*cloudwatch.GetMetricDataOutput, error) { +func (e *cloudWatchExecutor) executeRequest(ctx context.Context, client cloudwatchiface.CloudWatchAPI, + metricDataInput *cloudwatch.GetMetricDataInput) ([]*cloudwatch.GetMetricDataOutput, error) { mdo := make([]*cloudwatch.GetMetricDataOutput, 0) nextToken := "" diff --git a/pkg/tsdb/cloudwatch/get_metric_data_executor_test.go b/pkg/tsdb/cloudwatch/get_metric_data_executor_test.go index 090281dc334..46612e4ed43 100644 --- a/pkg/tsdb/cloudwatch/get_metric_data_executor_test.go +++ b/pkg/tsdb/cloudwatch/get_metric_data_executor_test.go @@ -7,6 +7,7 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/cloudwatch" + "github.com/aws/aws-sdk-go/service/cloudwatch/cloudwatchiface" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -14,6 +15,7 @@ import ( var counter = 1 type cloudWatchFakeClient struct { + cloudwatchiface.CloudWatchAPI } func (client *cloudWatchFakeClient) GetMetricDataWithContext(ctx aws.Context, input *cloudwatch.GetMetricDataInput, opts ...request.Option) (*cloudwatch.GetMetricDataOutput, error) { diff --git a/pkg/tsdb/cloudwatch/log_query.go b/pkg/tsdb/cloudwatch/log_query.go index 3f8941184d0..10d02b84661 100644 --- a/pkg/tsdb/cloudwatch/log_query.go +++ b/pkg/tsdb/cloudwatch/log_query.go @@ -17,8 +17,8 @@ func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*d } nonEmptyRows := make([][]*cloudwatchlogs.ResultField, 0) - // Sometimes CloudWatch can send empty rows for _, row := range response.Results { + // Sometimes CloudWatch can send empty rows if len(row) == 0 { continue } @@ -26,7 +26,7 @@ func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*d if row[0].Value == nil { continue } - // Sometimes it sends row with only timestamp + // Sometimes it sends rows with only timestamp if _, err := time.Parse(cloudWatchTSFormat, *row[0].Value); err == nil { continue } @@ -52,7 +52,7 @@ func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*d if _, exists := 
fieldValues[*resultField.Field]; !exists { fieldNames = append(fieldNames, *resultField.Field) - // Check if field is time field + // Check if it's a time field if _, err := time.Parse(cloudWatchTSFormat, *resultField.Value); err == nil { fieldValues[*resultField.Field] = make([]*time.Time, rowCount) } else if _, err := strconv.ParseFloat(*resultField.Value, 64); err == nil { @@ -81,7 +81,7 @@ func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*d } } - newFields := make([]*data.Field, 0) + newFields := make([]*data.Field, 0, len(fieldNames)) for _, fieldName := range fieldNames { newFields = append(newFields, data.NewField(fieldName, nil, fieldValues[fieldName])) diff --git a/pkg/tsdb/cloudwatch/query_transformer.go b/pkg/tsdb/cloudwatch/query_transformer.go index ca4b207aafc..8a17f7a73bb 100644 --- a/pkg/tsdb/cloudwatch/query_transformer.go +++ b/pkg/tsdb/cloudwatch/query_transformer.go @@ -5,6 +5,7 @@ import ( "sort" "strings" + "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/tsdb" ) @@ -13,7 +14,9 @@ import ( // has more than one statistic defined, one cloudwatchQuery will be created for each statistic. // If the query doesn't have an Id defined by the user, we'll give it an with format `query[RefId]`. In the case // the incoming query had more than one stat, it will ge an id like `query[RefId]_[StatName]`, eg queryC_Average -func (e *cloudWatchExecutor) transformRequestQueriesToCloudWatchQueries(requestQueries []*requestQuery) (map[string]*cloudWatchQuery, error) { +func (e *cloudWatchExecutor) transformRequestQueriesToCloudWatchQueries(requestQueries []*requestQuery) ( + map[string]*cloudWatchQuery, error) { + plog.Debug("Transforming CloudWatch request queries") cloudwatchQueries := make(map[string]*cloudWatchQuery) for _, requestQuery := range requestQueries { for _, stat := range requestQuery.Statistics { @@ -52,17 +55,22 @@ func (e *cloudWatchExecutor) transformRequestQueriesToCloudWatchQueries(requestQ func (e *cloudWatchExecutor) transformQueryResponseToQueryResult(cloudwatchResponses []*cloudwatchResponse) map[string]*tsdb.QueryResult { responsesByRefID := make(map[string][]*cloudwatchResponse) + refIDs := sort.StringSlice{} for _, res := range cloudwatchResponses { + refIDs = append(refIDs, res.RefId) responsesByRefID[res.RefId] = append(responsesByRefID[res.RefId], res) } + // Ensure stable results + refIDs.Sort() results := make(map[string]*tsdb.QueryResult) - for refID, responses := range responsesByRefID { + for _, refID := range refIDs { + responses := responsesByRefID[refID] queryResult := tsdb.NewQueryResult() queryResult.RefId = refID queryResult.Meta = simplejson.New() queryResult.Series = tsdb.TimeSeriesSlice{} - timeSeries := make(tsdb.TimeSeriesSlice, 0) + frames := make(data.Frames, 0, len(responses)) requestExceededMaxLimit := false partialData := false @@ -72,7 +80,7 @@ func (e *cloudWatchExecutor) transformQueryResponseToQueryResult(cloudwatchRespo }{} for _, response := range responses { - timeSeries = append(timeSeries, *response.series...) + frames = append(frames, response.DataFrames...) 
requestExceededMaxLimit = requestExceededMaxLimit || response.RequestExceededMaxLimit partialData = partialData || response.PartialData queryMeta = append(queryMeta, struct { @@ -85,8 +93,8 @@ func (e *cloudWatchExecutor) transformQueryResponseToQueryResult(cloudwatchRespo }) } - sort.Slice(timeSeries, func(i, j int) bool { - return timeSeries[i].Name < timeSeries[j].Name + sort.Slice(frames, func(i, j int) bool { + return frames[i].Name < frames[j].Name }) if requestExceededMaxLimit { @@ -96,7 +104,7 @@ func (e *cloudWatchExecutor) transformQueryResponseToQueryResult(cloudwatchRespo queryResult.ErrorString = "Cloudwatch GetMetricData error: Too many datapoints requested - your search has been limited. Please try to reduce the time range" } - queryResult.Series = append(queryResult.Series, timeSeries...) + queryResult.Dataframes = tsdb.NewDecodedDataFrames(frames) queryResult.Meta.Set("gmdMeta", queryMeta) results[refID] = queryResult } diff --git a/pkg/tsdb/cloudwatch/request_parser.go b/pkg/tsdb/cloudwatch/request_parser.go index f0a497c19d0..ab8980fb306 100644 --- a/pkg/tsdb/cloudwatch/request_parser.go +++ b/pkg/tsdb/cloudwatch/request_parser.go @@ -17,13 +17,13 @@ import ( // Parses the json queries and returns a requestQuery. The requestQuery has a 1 to 1 mapping to a query editor row func (e *cloudWatchExecutor) parseQueries(queryContext *tsdb.TsdbQuery, startTime time.Time, endTime time.Time) (map[string][]*requestQuery, error) { requestQueries := make(map[string][]*requestQuery) - for i, model := range queryContext.Queries { - queryType := model.Model.Get("type").MustString() + for i, query := range queryContext.Queries { + queryType := query.Model.Get("type").MustString() if queryType != "timeSeriesQuery" && queryType != "" { continue } - refID := queryContext.Queries[i].RefId + refID := query.RefId query, err := parseRequestQuery(queryContext.Queries[i].Model, refID, startTime, endTime) if err != nil { return nil, &queryError{err: err, RefID: refID} @@ -39,6 +39,7 @@ func (e *cloudWatchExecutor) parseQueries(queryContext *tsdb.TsdbQuery, startTim } func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time, endTime time.Time) (*requestQuery, error) { + plog.Debug("Parsing request query", "query", model) reNumber := regexp.MustCompile(`^\d+$`) region, err := model.Get("region").String() if err != nil { diff --git a/pkg/tsdb/cloudwatch/response_parser.go b/pkg/tsdb/cloudwatch/response_parser.go index c05308302d1..9977b03e8a7 100644 --- a/pkg/tsdb/cloudwatch/response_parser.go +++ b/pkg/tsdb/cloudwatch/response_parser.go @@ -8,11 +8,12 @@ import ( "time" "github.com/aws/aws-sdk-go/service/cloudwatch" - "github.com/grafana/grafana/pkg/components/null" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana-plugin-sdk-go/data" ) -func (e *cloudWatchExecutor) parseResponse(metricDataOutputs []*cloudwatch.GetMetricDataOutput, queries map[string]*cloudWatchQuery) ([]*cloudwatchResponse, error) { +func (e *cloudWatchExecutor) parseResponse(metricDataOutputs []*cloudwatch.GetMetricDataOutput, + queries map[string]*cloudWatchQuery) ([]*cloudwatchResponse, error) { + plog.Debug("Parsing metric data output", "queries", queries) // Map from result ID -> label -> result mdrs := make(map[string]map[string]*cloudwatch.MetricDataResult) labels := map[string][]string{} @@ -48,14 +49,15 @@ func (e *cloudWatchExecutor) parseResponse(metricDataOutputs []*cloudwatch.GetMe cloudWatchResponses := make([]*cloudwatchResponse, 0) for id, lr := range mdrs { + 
plog.Debug("Handling metric data results", "id", id, "lr", lr) query := queries[id] - series, partialData, err := parseGetMetricDataTimeSeries(lr, labels[id], query) + frames, partialData, err := parseGetMetricDataTimeSeries(lr, labels[id], query) if err != nil { return nil, err } response := &cloudwatchResponse{ - series: series, + DataFrames: frames, Period: query.Period, Expression: query.UsedExpression, RefId: query.RefId, @@ -70,12 +72,15 @@ func (e *cloudWatchExecutor) parseResponse(metricDataOutputs []*cloudwatch.GetMe } func parseGetMetricDataTimeSeries(metricDataResults map[string]*cloudwatch.MetricDataResult, labels []string, - query *cloudWatchQuery) (*tsdb.TimeSeriesSlice, bool, error) { + query *cloudWatchQuery) (data.Frames, bool, error) { + plog.Debug("Parsing metric data results", "results", metricDataResults) partialData := false - result := tsdb.TimeSeriesSlice{} + frames := data.Frames{} for _, label := range labels { metricDataResult := metricDataResults[label] + plog.Debug("Processing metric data result", "label", label, "statusCode", metricDataResult.StatusCode) if *metricDataResult.StatusCode != "Complete" { + plog.Debug("Handling a partial result") partialData = true } @@ -85,8 +90,8 @@ func parseGetMetricDataTimeSeries(metricDataResults map[string]*cloudwatch.Metri } } - // In case a multi-valued dimension is used and the cloudwatch query yields no values, create one empty time series for each dimension value. - // Use that dimension value to expand the alias field + // In case a multi-valued dimension is used and the cloudwatch query yields no values, create one empty time + // series for each dimension value. Use that dimension value to expand the alias field if len(metricDataResult.Values) == 0 && query.isMultiValuedDimensionExpression() { series := 0 multiValuedDimension := "" @@ -98,62 +103,78 @@ func parseGetMetricDataTimeSeries(metricDataResults map[string]*cloudwatch.Metri } for _, value := range query.Dimensions[multiValuedDimension] { - emptySeries := tsdb.TimeSeries{ - Tags: map[string]string{multiValuedDimension: value}, - Points: make([]tsdb.TimePoint, 0), - } + tags := map[string]string{multiValuedDimension: value} for key, values := range query.Dimensions { if key != multiValuedDimension && len(values) > 0 { - emptySeries.Tags[key] = values[0] + tags[key] = values[0] } } - emptySeries.Name = formatAlias(query, query.Stats, emptySeries.Tags, label) - result = append(result, &emptySeries) + emptyFrame := data.Frame{ + Name: formatAlias(query, query.Stats, tags, label), + Fields: []*data.Field{ + data.NewField("timestamp", nil, []float64{}), + data.NewField("value", tags, []*float64{}), + }, + } + frames = append(frames, &emptyFrame) } } else { - keys := make([]string, 0) + dims := make([]string, 0, len(query.Dimensions)) for k := range query.Dimensions { - keys = append(keys, k) + dims = append(dims, k) } - sort.Strings(keys) + sort.Strings(dims) - series := tsdb.TimeSeries{ - Tags: make(map[string]string), - Points: make([]tsdb.TimePoint, 0), - } - - for _, key := range keys { - values := query.Dimensions[key] + tags := data.Labels{} + for _, dim := range dims { + plog.Debug("Handling dimension", "dimension", dim) + values := query.Dimensions[dim] if len(values) == 1 && values[0] != "*" { - series.Tags[key] = values[0] + plog.Debug("Got a tag value", "tag", dim, "value", values[0]) + tags[dim] = values[0] } else { for _, value := range values { if value == label || value == "*" { - series.Tags[key] = label + plog.Debug("Got a tag value", "tag", dim, 
"value", value, "label", label) + tags[dim] = label } else if strings.Contains(label, value) { - series.Tags[key] = value + plog.Debug("Got a tag value", "tag", dim, "value", value, "label", label) + tags[dim] = value } } } } - series.Name = formatAlias(query, query.Stats, series.Tags, label) - + timestamps := []float64{} + points := []*float64{} for j, t := range metricDataResult.Timestamps { if j > 0 { expectedTimestamp := metricDataResult.Timestamps[j-1].Add(time.Duration(query.Period) * time.Second) if expectedTimestamp.Before(*t) { - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(expectedTimestamp.Unix()*1000))) + timestamps = append(timestamps, float64(expectedTimestamp.Unix()*1000)) + points = append(points, nil) } } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(*metricDataResult.Values[j]), - float64(t.Unix())*1000)) + val := metricDataResult.Values[j] + plog.Debug("Handling timestamp", "timestamp", t, "value", *val) + timestamps = append(timestamps, float64(t.Unix()*1000)) + points = append(points, val) } - result = append(result, &series) + + fields := []*data.Field{ + data.NewField("timestamp", nil, timestamps), + data.NewField("value", tags, points), + } + frame := data.Frame{ + Name: formatAlias(query, query.Stats, tags, label), + Fields: fields, + } + frames = append(frames, &frame) } } - return &result, partialData, nil + + return frames, partialData, nil } func formatAlias(query *cloudWatchQuery, stat string, dimensions map[string]string, label string) string { diff --git a/pkg/tsdb/cloudwatch/response_parser_test.go b/pkg/tsdb/cloudwatch/response_parser_test.go index 5d8f1471889..7c1c1c3072d 100644 --- a/pkg/tsdb/cloudwatch/response_parser_test.go +++ b/pkg/tsdb/cloudwatch/response_parser_test.go @@ -6,7 +6,6 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/cloudwatch" - "github.com/grafana/grafana/pkg/components/null" . 
"github.com/smartystreets/goconvey/convey" ) @@ -61,17 +60,17 @@ func TestCloudWatchResponseParser(t *testing.T) { Period: 60, Alias: "{{LoadBalancer}} Expanded", } - series, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) - timeSeries := (*series)[0] - + frames, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) So(err, ShouldBeNil) - So(partialData, ShouldBeFalse) - So(timeSeries.Name, ShouldEqual, "lb1 Expanded") - So(timeSeries.Tags["LoadBalancer"], ShouldEqual, "lb1") - timeSeries2 := (*series)[1] - So(timeSeries2.Name, ShouldEqual, "lb2 Expanded") - So(timeSeries2.Tags["LoadBalancer"], ShouldEqual, "lb2") + frame1 := frames[0] + So(partialData, ShouldBeFalse) + So(frame1.Name, ShouldEqual, "lb1 Expanded") + So(frame1.Fields[1].Labels["LoadBalancer"], ShouldEqual, "lb1") + + frame2 := frames[1] + So(frame2.Name, ShouldEqual, "lb2 Expanded") + So(frame2.Fields[1].Labels["LoadBalancer"], ShouldEqual, "lb2") }) Convey("can expand dimension value using substring", func() { @@ -123,16 +122,17 @@ func TestCloudWatchResponseParser(t *testing.T) { Period: 60, Alias: "{{LoadBalancer}} Expanded", } - series, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) - timeSeries := (*series)[0] + frames, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) So(err, ShouldBeNil) - So(partialData, ShouldBeFalse) - So(timeSeries.Name, ShouldEqual, "lb1 Expanded") - So(timeSeries.Tags["LoadBalancer"], ShouldEqual, "lb1") - timeSeries2 := (*series)[1] - So(timeSeries2.Name, ShouldEqual, "lb2 Expanded") - So(timeSeries2.Tags["LoadBalancer"], ShouldEqual, "lb2") + frame1 := frames[0] + So(partialData, ShouldBeFalse) + So(frame1.Name, ShouldEqual, "lb1 Expanded") + So(frame1.Fields[1].Labels["LoadBalancer"], ShouldEqual, "lb1") + + frame2 := frames[1] + So(frame2.Name, ShouldEqual, "lb2 Expanded") + So(frame2.Fields[1].Labels["LoadBalancer"], ShouldEqual, "lb2") }) Convey("can expand dimension value using wildcard", func() { @@ -184,12 +184,12 @@ func TestCloudWatchResponseParser(t *testing.T) { Period: 60, Alias: "{{LoadBalancer}} Expanded", } - series, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) - + frames, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) So(err, ShouldBeNil) + So(partialData, ShouldBeFalse) - So((*series)[0].Name, ShouldEqual, "lb3 Expanded") - So((*series)[1].Name, ShouldEqual, "lb4 Expanded") + So(frames[0].Name, ShouldEqual, "lb3 Expanded") + So(frames[1].Name, ShouldEqual, "lb4 Expanded") }) Convey("can expand dimension value when no values are returned and a multi-valued template variable is used", func() { @@ -221,13 +221,13 @@ func TestCloudWatchResponseParser(t *testing.T) { Period: 60, Alias: "{{LoadBalancer}} Expanded", } - series, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) - + frames, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) So(err, ShouldBeNil) + So(partialData, ShouldBeFalse) - So(len(*series), ShouldEqual, 2) - So((*series)[0].Name, ShouldEqual, "lb1 Expanded") - So((*series)[1].Name, ShouldEqual, "lb2 Expanded") + So(len(frames), ShouldEqual, 2) + So(frames[0].Name, ShouldEqual, "lb1 Expanded") + So(frames[1].Name, ShouldEqual, "lb2 Expanded") }) Convey("can expand dimension value when no values are returned and a multi-valued template variable and two single-valued dimensions are used", func() { @@ -261,13 +261,13 @@ func TestCloudWatchResponseParser(t *testing.T) { Period: 60, Alias: 
"{{LoadBalancer}} Expanded {{InstanceType}} - {{Resource}}", } - series, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) - + frames, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) So(err, ShouldBeNil) + So(partialData, ShouldBeFalse) - So(len(*series), ShouldEqual, 2) - So((*series)[0].Name, ShouldEqual, "lb1 Expanded micro - res") - So((*series)[1].Name, ShouldEqual, "lb2 Expanded micro - res") + So(len(frames), ShouldEqual, 2) + So(frames[0].Name, ShouldEqual, "lb1 Expanded micro - res") + So(frames[1].Name, ShouldEqual, "lb2 Expanded micro - res") }) Convey("can parse cloudwatch response", func() { @@ -304,17 +304,18 @@ func TestCloudWatchResponseParser(t *testing.T) { Period: 60, Alias: "{{namespace}}_{{metric}}_{{stat}}", } - series, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) - timeSeries := (*series)[0] - + frames, partialData, err := parseGetMetricDataTimeSeries(mdrs, labels, query) So(err, ShouldBeNil) + + frame := frames[0] So(partialData, ShouldBeFalse) - So(timeSeries.Name, ShouldEqual, "AWS/ApplicationELB_TargetResponseTime_Average") - So(timeSeries.Tags["LoadBalancer"], ShouldEqual, "lb") - So(timeSeries.Points[0][0].String(), ShouldEqual, null.FloatFrom(10.0).String()) - So(timeSeries.Points[1][0].String(), ShouldEqual, null.FloatFrom(20.0).String()) - So(timeSeries.Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String()) - So(timeSeries.Points[3][0].String(), ShouldEqual, null.FloatFrom(30.0).String()) + So(frame.Name, ShouldEqual, "AWS/ApplicationELB_TargetResponseTime_Average") + So(frame.Fields[1].Labels["LoadBalancer"], ShouldEqual, "lb") + So(frame.Fields[1].Len(), ShouldEqual, 4) + So(*frame.Fields[1].At(0).(*float64), ShouldEqual, 10.0) + So(*frame.Fields[1].At(1).(*float64), ShouldEqual, 20.0) + So(frame.Fields[1].At(2).(*float64), ShouldBeNil) + So(*frame.Fields[1].At(3).(*float64), ShouldEqual, 30.0) }) }) } diff --git a/pkg/tsdb/cloudwatch/time_series_query.go b/pkg/tsdb/cloudwatch/time_series_query.go index 58fe7f2e37b..2fd1852bcbd 100644 --- a/pkg/tsdb/cloudwatch/time_series_query.go +++ b/pkg/tsdb/cloudwatch/time_series_query.go @@ -6,17 +6,19 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/util/errutil" "golang.org/x/sync/errgroup" ) func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) { + plog.Debug("Executing time series query") startTime, err := queryContext.TimeRange.ParseFrom() if err != nil { - return nil, err + return nil, errutil.Wrap("failed to parse start time", err) } endTime, err := queryContext.TimeRange.ParseTo() if err != nil { - return nil, err + return nil, errutil.Wrap("failed to parse end time", err) } if !startTime.Before(endTime) { return nil, fmt.Errorf("invalid time range: start time must be before end time") diff --git a/pkg/tsdb/cloudwatch/types.go b/pkg/tsdb/cloudwatch/types.go index c269571a5d6..fb360c52bb8 100644 --- a/pkg/tsdb/cloudwatch/types.go +++ b/pkg/tsdb/cloudwatch/types.go @@ -3,16 +3,9 @@ package cloudwatch import ( "fmt" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/request" - "github.com/aws/aws-sdk-go/service/cloudwatch" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana-plugin-sdk-go/data" ) -type cloudWatchClient interface { - GetMetricDataWithContext(ctx aws.Context, input *cloudwatch.GetMetricDataInput, opts ...request.Option) 
(*cloudwatch.GetMetricDataOutput, error) -} - type requestQuery struct { RefId string Region string @@ -31,7 +24,7 @@ type requestQuery struct { } type cloudwatchResponse struct { - series *tsdb.TimeSeriesSlice + DataFrames data.Frames Id string RefId string Expression string diff --git a/pkg/tsdb/models.go b/pkg/tsdb/models.go index 618ad0083cc..78c3cb7718c 100644 --- a/pkg/tsdb/models.go +++ b/pkg/tsdb/models.go @@ -89,7 +89,7 @@ func NewTimeSeries(name string, points TimeSeriesPoints) *TimeSeries { } } -// DataFrames interface for retrieving encoded and decoded data frames. +// DataFrames is an interface for retrieving encoded and decoded data frames. // // See NewDecodedDataFrames and NewEncodedDataFrames for more information. type DataFrames interface {
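The heart of the conversion is in response_parser.go: instead of appending tsdb.TimePoints to a tsdb.TimeSeries, each CloudWatch result now becomes a data.Frame holding a float64 "timestamp" field (epoch milliseconds, as before) and a nullable *float64 "value" field that carries the resolved dimensions as labels. A minimal standalone sketch of that shape, using the grafana-plugin-sdk-go data package; the timestamps and values below are invented, and the single-null gap handling mirrors the patch's logic:

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	// Invented GetMetricData-style output: two samples with one missing period.
	period := 60 * time.Second
	ts := []time.Time{
		time.Date(2020, 9, 8, 12, 0, 0, 0, time.UTC),
		time.Date(2020, 9, 8, 12, 2, 0, 0, time.UTC), // 12:01 is missing
	}
	vals := []float64{10, 30}

	timestamps := []float64{}
	points := []*float64{}
	for i, t := range ts {
		// Same gap handling as the patch: if the previous timestamp plus one
		// period is still before the current timestamp, emit an explicit null.
		if i > 0 {
			expected := ts[i-1].Add(period)
			if expected.Before(t) {
				timestamps = append(timestamps, float64(expected.Unix()*1000))
				points = append(points, nil)
			}
		}
		v := vals[i]
		timestamps = append(timestamps, float64(t.Unix()*1000))
		points = append(points, &v)
	}

	// The value field carries the resolved dimensions as frame labels.
	tags := data.Labels{"LoadBalancer": "lb1"}
	frame := data.Frame{
		Name: "lb1 Expanded",
		Fields: []*data.Field{
			data.NewField("timestamp", nil, timestamps),
			data.NewField("value", tags, points),
		},
	}
	fmt.Println(frame.Fields[1].Len()) // 3: two values plus the injected null
}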
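On the query_transformer.go side, the per-refId responses are still grouped and sorted for stable output, but the sorted frames are now attached to the tsdb.QueryResult through its Dataframes field rather than Series. A rough sketch of that step, assuming the Grafana-internal pkg/tsdb package at this revision; buildQueryResult is a hypothetical helper name, not a function from the patch:

package cloudwatch

import (
	"sort"

	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/tsdb"
)

// buildQueryResult shows how sorted frames end up on a query result.
func buildQueryResult(refID string, frames data.Frames) *tsdb.QueryResult {
	// Sort by frame name so result order no longer depends on map iteration.
	sort.Slice(frames, func(i, j int) bool {
		return frames[i].Name < frames[j].Name
	})

	qr := tsdb.NewQueryResult()
	qr.RefId = refID
	// NewDecodedDataFrames keeps the frames in decoded form, to be encoded
	// on demand when the response is serialized.
	qr.Dataframes = tsdb.NewDecodedDataFrames(frames)
	return qr
}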
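The test change relies on a standard AWS SDK for Go v1 pattern: embedding cloudwatchiface.CloudWatchAPI in the fake makes it satisfy the full interface, so only GetMetricDataWithContext, the one method executeRequest calls, has to be implemented; calling anything else would panic on the nil embedded value, which is acceptable for a focused test double. Roughly like this, where fakeCloudWatchClient and its paging counter are illustrative stand-ins rather than the test's actual cloudWatchFakeClient:

package cloudwatch

import (
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/request"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
	"github.com/aws/aws-sdk-go/service/cloudwatch/cloudwatchiface"
)

// fakeCloudWatchClient satisfies cloudwatchiface.CloudWatchAPI by embedding it;
// only the single method the executor needs is overridden.
type fakeCloudWatchClient struct {
	cloudwatchiface.CloudWatchAPI
	pagesLeft int
}

func (c *fakeCloudWatchClient) GetMetricDataWithContext(ctx aws.Context,
	input *cloudwatch.GetMetricDataInput, opts ...request.Option) (*cloudwatch.GetMetricDataOutput, error) {
	c.pagesLeft--
	out := &cloudwatch.GetMetricDataOutput{}
	if c.pagesLeft > 0 {
		// A non-nil NextToken makes executeRequest fetch the next page.
		out.NextToken = aws.String("more")
	}
	return out, nil
}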