Cloud Monitoring: Fix missing data when result is paginated (#56270)
* Added nextPageToken prop
* Added first and pageToken conditions to the while loop
* Clean up
* Revert gitignore
* Fix go lint
* Added logic to the builder too
* Removed pageSize - was for local testing
* gofmt
* Extracted doRequest function
* Extracted doRequest in query too
* Addressed filter comments
* Addressed query comments
* go fmt
* Removed pageSize added for testing
* go fmt again
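The change follows the Cloud Monitoring list-API pagination contract: if a response carries a nextPageToken, the same request is re-issued with that token until the token comes back empty, and each page's series is appended to the first response. Below is a minimal, self-contained sketch of that loop; the fetchPage callback and page type are hypothetical stand-ins for the real request plumbing, not the Grafana code itself.

```go
package main

import "fmt"

// page stands in for one paginated list response.
type page struct {
	Series        []string
	NextPageToken string
}

// fetchPage stands in for issuing one request; "" asks for the first page.
type fetchPage func(token string) (page, error)

// fetchAll mirrors the loop this commit adds: request once, then keep
// re-requesting with the previous NextPageToken and merge the results.
func fetchAll(fetch fetchPage) ([]string, error) {
	first, err := fetch("")
	if err != nil {
		return nil, err
	}
	all := first.Series
	token := first.NextPageToken
	for token != "" {
		next, err := fetch(token)
		if err != nil {
			return nil, err
		}
		all = append(all, next.Series...)
		token = next.NextPageToken
	}
	return all, nil
}

func main() {
	// Fake three-page backend.
	pages := map[string]page{
		"":   {Series: []string{"a"}, NextPageToken: "p2"},
		"p2": {Series: []string{"b"}, NextPageToken: "p3"},
		"p3": {Series: []string{"c"}},
	}
	merged, err := fetchAll(func(token string) (page, error) { return pages[token], nil })
	fmt.Println(merged, err) // [a b c] <nil>
}
```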
@@ -4,6 +4,7 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"net/http"
 	"net/url"
 	"path"
 	"strconv"
@@ -17,6 +18,22 @@ import (
 	"github.com/grafana/grafana/pkg/infra/tracing"
 )
 
+func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) doRequestFilterPage(ctx context.Context, r *http.Request, dsInfo datasourceInfo) (cloudMonitoringResponse, error) {
+	r.URL.RawQuery = timeSeriesFilter.Params.Encode()
+	r = r.WithContext(ctx)
+	res, err := dsInfo.services[cloudMonitor].client.Do(r)
+	if err != nil {
+		return cloudMonitoringResponse{}, err
+	}
+
+	dnext, err := unmarshalResponse(res)
+	if err != nil {
+		return cloudMonitoringResponse{}, err
+	}
+
+	return dnext, nil
+}
+
 func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context, req *backend.QueryDataRequest,
 	s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
 	dr := &backend.DataResponse{}
@@ -30,16 +47,12 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
 		}
 		slog.Info("No project name set on query, using project name from datasource", "projectName", projectName)
 	}
-
 	r, err := s.createRequest(ctx, &dsInfo, path.Join("/v3/projects", projectName, "timeSeries"), nil)
 	if err != nil {
 		dr.Error = err
 		return dr, cloudMonitoringResponse{}, "", nil
 	}
-
-	r.URL.RawQuery = timeSeriesFilter.Params.Encode()
 	alignmentPeriod, ok := r.URL.Query()["aggregation.alignmentPeriod"]
-
 	if ok {
 		seconds, err := strconv.ParseInt(alignmentPeriodRe.FindString(alignmentPeriod[0]), 10, 64)
 		if err == nil {
@@ -63,22 +76,25 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
 	span.SetAttributes("until", req.Queries[0].TimeRange.To, attribute.Key("until").String(req.Queries[0].TimeRange.To.String()))
 	span.SetAttributes("datasource_id", dsInfo.id, attribute.Key("datasource_id").Int64(dsInfo.id))
 	span.SetAttributes("org_id", req.PluginContext.OrgID, attribute.Key("org_id").Int64(req.PluginContext.OrgID))
-
 	defer span.End()
 	tracer.Inject(ctx, r.Header, span)
 
-	r = r.WithContext(ctx)
-	res, err := dsInfo.services[cloudMonitor].client.Do(r)
+	d, err := timeSeriesFilter.doRequestFilterPage(ctx, r, dsInfo)
 	if err != nil {
 		dr.Error = err
 		return dr, cloudMonitoringResponse{}, "", nil
 	}
-
-	d, err := unmarshalResponse(res)
-	if err != nil {
-		dr.Error = err
-		return dr, cloudMonitoringResponse{}, "", nil
-	}
+	nextPageToken := d.NextPageToken
+	for nextPageToken != "" {
+		timeSeriesFilter.Params["pageToken"] = []string{d.NextPageToken}
+		nextPage, err := timeSeriesFilter.doRequestFilterPage(ctx, r, dsInfo)
+		if err != nil {
+			dr.Error = err
+			return dr, cloudMonitoringResponse{}, "", nil
+		}
+		d.TimeSeries = append(d.TimeSeries, nextPage.TimeSeries...)
+		nextPageToken = nextPage.NextPageToken
+	}
 
 	return dr, d, r.URL.RawQuery, nil
 }
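On the filter path the page token travels as a URL query parameter: the loop writes pageToken into Params, and doRequestFilterPage re-encodes Params into the request's RawQuery before every send. A self-contained sketch of that encode-per-page pattern follows; the test server and parameter values are made up for illustration and this is not the Grafana datasource client.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
)

func main() {
	// Test server that reports which pageToken it was asked for.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "served pageToken=%q", r.URL.Query().Get("pageToken"))
	}))
	defer srv.Close()

	params := url.Values{
		"filter": []string{`metric.type = "compute.googleapis.com/instance/cpu/utilization"`},
	}

	req, err := http.NewRequest(http.MethodGet, srv.URL, nil)
	if err != nil {
		panic(err)
	}

	// First request has no token; follow-ups overwrite the same key and
	// re-encode the whole parameter set into RawQuery, as the new helper does.
	for _, token := range []string{"", "page-2", "page-3"} {
		if token != "" {
			params["pageToken"] = []string{token}
		}
		req.URL.RawQuery = params.Encode()

		res, err := http.DefaultClient.Do(req)
		if err != nil {
			panic(err)
		}
		body, _ := io.ReadAll(res.Body)
		_ = res.Body.Close()
		fmt.Println(string(body))
	}
}
```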
@@ -5,16 +5,17 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"io"
+	"net/http"
 	"net/url"
 	"path"
 	"strconv"
 	"strings"
 	"time"
 
-	"go.opentelemetry.io/otel/attribute"
-
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"go.opentelemetry.io/otel/attribute"
 
 	"github.com/grafana/grafana/pkg/infra/tracing"
 	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
@@ -35,6 +36,24 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) appendGraphPeriod(req *bac
 	return ""
 }
 
+func doRequestQueryPage(requestBody map[string]interface{}, r *http.Request, dsInfo datasourceInfo) (cloudMonitoringResponse, error) {
+	buf, err := json.Marshal(requestBody)
+	if err != nil {
+		return cloudMonitoringResponse{}, err
+	}
+	r.Body = io.NopCloser(bytes.NewBuffer(buf))
+	res, err := dsInfo.services[cloudMonitor].client.Do(r)
+	if err != nil {
+		return cloudMonitoringResponse{}, err
+	}
+
+	dnext, err := unmarshalResponse(res)
+	if err != nil {
+		return cloudMonitoringResponse{}, err
+	}
+	return dnext, nil
+}
+
 func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, req *backend.QueryDataRequest,
 	s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
 	dr := &backend.DataResponse{}
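One detail the helper above relies on: http.Request.Body is an io.ReadCloser that is consumed by the first send, so reusing the same *http.Request for the next page only works because a freshly marshalled body is swapped in before every call, wrapped with io.NopCloser. A standalone sketch of that body-reset pattern against a throwaway test server; the names and payloads are hypothetical, not the Grafana client setup.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Echo server so each send shows the body it actually carried.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		b, _ := io.ReadAll(r.Body)
		fmt.Fprintf(w, "got %s", b)
	}))
	defer srv.Close()

	// Build the request once with an empty body, as the query path now does.
	req, err := http.NewRequest(http.MethodPost, srv.URL, bytes.NewBuffer([]byte{}))
	if err != nil {
		panic(err)
	}

	for _, body := range []map[string]interface{}{
		{"query": "fetch gce_instance"},
		{"query": "fetch gce_instance", "pageToken": "page-2"},
	} {
		buf, err := json.Marshal(body)
		if err != nil {
			panic(err)
		}
		// Replace the consumed body with a fresh reader for this page.
		req.Body = io.NopCloser(bytes.NewBuffer(buf))
		req.ContentLength = int64(len(buf))

		res, err := http.DefaultClient.Do(req)
		if err != nil {
			panic(err)
		}
		out, _ := io.ReadAll(res.Body)
		_ = res.Body.Close()
		fmt.Println(string(out))
	}
}
```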
@@ -55,40 +74,43 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, r
 	to := req.Queries[0].TimeRange.To
 	timeFormat := "2006/01/02-15:04:05"
 	timeSeriesQuery.Query += fmt.Sprintf(" | within d'%s', d'%s'", from.UTC().Format(timeFormat), to.UTC().Format(timeFormat))
-
-	buf, err := json.Marshal(map[string]interface{}{
-		"query": timeSeriesQuery.Query,
-	})
-	if err != nil {
-		dr.Error = err
-		return dr, cloudMonitoringResponse{}, "", nil
-	}
-	r, err := s.createRequest(ctx, &dsInfo, path.Join("/v3/projects", projectName, "timeSeries:query"), bytes.NewBuffer(buf))
-	if err != nil {
-		dr.Error = err
-		return dr, cloudMonitoringResponse{}, "", nil
-	}
-
+	p := path.Join("/v3/projects", projectName, "timeSeries:query")
 	ctx, span := tracer.Start(ctx, "cloudMonitoring MQL query")
 	span.SetAttributes("query", timeSeriesQuery.Query, attribute.Key("query").String(timeSeriesQuery.Query))
 	span.SetAttributes("from", req.Queries[0].TimeRange.From, attribute.Key("from").String(req.Queries[0].TimeRange.From.String()))
 	span.SetAttributes("until", req.Queries[0].TimeRange.To, attribute.Key("until").String(req.Queries[0].TimeRange.To.String()))
 
 	defer span.End()
-	tracer.Inject(ctx, r.Header, span)
 
-	r = r.WithContext(ctx)
-	res, err := dsInfo.services[cloudMonitor].client.Do(r)
+	requestBody := map[string]interface{}{
+		"query": timeSeriesQuery.Query,
+	}
+	r, err := s.createRequest(ctx, &dsInfo, p, bytes.NewBuffer([]byte{}))
 	if err != nil {
 		dr.Error = err
 		return dr, cloudMonitoringResponse{}, "", nil
 	}
+	tracer.Inject(ctx, r.Header, span)
+	r = r.WithContext(ctx)
 
-	d, err := unmarshalResponse(res)
+	d, err := doRequestQueryPage(requestBody, r, dsInfo)
 	if err != nil {
 		dr.Error = err
 		return dr, cloudMonitoringResponse{}, "", nil
 	}
+	for d.NextPageToken != "" {
+		requestBody := map[string]interface{}{
+			"query":     timeSeriesQuery.Query,
+			"pageToken": d.NextPageToken,
+		}
+		nextPage, err := doRequestQueryPage(requestBody, r, dsInfo)
+		if err != nil {
+			dr.Error = err
+			return dr, cloudMonitoringResponse{}, "", nil
+		}
+		d.TimeSeriesData = append(d.TimeSeriesData, nextPage.TimeSeriesData...)
+		d.NextPageToken = nextPage.NextPageToken
+	}
 
 	return dr, d, timeSeriesQuery.Query, nil
 }
@@ -100,6 +100,7 @@ type (
 		TimeSeriesDescriptor timeSeriesDescriptor `json:"timeSeriesDescriptor"`
 		TimeSeriesData       timeSeriesData       `json:"timeSeriesData"`
 		Unit                 string               `json:"unit"`
+		NextPageToken        string               `json:"nextPageToken"`
 	}
 )
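The new NextPageToken field only has to round-trip the nextPageToken property the Monitoring API returns next to the data; when the API omits it on the last page, the field decodes to the empty string, which is exactly the condition that stops the loops above. A small decode example against a trimmed-down response shape, not the full cloudMonitoringResponse struct:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// listResponse keeps only the fields this example needs.
type listResponse struct {
	TimeSeries    []json.RawMessage `json:"timeSeries"`
	Unit          string            `json:"unit"`
	NextPageToken string            `json:"nextPageToken"`
}

func main() {
	pageWithMore := []byte(`{"timeSeries":[{}],"unit":"s","nextPageToken":"page-2"}`)
	lastPage := []byte(`{"timeSeries":[{}],"unit":"s"}`)

	var a, b listResponse
	if err := json.Unmarshal(pageWithMore, &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(lastPage, &b); err != nil {
		panic(err)
	}

	// Non-empty token: another request is needed. Empty token: done.
	fmt.Println(a.NextPageToken != "") // true
	fmt.Println(b.NextPageToken != "") // false
}
```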