package stackdriver

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"path"
	"regexp"
	"time"

	"golang.org/x/net/context/ctxhttp"

	"github.com/grafana/grafana/pkg/api/pluginproxy"
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/log"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/opentracing/opentracing-go"
)

var slog log.Logger

// StackdriverExecutor executes queries for the Stackdriver datasource
type StackdriverExecutor struct {
	httpClient *http.Client
	dsInfo     *models.DataSource
}

// NewStackdriverExecutor initializes an HTTP client and returns a Stackdriver tsdb query endpoint
func NewStackdriverExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
	httpClient, err := dsInfo.GetHttpClient()
	if err != nil {
		return nil, err
	}

	return &StackdriverExecutor{
		httpClient: httpClient,
		dsInfo:     dsInfo,
	}, nil
}

func init() {
	slog = log.New("tsdb.stackdriver")
	tsdb.RegisterTsdbQueryEndpoint("stackdriver", NewStackdriverExecutor)
}

// Query takes in the frontend queries, parses them into the Stackdriver query format,
// executes the queries against the Stackdriver API and parses the response into
// the time series or table format
func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
	result := &tsdb.Response{
		Results: make(map[string]*tsdb.QueryResult),
	}

	queries, err := e.buildQueries(tsdbQuery)
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		queryRes, err := e.executeQuery(ctx, query, tsdbQuery)
		if err != nil {
			return nil, err
		}
		result.Results[query.RefID] = queryRes
	}

	return result, nil
}
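
// buildQueries transforms the received queries into StackdriverQuery structs, setting the
// time interval, the metric type filter and the aggregation parameters for each request.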
func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*StackdriverQuery, error) {
	stackdriverQueries := []*StackdriverQuery{}

	startTime, err := tsdbQuery.TimeRange.ParseFrom()
	if err != nil {
		return nil, err
	}

	endTime, err := tsdbQuery.TimeRange.ParseTo()
	if err != nil {
		return nil, err
	}

	for _, query := range tsdbQuery.Queries {
		var target string

		if fullTarget, err := query.Model.Get("targetFull").String(); err == nil {
			target = fixIntervalFormat(fullTarget)
		} else {
			target = fixIntervalFormat(query.Model.Get("target").MustString())
		}

		metricType := query.Model.Get("metricType").MustString()

		params := url.Values{}
		params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339))
		params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339))
		params.Add("filter", "metric.type=\""+metricType+"\"")
		setAggParams(&params, query)

		if setting.Env == setting.DEV {
			slog.Debug("Stackdriver request", "params", params)
		}

		stackdriverQueries = append(stackdriverQueries, &StackdriverQuery{
			Target: target,
			Params: params,
			RefID:  query.RefId,
		})
	}

	return stackdriverQueries, nil
}
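
// setAggParams adds the aggregation parameters to the request. When the primary aggregation
// is REDUCE_NONE (or unset), only ALIGN_NONE is sent; otherwise the reducer is combined with
// ALIGN_MEAN and a 60s alignment period. Group-by fields are added as repeated
// aggregation.groupByFields parameters.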
func setAggParams(params *url.Values, query *tsdb.Query) {
	primaryAggregation := query.Model.Get("primaryAggregation").MustString()
	if primaryAggregation == "" {
		primaryAggregation = "REDUCE_NONE"
	}

	if primaryAggregation == "REDUCE_NONE" {
		params.Add("aggregation.perSeriesAligner", "ALIGN_NONE")
	} else {
		params.Add("aggregation.crossSeriesReducer", primaryAggregation)
		params.Add("aggregation.perSeriesAligner", "ALIGN_MEAN")
		params.Add("aggregation.alignmentPeriod", "+60s")
	}

	groupBys := query.Model.Get("groupBys").MustArray()
	if len(groupBys) > 0 {
		for i := 0; i < len(groupBys); i++ {
			params.Add("aggregation.groupByFields", groupBys[i].(string))
		}
	}
}
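
// executeQuery issues a single request to the Stackdriver API, traces it and parses the
// response. Request and parsing errors are attached to the query result instead of being
// returned as a Go error.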
func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *StackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, error) {
	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}

	req, err := e.createRequest(ctx, e.dsInfo)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	req.URL.RawQuery = query.Params.Encode()
	queryResult.Meta.Set("rawQuery", req.URL.RawQuery)

	span, ctx := opentracing.StartSpanFromContext(ctx, "stackdriver query")
	span.SetTag("target", query.Target)
	span.SetTag("from", tsdbQuery.TimeRange.From)
	span.SetTag("until", tsdbQuery.TimeRange.To)
	span.SetTag("datasource_id", e.dsInfo.Id)
	span.SetTag("org_id", e.dsInfo.OrgId)

	defer span.Finish()

	opentracing.GlobalTracer().Inject(
		span.Context(),
		opentracing.HTTPHeaders,
		opentracing.HTTPHeadersCarrier(req.Header))

	res, err := ctxhttp.Do(ctx, e.httpClient, req)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	data, err := e.unmarshalResponse(res)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	err = e.parseResponse(queryResult, data)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	return queryResult, nil
}
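
// unmarshalResponse reads the response body and unmarshals it into a StackDriverResponse.
// Non-2xx responses are logged and returned as an error containing the response body.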
func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (StackDriverResponse, error) {
	body, err := ioutil.ReadAll(res.Body)
	defer res.Body.Close()
	if err != nil {
		return StackDriverResponse{}, err
	}

	if res.StatusCode/100 != 2 {
		slog.Error("Request failed", "status", res.Status, "body", string(body))
		return StackDriverResponse{}, errors.New(string(body))
	}

	var data StackDriverResponse
	err = json.Unmarshal(body, &data)
	if err != nil {
		slog.Error("Failed to unmarshal Stackdriver response", "error", err, "status", res.Status, "body", string(body))
		return StackDriverResponse{}, err
	}

	return data, nil
}
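
// parseResponse converts the Stackdriver time series into tsdb time series. Points arrive
// in descending time order and are reversed into ascending order; the metric labels found
// across all series are collected into the query result metadata.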
func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackDriverResponse) error {
	metricLabels := make(map[string][]string)
	// resourceLabels := make(map[string][]string)

	for _, series := range data.TimeSeries {
		points := make([]tsdb.TimePoint, 0)

		// reverse the order to be ascending
		for i := len(series.Points) - 1; i >= 0; i-- {
			point := series.Points[i]
			points = append(points, tsdb.NewTimePoint(null.FloatFrom(point.Value.DoubleValue), float64((point.Interval.EndTime).Unix())*1000))
		}
		metricName := series.Metric.Type

		for key, value := range series.Metric.Labels {
			metricLabels[key] = append(metricLabels[key], value)
			metricName += " " + value
		}

		// queryRes.Meta.Set("resourceLabels", series.Resource.Labels)

		queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
			Name:   metricName,
			Points: points,
		})
	}

	queryRes.Meta.Set("metricLabels", metricLabels)

	return nil
}
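
// createRequest builds the outgoing request against the Stackdriver API and applies the
// datasource plugin proxy route so that authentication headers are added. Note that the
// proxy path is currently hard-coded to the raintank-production project.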
func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
	u, _ := url.Parse(dsInfo.Url)
	u.Path = path.Join(u.Path, "render")

	req, err := http.NewRequest(http.MethodGet, "https://monitoring.googleapis.com/", nil)
	if err != nil {
		slog.Info("Failed to create request", "error", err)
		return nil, fmt.Errorf("Failed to create request. error: %v", err)
	}

	req.Header.Set("Content-Type", "application/json")

	// find plugin
	plugin, ok := plugins.DataSources[dsInfo.Type]
	if !ok {
		return nil, errors.New("Unable to find datasource plugin Stackdriver")
	}
	proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/raintank-production/timeSeries")

	var stackdriverRoute *plugins.AppPluginRoute
	for _, route := range plugin.Routes {
		if route.Path == "stackdriver" {
			stackdriverRoute = route
			break
		}
	}

	pluginproxy.ApplyRoute(ctx, req, proxyPass, stackdriverRoute, dsInfo)

	return req, nil
}
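
// fixIntervalFormat rewrites quoted interval units in the target expression so that
// 'Nm' becomes 'Nmin' and 'NM' becomes 'Nmon'.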
func fixIntervalFormat(target string) string {
	rMinute := regexp.MustCompile(`'(\d+)m'`)
	rMin := regexp.MustCompile("m")
	target = rMinute.ReplaceAllStringFunc(target, func(m string) string {
		return rMin.ReplaceAllString(m, "min")
	})
	rMonth := regexp.MustCompile(`'(\d+)M'`)
	rMon := regexp.MustCompile("M")
	target = rMonth.ReplaceAllStringFunc(target, func(M string) string {
		return rMon.ReplaceAllString(M, "mon")
	})
	return target
}