Stackdriver: Support for SLO queries (#22917)
* wip: add SLO support
* Export DataSourcePlugin
* wip: break out metric query editor into its own component
* wip: refactor frontend to keep SLO and metric queries in different objects
* wip: load services and SLOs
* Fix broken test
* Add interactive SLO expression builder
* Change order of dropdowns
* Refactor backend model; SLO unit testing in progress
* Unit test migration and SLOs
* Clean up SLO editor
* Simplify alias-by component
* Support alias-by for SLOs
* Support SLOs in variable queries
* Fix broken last query error
* Update Help section to include SLO aliases
* Streamline datasource resource cache
* Break out API-specific code in the datasource into its own file
* Move the get-projects call to the frontend
* Refactor API caching
* Unit test the API service
* Fix Go lint issue
* Fix TypeScript strict errors
* Fix test datasource
* Use budget fraction selector instead of budget
* Reset SLO when the service is changed
* Handle error in case the resource call returned no data
* Show real SLI display name
* Use unsafe prefix on the will-mount hook
* Store goal in the query model since it will be used as soon as the graph panel supports adding a threshold
* Add comment describing why componentWillMount is used
* Interpolate sloId
* Break out SLO aggregation into its own func
* Also test group-bys for the metric query test
* Remove unused type fields
* Remove annoying "stackdriver" prefix from error message
* Default the view param to FULL
* Add section about the SLO query builder to the docs
* Use new images
* Fixes after feedback
* Add one more group-by test
* Make Stackdriver types internal
* Update docs/sources/features/datasources/stackdriver.md (three review suggestions, co-authored by Diana Payton)
* Updates after PR feedback
* Add test for when no alias-by is defined
* Fix infinite loop when the newVariables feature flag is on: onChange called in componentDidUpdate produces an infinite loop with the new React template variable implementation; also fixes a spelling mistake
* Implement feedback on documentation changes
* More doc changes

Co-authored-by: Diana Payton <52059945+oddlittlebird@users.noreply.github.com>
Co-authored-by: Daniel Lee <dan.limerick@gmail.com>
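For orientation, here is a minimal standalone sketch (not taken from this commit) of the shape the refactored frontend query model takes, with "queryType" choosing between a metric query and an SLO query; all field values below are made-up examples.

// Illustrative only: the split query model this PR introduces. Field values
// are invented examples; the real model is produced by the query editor.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	model := map[string]interface{}{
		"queryType":   "slo",
		"metricQuery": map[string]interface{}{},
		"sloQuery": map[string]interface{}{
			"projectName":     "my-project", // example value
			"alignmentPeriod": "stackdriver-auto",
			"aliasBy":         "{{project}} - {{slo}}",
			"selectorName":    "select_slo_health",
			"serviceId":       "my-service", // example value
			"sloId":           "my-slo",     // example value
		},
	}
	out, _ := json.MarshalIndent(model, "", "  ")
	fmt.Println(string(out))
}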
@@ -34,7 +34,7 @@ func (e *StackdriverExecutor) executeAnnotationQuery(ctx context.Context, tsdbQu
return result, err
}

func (e *StackdriverExecutor) parseToAnnotations(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery, title string, text string, tags string) error {
func (e *StackdriverExecutor) parseToAnnotations(queryRes *tsdb.QueryResult, data stackdriverResponse, query *stackdriverQuery, title string, text string, tags string) error {
annotations := make([]map[string]string, 0)

for _, series := range data.TimeSeries {

@@ -18,7 +18,7 @@ func TestStackdriverAnnotationQuery(t *testing.T) {
So(len(data.TimeSeries), ShouldEqual, 3)

res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"}
query := &StackdriverQuery{}
query := &stackdriverQuery{}
err = executor.parseToAnnotations(res, data, query, "atitle {{metric.label.instance_name}} {{metric.value}}", "atext {{resource.label.zone}}", "atag")
So(err, ShouldBeNil)

@@ -43,6 +43,8 @@ var (
const (
gceAuthentication string = "gce"
jwtAuthentication string = "jwt"
metricQueryType string = "metrics"
sloQueryType string = "slo"
)

// StackdriverExecutor executes queries for the Stackdriver datasource
@@ -80,8 +82,6 @@ func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSour
switch queryType {
case "annotationQuery":
result, err = e.executeAnnotationQuery(ctx, tsdbQuery)
case "getProjectsListQuery":
result, err = e.getProjectList(ctx, tsdbQuery)
case "getGCEDefaultProject":
result, err = e.getGCEDefaultProject(ctx, tsdbQuery)
case "timeSeriesQuery":
@@ -136,8 +136,8 @@ func (e *StackdriverExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQu
return result, nil
}

func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*StackdriverQuery, error) {
stackdriverQueries := []*StackdriverQuery{}
func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*stackdriverQuery, error) {
stackdriverQueries := []*stackdriverQuery{}

startTime, err := tsdbQuery.TimeRange.ParseFrom()
if err != nil {
@@ -152,45 +152,67 @@ func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*Stackd
durationSeconds := int(endTime.Sub(startTime).Seconds())

for _, query := range tsdbQuery.Queries {
migrateLegacyQueryModel(query)
q := grafanaQuery{}
model, _ := query.Model.MarshalJSON()
if err := json.Unmarshal(model, &q); err != nil {
return nil, fmt.Errorf("could not unmarshal StackdriverQuery json: %w", err)
}
var target string

metricType := query.Model.Get("metricType").MustString()
filterParts := query.Model.Get("filters").MustArray()

params := url.Values{}
params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339))
params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339))
params.Add("filter", buildFilterString(metricType, filterParts))
params.Add("view", query.Model.Get("view").MustString("FULL"))
setAggParams(&params, query, durationSeconds)

sq := &stackdriverQuery{
RefID: query.RefId,
GroupBys: []string{},
}

if q.QueryType == metricQueryType {
sq.AliasBy = q.MetricQuery.AliasBy
sq.GroupBys = append(sq.GroupBys, q.MetricQuery.GroupBys...)
sq.ProjectName = q.MetricQuery.ProjectName
if q.MetricQuery.View == "" {
q.MetricQuery.View = "FULL"
}
params.Add("filter", buildFilterString(q.MetricQuery.MetricType, q.MetricQuery.Filters))
params.Add("view", q.MetricQuery.View)
setMetricAggParams(&params, &q.MetricQuery, durationSeconds, query.IntervalMs)
} else if q.QueryType == sloQueryType {
sq.AliasBy = q.SloQuery.AliasBy
sq.ProjectName = q.SloQuery.ProjectName
sq.Selector = q.SloQuery.SelectorName
sq.Service = q.SloQuery.ServiceId
sq.Slo = q.SloQuery.SloId
params.Add("filter", buildSLOFilterExpression(q.SloQuery))
setSloAggParams(&params, &q.SloQuery, durationSeconds, query.IntervalMs)
}

target = params.Encode()
sq.Target = target
sq.Params = params

if setting.Env == setting.DEV {
slog.Debug("Stackdriver request", "params", params)
}

groupBys := query.Model.Get("groupBys").MustArray()
groupBysAsStrings := make([]string, 0)
for _, groupBy := range groupBys {
groupBysAsStrings = append(groupBysAsStrings, groupBy.(string))
}

aliasBy := query.Model.Get("aliasBy").MustString()

stackdriverQueries = append(stackdriverQueries, &StackdriverQuery{
Target: target,
Params: params,
RefID: query.RefId,
GroupBys: groupBysAsStrings,
AliasBy: aliasBy,
ProjectName: query.Model.Get("projectName").MustString(""),
})
stackdriverQueries = append(stackdriverQueries, sq)
}

return stackdriverQueries, nil
}

func migrateLegacyQueryModel(query *tsdb.Query) {
mq := query.Model.Get("metricQuery").MustMap()
if mq == nil {
migratedModel := simplejson.NewFromAny(map[string]interface{}{
"queryType": metricQueryType,
"metricQuery": query.Model,
})
query.Model = migratedModel
}
}

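As a standalone sketch of what migrateLegacyQueryModel accomplishes (using plain maps instead of Grafana's simplejson, so this is not the PR's code): a pre-SLO model that has no metricQuery key is nested under "metricQuery" and tagged with queryType "metrics".

// Sketch only: legacy query models get wrapped into the new two-level shape.
package main

import (
	"encoding/json"
	"fmt"
)

func migrate(model map[string]interface{}) map[string]interface{} {
	if _, ok := model["metricQuery"]; ok {
		return model // already in the new shape
	}
	return map[string]interface{}{
		"queryType":   "metrics",
		"metricQuery": model,
	}
}

func main() {
	// Field values taken from the "Parse migrated queries" unit test.
	legacy := map[string]interface{}{
		"metricType": "a/metric/type",
		"view":       "FULL",
		"aliasBy":    "testalias",
	}
	out, _ := json.MarshalIndent(migrate(legacy), "", "  ")
	fmt.Println(string(out))
}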
func reverse(s string) string {
chars := []rune(s)
for i, j := 0, len(chars)-1; i < j; i, j = i+1, j-1 {
@@ -222,7 +244,7 @@ func interpolateFilterWildcards(value string) string {
return value
}

func buildFilterString(metricType string, filterParts []interface{}) string {
func buildFilterString(metricType string, filterParts []string) string {
filterString := ""
for i, part := range filterParts {
mod := i % 4
@@ -233,33 +255,53 @@ func buildFilterString(metricType string, filterParts []interface{}) string {
if operator == "=~" || operator == "!=~" {
filterString = reverse(strings.Replace(reverse(filterString), "~", "", 1))
filterString += fmt.Sprintf(`monitoring.regex.full_match("%s")`, part)
} else if strings.Contains(part.(string), "*") {
filterString += interpolateFilterWildcards(part.(string))
} else if strings.Contains(part, "*") {
filterString += interpolateFilterWildcards(part)
} else {
filterString += fmt.Sprintf(`"%s"`, part)
}
} else {
filterString += part.(string)
filterString += part
}
}

return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, filterString), " ")
}

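For the simple case without regex operators or wildcard values, the flat ["key", "operator", "value", ...] filter array becomes a Monitoring filter string by quoting the value position; a minimal standalone sketch (not the PR's implementation, which also handles regex and wildcards):

// Simplified stand-in for buildFilterString covering only the plain case.
package main

import (
	"fmt"
	"strings"
)

func simpleFilterString(metricType string, parts []string) string {
	var sb strings.Builder
	for i, part := range parts {
		if i%4 == 2 {
			sb.WriteString(fmt.Sprintf("%q", part)) // quote the value position
		} else {
			sb.WriteString(part)
		}
	}
	return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, sb.String()), " ")
}

func main() {
	// Mirrors the unit test expectation:
	// metric.type="somemetrictype" zone!="us-central1-a"
	fmt.Println(simpleFilterString("somemetrictype", []string{"zone", "!=", "us-central1-a"}))
}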
func setAggParams(params *url.Values, query *tsdb.Query, durationSeconds int) {
crossSeriesReducer := query.Model.Get("crossSeriesReducer").MustString()
perSeriesAligner := query.Model.Get("perSeriesAligner").MustString()
alignmentPeriod := query.Model.Get("alignmentPeriod").MustString()
func buildSLOFilterExpression(q sloQuery) string {
return fmt.Sprintf(`%s("projects/%s/services/%s/serviceLevelObjectives/%s")`, q.SelectorName, q.ProjectName, q.ServiceId, q.SloId)
}

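A quick standalone illustration of the time series filter expression buildSLOFilterExpression produces; the project, service, and SLO names here are made-up examples.

package main

import "fmt"

func main() {
	selector, project, service, slo := "select_slo_compliance", "my-project", "my-service", "my-slo"
	fmt.Printf("%s(%q)\n", selector,
		fmt.Sprintf("projects/%s/services/%s/serviceLevelObjectives/%s", project, service, slo))
	// prints: select_slo_compliance("projects/my-project/services/my-service/serviceLevelObjectives/my-slo")
}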
if crossSeriesReducer == "" {
crossSeriesReducer = "REDUCE_NONE"
func setMetricAggParams(params *url.Values, query *metricQuery, durationSeconds int, intervalMs int64) {
if query.CrossSeriesReducer == "" {
query.CrossSeriesReducer = "REDUCE_NONE"
}

if perSeriesAligner == "" {
perSeriesAligner = "ALIGN_MEAN"
if query.PerSeriesAligner == "" {
query.PerSeriesAligner = "ALIGN_MEAN"
}

params.Add("aggregation.crossSeriesReducer", query.CrossSeriesReducer)
params.Add("aggregation.perSeriesAligner", query.PerSeriesAligner)
params.Add("aggregation.alignmentPeriod", calculateAlignmentPeriod(query.AlignmentPeriod, intervalMs, durationSeconds))

for _, groupBy := range query.GroupBys {
params.Add("aggregation.groupByFields", groupBy)
}
}

func setSloAggParams(params *url.Values, query *sloQuery, durationSeconds int, intervalMs int64) {
params.Add("aggregation.alignmentPeriod", calculateAlignmentPeriod(query.AlignmentPeriod, intervalMs, durationSeconds))
if query.SelectorName == "select_slo_health" {
params.Add("aggregation.perSeriesAligner", "ALIGN_MEAN")
} else {
params.Add("aggregation.perSeriesAligner", "ALIGN_NEXT_OLDER")
}
}

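A standalone sketch of the aligner choice setSloAggParams makes: select_slo_health series get ALIGN_MEAN, every other SLO selector gets ALIGN_NEXT_OLDER (names here match the diff above; the rest is illustrative).

package main

import (
	"fmt"
	"net/url"
)

func sloAggParams(selectorName, alignmentPeriod string) url.Values {
	params := url.Values{}
	params.Add("aggregation.alignmentPeriod", alignmentPeriod)
	if selectorName == "select_slo_health" {
		params.Add("aggregation.perSeriesAligner", "ALIGN_MEAN")
	} else {
		params.Add("aggregation.perSeriesAligner", "ALIGN_NEXT_OLDER")
	}
	return params
}

func main() {
	fmt.Println(sloAggParams("select_slo_health", "+60s").Encode())
	fmt.Println(sloAggParams("select_slo_compliance", "+60s").Encode())
}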
func calculateAlignmentPeriod(alignmentPeriod string, intervalMs int64, durationSeconds int) string {
if alignmentPeriod == "grafana-auto" || alignmentPeriod == "" {
alignmentPeriodValue := int(math.Max(float64(query.IntervalMs)/1000, 60.0))
alignmentPeriodValue := int(math.Max(float64(intervalMs)/1000, 60.0))
alignmentPeriod = "+" + strconv.Itoa(alignmentPeriodValue) + "s"
}

@@ -274,24 +316,15 @@ func setAggParams(params *url.Values, query *tsdb.Query, durationSeconds int) {
}
}

params.Add("aggregation.crossSeriesReducer", crossSeriesReducer)
params.Add("aggregation.perSeriesAligner", perSeriesAligner)
params.Add("aggregation.alignmentPeriod", alignmentPeriod)

groupBys := query.Model.Get("groupBys").MustArray()
if len(groupBys) > 0 {
for i := 0; i < len(groupBys); i++ {
params.Add("aggregation.groupByFields", groupBys[i].(string))
}
}
return alignmentPeriod
}

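The "grafana-auto" branch of calculateAlignmentPeriod boils down to max(intervalMs/1000, 60) seconds with a leading "+"; a standalone sketch of that arithmetic:

package main

import (
	"fmt"
	"math"
	"strconv"
)

func autoAlignmentPeriod(intervalMs int64) string {
	// panel interval in seconds, with a 60-second floor
	seconds := int(math.Max(float64(intervalMs)/1000, 60.0))
	return "+" + strconv.Itoa(seconds) + "s"
}

func main() {
	fmt.Println(autoAlignmentPeriod(1000))   // +60s (below the floor)
	fmt.Println(autoAlignmentPeriod(300000)) // +300s
}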
func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *StackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, StackdriverResponse, error) {
func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *stackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, stackdriverResponse, error) {
queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}
req, err := e.createRequest(ctx, e.dsInfo, query, fmt.Sprintf("stackdriver%s", "v3/projects/"+query.ProjectName+"/timeSeries"))
if err != nil {
queryResult.Error = err
return queryResult, StackdriverResponse{}, nil
return queryResult, stackdriverResponse{}, nil
}

req.URL.RawQuery = query.Params.Encode()
@@ -319,69 +352,47 @@ func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *Stackdriv
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
queryResult.Error = err
return queryResult, StackdriverResponse{}, nil
return queryResult, stackdriverResponse{}, nil
}

res, err := ctxhttp.Do(ctx, e.httpClient, req)
if err != nil {
queryResult.Error = err
return queryResult, StackdriverResponse{}, nil
return queryResult, stackdriverResponse{}, nil
}

data, err := e.unmarshalResponse(res)
if err != nil {
queryResult.Error = err
return queryResult, StackdriverResponse{}, nil
return queryResult, stackdriverResponse{}, nil
}

return queryResult, data, nil
}

func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (StackdriverResponse, error) {
func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (stackdriverResponse, error) {
body, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return StackdriverResponse{}, err
return stackdriverResponse{}, err
}

if res.StatusCode/100 != 2 {
slog.Error("Request failed", "status", res.Status, "body", string(body))
return StackdriverResponse{}, fmt.Errorf(string(body))
return stackdriverResponse{}, fmt.Errorf(string(body))
}

var data StackdriverResponse
var data stackdriverResponse
err = json.Unmarshal(body, &data)
if err != nil {
slog.Error("Failed to unmarshal Stackdriver response", "error", err, "status", res.Status, "body", string(body))
return StackdriverResponse{}, err
return stackdriverResponse{}, err
}

return data, nil
}

func (e *StackdriverExecutor) unmarshalResourceResponse(res *http.Response) (ResourceManagerProjectList, error) {
body, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return ResourceManagerProjectList{}, err
}

if res.StatusCode/100 != 2 {
slog.Error("Request failed", "status", res.Status, "body", string(body))
return ResourceManagerProjectList{}, fmt.Errorf(string(body))
}

var data ResourceManagerProjectList
err = json.Unmarshal(body, &data)
if err != nil {
slog.Error("Failed to unmarshal Resource manager response", "error", err, "status", res.Status, "body", string(body))
return ResourceManagerProjectList{}, err
}

return data, nil
}

func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery) error {
func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data stackdriverResponse, query *stackdriverQuery) error {
labels := make(map[string]map[string]bool)

for _, series := range data.TimeSeries {
@@ -389,6 +400,7 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
seriesLabels := make(map[string]string)
defaultMetricName := series.Metric.Type
labels["resource.type"] = map[string]bool{series.Resource.Type: true}
seriesLabels["resource.type"] = series.Resource.Type

for key, value := range series.Metric.Labels {
if _, ok := labels["metric.label."+key]; !ok {
@@ -546,7 +558,7 @@ func containsLabel(labels []string, newLabel string) bool {
return false
}

func formatLegendKeys(metricType string, defaultMetricName string, labels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
func formatLegendKeys(metricType string, defaultMetricName string, labels map[string]string, additionalLabels map[string]string, query *stackdriverQuery) string {
if query.AliasBy == "" {
return defaultMetricName
}
@@ -574,6 +586,22 @@ func formatLegendKeys(metricType string, defaultMetricName string, labels map[st
return []byte(val)
}

if metaPartName == "project" && query.ProjectName != "" {
return []byte(query.ProjectName)
}

if metaPartName == "service" && query.Service != "" {
return []byte(query.Service)
}

if metaPartName == "slo" && query.Slo != "" {
return []byte(query.Slo)
}

if metaPartName == "selector" && query.Selector != "" {
return []byte(query.Selector)
}

return in
})

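The four new alias placeholders added here can be pictured with a simple stand-in for the real regex-based replacement in formatLegendKeys (a sketch only, not the PR's implementation); the values mirror the unit test.

package main

import (
	"fmt"
	"strings"
)

func main() {
	aliasBy := "{{project}} - {{service}} - {{slo}} - {{selector}}"
	r := strings.NewReplacer(
		"{{project}}", "test-proj",
		"{{service}}", "test-service",
		"{{slo}}", "test-slo",
		"{{selector}}", "select_slo_compliance",
	)
	fmt.Println(r.Replace(aliasBy))
	// prints: test-proj - test-service - test-slo - select_slo_compliance
}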
@@ -599,7 +627,7 @@ func replaceWithMetricPart(metaPartName string, metricType string) []byte {
return nil
}

func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
func calcBucketBound(bucketOptions stackdriverBucketOptions, n int) string {
bucketBound := "0"
if n == 0 {
return bucketBound
@@ -615,7 +643,7 @@ func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
return bucketBound
}

func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource, query *StackdriverQuery, proxyPass string) (*http.Request, error) {
func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource, query *stackdriverQuery, proxyPass string) (*http.Request, error) {
u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "render")

@@ -647,39 +675,6 @@ func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.
return req, nil
}

func (e *StackdriverExecutor) createRequestResourceManager(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "render")

req, err := http.NewRequest(http.MethodGet, "https://cloudresourcemanager.googleapis.com/", nil)
if err != nil {
slog.Error("Failed to create request", "error", err)
return nil, fmt.Errorf("Failed to create request. error: %v", err)
}

req.Header.Set("Content-Type", "application/json")
req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))

// find plugin
plugin, ok := plugins.DataSources[dsInfo.Type]
if !ok {
return nil, errors.New("Unable to find datasource plugin Stackdriver")
}

var resourceManagerRoute *plugins.AppPluginRoute
for _, route := range plugin.Routes {
if route.Path == "cloudresourcemanager" {
resourceManagerRoute = route
break
}
}
proxyPass := "v1/projects"

pluginproxy.ApplyRoute(ctx, req, proxyPass, resourceManagerRoute, dsInfo)

return req, nil
}

func (e *StackdriverExecutor) getDefaultProject(ctx context.Context) (string, error) {
authenticationType := e.dsInfo.JsonData.Get("authenticationType").MustString(jwtAuthentication)
if authenticationType == gceAuthentication {
@@ -699,55 +694,3 @@ func (e *StackdriverExecutor) getDefaultProject(ctx context.Context) (string, er
}
return e.dsInfo.JsonData.Get("defaultProject").MustString(), nil
}

func (e *StackdriverExecutor) getProjectList(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: tsdbQuery.Queries[0].RefId}
result := &tsdb.Response{
Results: make(map[string]*tsdb.QueryResult),
}
projectsList, err := e.getProjects(ctx)
if err != nil {
return nil, err
}

queryResult.Meta.Set("projectsList", projectsList)
result.Results[tsdbQuery.Queries[0].RefId] = queryResult
return result, nil
}

func (e *StackdriverExecutor) getProjects(ctx context.Context) ([]ResourceManagerProjectSelect, error) {
var projects []ResourceManagerProjectSelect

req, err := e.createRequestResourceManager(ctx, e.dsInfo)
if err != nil {
return nil, err
}

span, ctx := opentracing.StartSpanFromContext(ctx, "resource manager query")
span.SetTag("datasource_id", e.dsInfo.Id)
span.SetTag("org_id", e.dsInfo.OrgId)

defer span.Finish()

if err := opentracing.GlobalTracer().Inject(
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
return nil, err
}

res, err := ctxhttp.Do(ctx, e.httpClient, req)
if err != nil {
return nil, err
}

data, err := e.unmarshalResourceResponse(res)
if err != nil {
return nil, err
}

for _, project := range data.Projects {
projects = append(projects, ResourceManagerProjectSelect{Label: project.ProjectID, Value: project.ProjectID})
}
return projects, nil
}

@@ -19,7 +19,7 @@ func TestStackdriver(t *testing.T) {
Convey("Stackdriver", t, func() {
executor := &StackdriverExecutor{}

Convey("Parse queries from frontend and build Stackdriver API queries", func() {
Convey("Parse migrated queries from frontend and build Stackdriver API queries", func() {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
tsdbQuery := &tsdb.TsdbQuery{
TimeRange: &tsdb.TimeRange{
@@ -208,6 +208,99 @@ func TestStackdriver(t *testing.T) {

})

Convey("Parse queries from frontend and build Stackdriver API queries", func() {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
tsdbQuery := &tsdb.TsdbQuery{
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"queryType": metricQueryType,
"metricQuery": map[string]interface{}{
"metricType": "a/metric/type",
"view": "FULL",
"aliasBy": "testalias",
"type": "timeSeriesQuery",
"groupBys": []interface{}{"metric.label.group1", "metric.label.group2"},
},
}),
RefId: "A",
},
},
}

Convey("and query type is metrics", func() {
queries, err := executor.buildQueries(tsdbQuery)
So(err, ShouldBeNil)

So(len(queries), ShouldEqual, 1)
So(queries[0].RefID, ShouldEqual, "A")
So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL")
So(len(queries[0].Params), ShouldEqual, 8)
So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1")
So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2")
So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"")
So(queries[0].Params["view"][0], ShouldEqual, "FULL")
So(queries[0].AliasBy, ShouldEqual, "testalias")
So(queries[0].GroupBys, ShouldResemble, []string{"metric.label.group1", "metric.label.group2"})
})

Convey("and query type is SLOs", func() {
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
"queryType": sloQueryType,
"metricQuery": map[string]interface{}{},
"sloQuery": map[string]interface{}{
"projectName": "test-proj",
"alignmentPeriod": "stackdriver-auto",
"perSeriesAligner": "ALIGN_NEXT_OLDER",
"aliasBy": "",
"selectorName": "select_slo_health",
"serviceId": "test-service",
"sloId": "test-slo",
},
})

queries, err := executor.buildQueries(tsdbQuery)
So(err, ShouldBeNil)

So(len(queries), ShouldEqual, 1)
So(queries[0].RefID, ShouldEqual, "A")
So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`)
So(queries[0].AliasBy, ShouldEqual, "")
So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
So(queries[0].Target, ShouldEqual, `aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_MEAN&filter=select_slo_health%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z`)
So(len(queries[0].Params), ShouldEqual, 5)

Convey("and perSeriesAligner is inferred by SLO selector", func() {
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
"queryType": sloQueryType,
"metricQuery": map[string]interface{}{},
"sloQuery": map[string]interface{}{
"projectName": "test-proj",
"alignmentPeriod": "stackdriver-auto",
"perSeriesAligner": "ALIGN_NEXT_OLDER",
"aliasBy": "",
"selectorName": "select_slo_compliance",
"serviceId": "test-service",
"sloId": "test-slo",
},
})

queries, err := executor.buildQueries(tsdbQuery)
So(err, ShouldBeNil)
So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_NEXT_OLDER")
})
})
})

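The URL-encoded Target asserted in the SLO test above is easier to read once decoded; a small standalone sketch using net/url, with the target string copied from that assertion:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	target := "aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_MEAN&filter=select_slo_health%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z"
	params, err := url.ParseQuery(target)
	if err != nil {
		panic(err)
	}
	for key, values := range params {
		fmt.Println(key, "=", values[0])
	}
	// e.g. filter = select_slo_health("projects/test-proj/services/test-service/serviceLevelObjectives/test-slo")
}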
Convey("Parse stackdriver response in the time series format", func() {
|
||||
Convey("when data from query aggregated to one time series", func() {
|
||||
data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json")
|
||||
@@ -215,7 +308,7 @@ func TestStackdriver(t *testing.T) {
|
||||
So(len(data.TimeSeries), ShouldEqual, 1)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{}
|
||||
query := &stackdriverQuery{}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
@@ -241,7 +334,7 @@ func TestStackdriver(t *testing.T) {
|
||||
So(len(data.TimeSeries), ShouldEqual, 3)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{}
|
||||
query := &stackdriverQuery{}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
@@ -283,7 +376,7 @@ func TestStackdriver(t *testing.T) {
|
||||
So(len(data.TimeSeries), ShouldEqual, 3)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
|
||||
query := &stackdriverQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
@@ -304,7 +397,7 @@ func TestStackdriver(t *testing.T) {
|
||||
|
||||
Convey("and the alias pattern is for metric type, a metric label and a resource label", func() {
|
||||
|
||||
query := &StackdriverQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
|
||||
query := &stackdriverQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
@@ -318,7 +411,7 @@ func TestStackdriver(t *testing.T) {
|
||||
|
||||
Convey("and the alias pattern is for metric name", func() {
|
||||
|
||||
query := &StackdriverQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
|
||||
query := &stackdriverQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
@@ -337,7 +430,7 @@ func TestStackdriver(t *testing.T) {
|
||||
So(len(data.TimeSeries), ShouldEqual, 1)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{AliasBy: "{{bucket}}"}
|
||||
query := &stackdriverQuery{AliasBy: "{{bucket}}"}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
@@ -384,7 +477,7 @@ func TestStackdriver(t *testing.T) {
|
||||
So(len(data.TimeSeries), ShouldEqual, 1)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{AliasBy: "{{bucket}}"}
|
||||
query := &stackdriverQuery{AliasBy: "{{bucket}}"}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
@@ -424,7 +517,7 @@ func TestStackdriver(t *testing.T) {
|
||||
So(len(data.TimeSeries), ShouldEqual, 3)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{AliasBy: "{{bucket}}"}
|
||||
query := &stackdriverQuery{AliasBy: "{{bucket}}"}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
labels := res.Meta.Get("labels").Interface().(map[string][]string)
|
||||
So(err, ShouldBeNil)
|
||||
@@ -463,7 +556,7 @@ func TestStackdriver(t *testing.T) {
|
||||
|
||||
Convey("and systemlabel contains key with array of string", func() {
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{AliasBy: "{{metadata.system_labels.test}}"}
|
||||
query := &stackdriverQuery{AliasBy: "{{metadata.system_labels.test}}"}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
So(len(res.Series), ShouldEqual, 3)
|
||||
@@ -475,7 +568,7 @@ func TestStackdriver(t *testing.T) {
|
||||
|
||||
Convey("and systemlabel contains key with array of string2", func() {
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &StackdriverQuery{AliasBy: "{{metadata.system_labels.test2}}"}
|
||||
query := &stackdriverQuery{AliasBy: "{{metadata.system_labels.test2}}"}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
So(len(res.Series), ShouldEqual, 3)
|
||||
@@ -483,6 +576,45 @@ func TestStackdriver(t *testing.T) {
|
||||
So(res.Series[2].Name, ShouldEqual, "testvalue")
|
||||
})
|
||||
})
|
||||
|
||||
Convey("when data from query returns slo and alias by is defined", func() {
|
||||
data, err := loadTestFile("./test-data/6-series-response-slo.json")
|
||||
So(err, ShouldBeNil)
|
||||
So(len(data.TimeSeries), ShouldEqual, 1)
|
||||
|
||||
Convey("and alias by is expanded", func() {
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &stackdriverQuery{
|
||||
ProjectName: "test-proj",
|
||||
Selector: "select_slo_compliance",
|
||||
Service: "test-service",
|
||||
Slo: "test-slo",
|
||||
AliasBy: "{{project}} - {{service}} - {{slo}} - {{selector}}",
|
||||
}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
So(res.Series[0].Name, ShouldEqual, "test-proj - test-service - test-slo - select_slo_compliance")
|
||||
})
|
||||
})
|
||||
|
||||
Convey("when data from query returns slo and alias by is not defined", func() {
|
||||
data, err := loadTestFile("./test-data/6-series-response-slo.json")
|
||||
So(err, ShouldBeNil)
|
||||
So(len(data.TimeSeries), ShouldEqual, 1)
|
||||
|
||||
Convey("and alias by is expanded", func() {
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &stackdriverQuery{
|
||||
ProjectName: "test-proj",
|
||||
Selector: "select_slo_compliance",
|
||||
Service: "test-service",
|
||||
Slo: "test-slo",
|
||||
}
|
||||
err = executor.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
So(res.Series[0].Name, ShouldEqual, "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("when interpolating filter wildcards", func() {
|
||||
@@ -550,20 +682,20 @@ func TestStackdriver(t *testing.T) {
|
||||
Convey("when building filter string", func() {
|
||||
Convey("and theres no regex operator", func() {
|
||||
Convey("and there are wildcards in a filter value", func() {
|
||||
filterParts := []interface{}{"zone", "=", "*-central1*"}
|
||||
filterParts := []string{"zone", "=", "*-central1*"}
|
||||
value := buildFilterString("somemetrictype", filterParts)
|
||||
So(value, ShouldEqual, `metric.type="somemetrictype" zone=has_substring("-central1")`)
|
||||
})
|
||||
|
||||
Convey("and there are no wildcards in any filter value", func() {
|
||||
filterParts := []interface{}{"zone", "!=", "us-central1-a"}
|
||||
filterParts := []string{"zone", "!=", "us-central1-a"}
|
||||
value := buildFilterString("somemetrictype", filterParts)
|
||||
So(value, ShouldEqual, `metric.type="somemetrictype" zone!="us-central1-a"`)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and there is a regex operator", func() {
|
||||
filterParts := []interface{}{"zone", "=~", "us-central1-a~"}
|
||||
filterParts := []string{"zone", "=~", "us-central1-a~"}
|
||||
value := buildFilterString("somemetrictype", filterParts)
|
||||
Convey("it should remove the ~ character from the operator that belongs to the value", func() {
|
||||
So(value, ShouldNotContainSubstring, `=~`)
|
||||
@@ -578,8 +710,8 @@ func TestStackdriver(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func loadTestFile(path string) (StackdriverResponse, error) {
|
||||
var data StackdriverResponse
|
||||
func loadTestFile(path string) (stackdriverResponse, error) {
|
||||
var data stackdriverResponse
|
||||
|
||||
jsonBody, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
|
||||
pkg/tsdb/stackdriver/test-data/6-series-response-slo.json (new file, 17 lines)
@@ -0,0 +1,17 @@
{
"timeSeries": [{
"metric": {
"type": "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")"
},
"resource": {
"type": "gce_instance",
"labels": {
"instance_id": "114250375703598695",
"project_id": "test-proj"
}
},
"metricKind": "DELTA",
"valueType": "INT64"
}
]
}
@@ -6,17 +6,49 @@ import (
)

type (
// StackdriverQuery is the query that Grafana sends from the frontend
StackdriverQuery struct {
stackdriverQuery struct {
Target string
Params url.Values
RefID string
GroupBys []string
AliasBy string
ProjectName string
Selector string
Service string
Slo string
}

StackdriverBucketOptions struct {
metricQuery struct {
ProjectName string
MetricType string
CrossSeriesReducer string
AlignmentPeriod string
PerSeriesAligner string
GroupBys []string
Filters []string
AliasBy string
View string
}

sloQuery struct {
ProjectName string
AlignmentPeriod string
PerSeriesAligner string
AliasBy string
SelectorName string
ServiceId string
SloId string
}

grafanaQuery struct {
DatasourceId int
RefId string
QueryType string
MetricQuery metricQuery
SloQuery sloQuery
}

stackdriverBucketOptions struct {
LinearBuckets *struct {
NumFiniteBuckets int64 `json:"numFiniteBuckets"`
Width int64 `json:"width"`
@@ -32,8 +64,7 @@ type (
} `json:"explicitBuckets"`
}

// StackdriverResponse is the data returned from the external Google Stackdriver API
StackdriverResponse struct {
stackdriverResponse struct {
TimeSeries []struct {
Metric struct {
Labels map[string]string `json:"labels"`
@@ -64,7 +95,7 @@ type (
Min int `json:"min"`
Max int `json:"max"`
} `json:"range"`
BucketOptions StackdriverBucketOptions `json:"bucketOptions"`
BucketOptions stackdriverBucketOptions `json:"bucketOptions"`
BucketCounts []string `json:"bucketCounts"`
Examplars []struct {
Value float64 `json:"value"`
@@ -76,18 +107,4 @@ type (
} `json:"points"`
} `json:"timeSeries"`
}

// ResourceManagerProjectList is the data returned from the external Google Resource Manager API
ResourceManagerProjectList struct {
Projects []ResourceManagerProject `json:"projects"`
}

ResourceManagerProject struct {
ProjectID string `json:"projectId"`
}

ResourceManagerProjectSelect struct {
Label string `json:"label"`
Value string `json:"value"`
}
)