Merge pull request #13490 from mtanda/stackdriver_distribution
(https://github.com/grafana/grafana, commit a1d4675169)

Stackdriver heatmap support. Fixes #13559
@@ -341,7 +341,28 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
 	for _, series := range data.TimeSeries {
 		points := make([]tsdb.TimePoint, 0)
 
+		defaultMetricName := series.Metric.Type
+
+		for key, value := range series.Metric.Labels {
+			if !containsLabel(metricLabels[key], value) {
+				metricLabels[key] = append(metricLabels[key], value)
+			}
+			if len(query.GroupBys) == 0 || containsLabel(query.GroupBys, "metric.label."+key) {
+				defaultMetricName += " " + value
+			}
+		}
+
+		for key, value := range series.Resource.Labels {
+			if !containsLabel(resourceLabels[key], value) {
+				resourceLabels[key] = append(resourceLabels[key], value)
+			}
+			if containsLabel(query.GroupBys, "resource.label."+key) {
+				defaultMetricName += " " + value
+			}
+		}
+
 		// reverse the order to be ascending
+		if series.ValueType != "DISTRIBUTION" {
 			for i := len(series.Points) - 1; i >= 0; i-- {
 				point := series.Points[i]
 				value := point.Value.DoubleValue
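This hunk moves the default legend-name assembly ahead of the points loop so the distribution branch added below can reuse it. A minimal standalone sketch of that assembly, with illustrative helper and label names (the metric type is taken from the test fixture; nothing here is code from the patch):

package main

import "fmt"

// contains mirrors containsLabel from the patch: a linear scan.
func contains(list []string, s string) bool {
	for _, v := range list {
		if v == s {
			return true
		}
	}
	return false
}

// defaultName mirrors how defaultMetricName is built: the metric type,
// plus each metric label value when there are no group-bys or when the
// label is grouped by.
func defaultName(metricType string, metricLabels map[string]string, groupBys []string) string {
	name := metricType
	for key, value := range metricLabels {
		if len(groupBys) == 0 || contains(groupBys, "metric.label."+key) {
			name += " " + value
		}
	}
	return name
}

func main() {
	labels := map[string]string{"response_code_class": "200"}
	fmt.Println(defaultName("loadbalancing.googleapis.com/https/backend_latencies", labels, nil))
	// loadbalancing.googleapis.com/https/backend_latencies 200
}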
@@ -364,33 +385,58 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
 				points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
 			}
 
-		defaultMetricName := series.Metric.Type
-
-		for key, value := range series.Metric.Labels {
-			if !containsLabel(metricLabels[key], value) {
-				metricLabels[key] = append(metricLabels[key], value)
-			}
-			if len(query.GroupBys) == 0 || containsLabel(query.GroupBys, "metric.label."+key) {
-				defaultMetricName += " " + value
-			}
-		}
-
-		for key, value := range series.Resource.Labels {
-			if !containsLabel(resourceLabels[key], value) {
-				resourceLabels[key] = append(resourceLabels[key], value)
-			}
-
-			if containsLabel(query.GroupBys, "resource.label."+key) {
-				defaultMetricName += " " + value
-			}
-		}
-
-		metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, query)
+			metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query)
 
 			queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
 				Name:   metricName,
 				Points: points,
 			})
+		} else {
+			buckets := make(map[int]*tsdb.TimeSeries)
+
+			for i := len(series.Points) - 1; i >= 0; i-- {
+				point := series.Points[i]
+				if len(point.Value.DistributionValue.BucketCounts) == 0 {
+					continue
+				}
+				maxKey := 0
+				for i := 0; i < len(point.Value.DistributionValue.BucketCounts); i++ {
+					value, err := strconv.ParseFloat(point.Value.DistributionValue.BucketCounts[i], 64)
+					if err != nil {
+						continue
+					}
+					if _, ok := buckets[i]; !ok {
+						// set lower bounds
+						// https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
+						bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
+						additionalLabels := map[string]string{"bucket": bucketBound}
+						buckets[i] = &tsdb.TimeSeries{
+							Name:   formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
+							Points: make([]tsdb.TimePoint, 0),
+						}
+						if maxKey < i {
+							maxKey = i
+						}
+					}
+					buckets[i].Points = append(buckets[i].Points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
+				}
+
+				// fill empty bucket
+				for i := 0; i < maxKey; i++ {
+					if _, ok := buckets[i]; !ok {
+						bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
+						additionalLabels := map[string]string{"bucket": bucketBound}
+						buckets[i] = &tsdb.TimeSeries{
+							Name:   formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
+							Points: make([]tsdb.TimePoint, 0),
+						}
+					}
+				}
+			}
+			for i := 0; i < len(buckets); i++ {
+				queryRes.Series = append(queryRes.Series, buckets[i])
+			}
+		}
 	}
 
 	queryRes.Meta.Set("resourceLabels", resourceLabels)
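For a DISTRIBUTION series the patch emits one time series per histogram bucket, named by the bucket's lower bound, which is a shape the heatmap panel can read as pre-bucketed data. A stripped-down sketch of that accumulation using plain types instead of the tsdb package (all names below are illustrative, and timestamps and legend formatting are omitted):

package main

import (
	"fmt"
	"strconv"
)

// perBucketSeries walks points oldest-first and appends each bucket's
// count to a per-bucket slice, mirroring the map[int]*tsdb.TimeSeries
// the patch builds keyed by bucket index.
func perBucketSeries(points [][]string) (map[int][]float64, error) {
	buckets := map[int][]float64{}
	for _, counts := range points {
		for i, raw := range counts {
			v, err := strconv.ParseFloat(raw, 64)
			if err != nil {
				return nil, err
			}
			buckets[i] = append(buckets[i], v)
		}
	}
	return buckets, nil
}

func main() {
	// Two points, three buckets: bucket 2 ends up with the series [1, 3].
	out, err := perBucketSeries([][]string{{"0", "0", "1"}, {"0", "2", "3"}})
	if err != nil {
		panic(err)
	}
	fmt.Println(out[2]) // [1 3]
}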
@@ -409,7 +455,7 @@ func containsLabel(labels []string, newLabel string) bool {
 	return false
 }
 
-func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, query *StackdriverQuery) string {
+func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
 	if query.AliasBy == "" {
 		return defaultMetricName
 	}
@@ -441,6 +487,10 @@ func formatLegendKeys(metricType string, defaultMetricName string, metricLabels
 			return []byte(val)
 		}
 
+		if val, exists := additionalLabels[metaPartName]; exists {
+			return []byte(val)
+		}
+
 		return in
 	})
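With this lookup in place, an alias pattern such as {{bucket}} resolves from additionalLabels, falling back to the raw token (returning in) when nothing matches. A rough standalone sketch of that substitution; the regex and function names here are illustrative rather than copied from the file:

package main

import (
	"fmt"
	"regexp"
)

var legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

// renderAlias resolves {{token}} placeholders from a label map, leaving
// unknown tokens untouched, the same fallback behaviour as returning
// `in` in formatLegendKeys.
func renderAlias(pattern string, labels map[string]string) string {
	return legendKeyFormat.ReplaceAllStringFunc(pattern, func(in string) string {
		key := legendKeyFormat.FindStringSubmatch(in)[1]
		if val, ok := labels[key]; ok {
			return val
		}
		return in
	})
}

func main() {
	fmt.Println(renderAlias("{{bucket}}", map[string]string{"bucket": "128"})) // 128
}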
@@ -466,6 +516,22 @@ func replaceWithMetricPart(metaPartName string, metricType string) []byte {
 	return nil
 }
 
+func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
+	bucketBound := "0"
+	if n == 0 {
+		return bucketBound
+	}
+
+	if bucketOptions.LinearBuckets != nil {
+		bucketBound = strconv.FormatInt(bucketOptions.LinearBuckets.Offset+(bucketOptions.LinearBuckets.Width*int64(n-1)), 10)
+	} else if bucketOptions.ExponentialBuckets != nil {
+		bucketBound = strconv.FormatInt(int64(bucketOptions.ExponentialBuckets.Scale*math.Pow(bucketOptions.ExponentialBuckets.GrowthFactor, float64(n-1))), 10)
+	} else if bucketOptions.ExplicitBuckets != nil {
+		bucketBound = strconv.FormatInt(bucketOptions.ExplicitBuckets.Bounds[(n-1)], 10)
+	}
+	return bucketBound
+}
+
 func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
 	u, _ := url.Parse(dsInfo.Url)
 	u.Path = path.Join(u.Path, "render")
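To see what calcBucketBound yields for the exponential options used in the new test fixture (scale 1, growth factor 2), here is a minimal standalone check of the same arithmetic; exponentialBound is an illustrative helper, not part of the patch:

package main

import (
	"fmt"
	"math"
	"strconv"
)

// exponentialBound mirrors the exponential branch of calcBucketBound:
// bucket 0 is the underflow bucket with lower bound 0, and bucket n > 0
// has lower bound scale * growthFactor^(n-1).
func exponentialBound(scale, growthFactor float64, n int) string {
	if n == 0 {
		return "0"
	}
	return strconv.FormatInt(int64(scale*math.Pow(growthFactor, float64(n-1))), 10)
}

func main() {
	// With scale 1 and growthFactor 2 the bounds come out as
	// 0, 1, 2, 4, ..., 512 for buckets 0..10, exactly the series
	// names the new test asserts.
	for n := 0; n <= 10; n++ {
		fmt.Println(n, exponentialBound(1, 2, n))
	}
}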
@@ -4,6 +4,8 @@ import (
 	"encoding/json"
 	"fmt"
 	"io/ioutil"
+	"math"
+	"strconv"
 	"testing"
 	"time"
@@ -341,6 +343,46 @@ func TestStackdriver(t *testing.T) {
 			})
 		})
 	})
+
+		Convey("when data from query is distribution", func() {
+			data, err := loadTestFile("./test-data/3-series-response-distribution.json")
+			So(err, ShouldBeNil)
+			So(len(data.TimeSeries), ShouldEqual, 1)
+
+			res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+			query := &StackdriverQuery{AliasBy: "{{bucket}}"}
+			err = executor.parseResponse(res, data, query)
+			So(err, ShouldBeNil)
+
+			So(len(res.Series), ShouldEqual, 11)
+			for i := 0; i < 11; i++ {
+				if i == 0 {
+					So(res.Series[i].Name, ShouldEqual, "0")
+				} else {
+					So(res.Series[i].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10))
+				}
+				So(len(res.Series[i].Points), ShouldEqual, 3)
+			}
+
+			Convey("timestamps should be in ascending order", func() {
+				So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536668940000)
+				So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536669000000)
+				So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536669060000)
+			})
+
+			Convey("value should be correct", func() {
+				So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1)
+				So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1)
+				So(res.Series[10].Points[0][0].Float64, ShouldEqual, 1)
+				So(res.Series[8].Points[1][0].Float64, ShouldEqual, 0)
+				So(res.Series[9].Points[1][0].Float64, ShouldEqual, 0)
+				So(res.Series[10].Points[1][0].Float64, ShouldEqual, 1)
+				So(res.Series[8].Points[2][0].Float64, ShouldEqual, 0)
+				So(res.Series[9].Points[2][0].Float64, ShouldEqual, 1)
+				So(res.Series[10].Points[2][0].Float64, ShouldEqual, 0)
+			})
+		})
 
 	})
 
 	Convey("when interpolating filter wildcards", func() {
New test fixture, test-data/3-series-response-distribution.json:

@@ -0,0 +1,112 @@
+{
+  "timeSeries": [
+    {
+      "metric": {
+        "type": "loadbalancing.googleapis.com\/https\/backend_latencies"
+      },
+      "resource": {
+        "type": "https_lb_rule",
+        "labels": {
+          "project_id": "grafana-prod"
+        }
+      },
+      "metricKind": "DELTA",
+      "valueType": "DISTRIBUTION",
+      "points": [
+        {
+          "interval": {
+            "startTime": "2018-09-11T12:30:00Z",
+            "endTime": "2018-09-11T12:31:00Z"
+          },
+          "value": {
+            "distributionValue": {
+              "count": "1",
+              "bucketOptions": {
+                "exponentialBuckets": {
+                  "numFiniteBuckets": 10,
+                  "growthFactor": 2,
+                  "scale": 1
+                }
+              },
+              "bucketCounts": [
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "1",
+                "0"
+              ]
+            }
+          }
+        },
+        {
+          "interval": {
+            "startTime": "2018-09-11T12:29:00Z",
+            "endTime": "2018-09-11T12:30:00Z"
+          },
+          "value": {
+            "distributionValue": {
+              "count": "1",
+              "bucketOptions": {
+                "exponentialBuckets": {
+                  "numFiniteBuckets": 10,
+                  "growthFactor": 2,
+                  "scale": 1
+                }
+              },
+              "bucketCounts": [
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "1"
+              ]
+            }
+          }
+        },
+        {
+          "interval": {
+            "startTime": "2018-09-11T12:28:00Z",
+            "endTime": "2018-09-11T12:29:00Z"
+          },
+          "value": {
+            "distributionValue": {
+              "count": "3",
+              "bucketOptions": {
+                "exponentialBuckets": {
+                  "numFiniteBuckets": 10,
+                  "growthFactor": 2,
+                  "scale": 1
+                }
+              },
+              "bucketCounts": [
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "0",
+                "1",
+                "1",
+                "1"
+              ]
+            }
+          }
+        }
+      ]
+    }
+  ]
+}
@@ -14,6 +14,22 @@ type StackdriverQuery struct {
 	AliasBy string
 }
 
+type StackdriverBucketOptions struct {
+	LinearBuckets *struct {
+		NumFiniteBuckets int64 `json:"numFiniteBuckets"`
+		Width            int64 `json:"width"`
+		Offset           int64 `json:"offset"`
+	} `json:"linearBuckets"`
+	ExponentialBuckets *struct {
+		NumFiniteBuckets int64   `json:"numFiniteBuckets"`
+		GrowthFactor     float64 `json:"growthFactor"`
+		Scale            float64 `json:"scale"`
+	} `json:"exponentialBuckets"`
+	ExplicitBuckets *struct {
+		Bounds []int64 `json:"bounds"`
+	} `json:"explicitBuckets"`
+}
+
 // StackdriverResponse is the data returned from the external Google Stackdriver API
 type StackdriverResponse struct {
 	TimeSeries []struct {
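Exactly one of the three bucket option variants appears in a given response, so the pointer fields double as presence flags for calcBucketBound's nil checks. A small sketch showing the exponential variant decode; the local bucketOptions type mirrors only the relevant slice of StackdriverBucketOptions:

package main

import (
	"encoding/json"
	"fmt"
)

// bucketOptions has the same shape as the exponential part of
// StackdriverBucketOptions; the pointer stays nil for variants the
// response omits, which is what calcBucketBound branches on.
type bucketOptions struct {
	ExponentialBuckets *struct {
		NumFiniteBuckets int64   `json:"numFiniteBuckets"`
		GrowthFactor     float64 `json:"growthFactor"`
		Scale            float64 `json:"scale"`
	} `json:"exponentialBuckets"`
}

func main() {
	raw := `{"exponentialBuckets":{"numFiniteBuckets":10,"growthFactor":2,"scale":1}}`
	var opts bucketOptions
	if err := json.Unmarshal([]byte(raw), &opts); err != nil {
		panic(err)
	}
	fmt.Println(opts.ExponentialBuckets.GrowthFactor, opts.ExponentialBuckets.Scale) // 2 1
}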
@@ -37,6 +53,22 @@ type StackdriverResponse struct {
 			StringValue string `json:"stringValue"`
 			BoolValue   bool   `json:"boolValue"`
 			IntValue    string `json:"int64Value"`
+			DistributionValue struct {
+				Count                 string  `json:"count"`
+				Mean                  float64 `json:"mean"`
+				SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
+				Range                 struct {
+					Min int `json:"min"`
+					Max int `json:"max"`
+				} `json:"range"`
+				BucketOptions StackdriverBucketOptions `json:"bucketOptions"`
+				BucketCounts  []string                 `json:"bucketCounts"`
+				Examplars     []struct {
+					Value     float64 `json:"value"`
+					Timestamp string  `json:"timestamp"`
+					// attachments
+				} `json:"examplars"`
+			} `json:"distributionValue"`
 		} `json:"value"`
 	} `json:"points"`
 } `json:"timeSeries"`
@@ -19,7 +19,7 @@ export const alignOptions = [
   {
     text: 'delta',
     value: 'ALIGN_DELTA',
-    valueTypes: [ValueTypes.INT64, ValueTypes.DOUBLE, ValueTypes.MONEY],
+    valueTypes: [ValueTypes.INT64, ValueTypes.DOUBLE, ValueTypes.MONEY, ValueTypes.DISTRIBUTION],
     metricKinds: [MetricKind.CUMULATIVE, MetricKind.DELTA],
   },
   {