GoogleCloudMonitoring: Refactor parseResponse (#59595)

* GoogleCloudMonitoring: Remove duplicated code from buildDeepLink (#59600)

parent a2ed586576
commit 0d4fd305c4
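In outline, this refactor collapses two near-duplicate parseResponse/buildDeepLink implementations into shared helpers. A rough sketch of the new flow for the list-query path (the names are the real ones from the diff below; surrounding plumbing is elided):

    // Sketch only: both parsers now extract labels, build a frame,
    // and defer the per-series work to appendFrames.
    seriesLabels, defaultMetricName := extractTimeSeriesLabels(series, groupBys)
    frames, err = appendFrames(frames, series, 0, defaultMetricName, seriesLabels, frame, query)

    // Deep links are built once in generateLink instead of twice inline.
    link, err := generateLink(projectName, dataSets, startTime, endTime)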
@@ -2,7 +2,6 @@ package cloudmonitoring

import (
    "context"
    "encoding/json"
    "fmt"
    "net/url"
    "strconv"
@@ -21,133 +20,78 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesList) run(ctx context.Context,
    return runTimeSeriesRequest(ctx, timeSeriesFilter.logger, req, s, dsInfo, tracer, timeSeriesFilter.parameters.ProjectName, timeSeriesFilter.params, nil)
}

func extractTimeSeriesLabels(series timeSeries, groupBys []string) (data.Labels, string) {
    seriesLabels := data.Labels{}
    defaultMetricName := series.Metric.Type
    seriesLabels["resource.type"] = series.Resource.Type
    groupBysMap := make(map[string]bool)
    for _, groupBy := range groupBys {
        groupBysMap[groupBy] = true
    }

    for key, value := range series.Metric.Labels {
        seriesLabels["metric.label."+key] = value

        if len(groupBys) == 0 || groupBysMap["metric.label."+key] {
            defaultMetricName += " " + value
        }
    }

    for key, value := range series.Resource.Labels {
        seriesLabels["resource.label."+key] = value

        if groupBysMap["resource.label."+key] {
            defaultMetricName += " " + value
        }
    }

    for labelType, labelTypeValues := range series.MetaData {
        for labelKey, labelValue := range labelTypeValues {
            key := xstrings.ToSnakeCase(fmt.Sprintf("metadata.%s.%s", labelType, labelKey))

            switch v := labelValue.(type) {
            case string:
                seriesLabels[key] = v
            case bool:
                strVal := strconv.FormatBool(v)
                seriesLabels[key] = strVal
            case []interface{}:
                for _, v := range v {
                    strVal := v.(string)
                    if len(seriesLabels[key]) > 0 {
                        strVal = fmt.Sprintf("%s, %s", seriesLabels[key], strVal)
                    }
                    seriesLabels[key] = strVal
                }
            }
        }
    }

    return seriesLabels, defaultMetricName
}
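The []interface{} branch above merges repeated metadata values into one comma-separated label value. A standalone, runnable sketch of just that behavior (the key and the values here are hypothetical):

    package main

    import "fmt"

    func main() {
        seriesLabels := map[string]string{}
        key := "metadata.system_labels.tags" // hypothetical metadata key
        for _, v := range []interface{}{"prod", "eu-west1"} {
            strVal := v.(string)
            if len(seriesLabels[key]) > 0 {
                strVal = fmt.Sprintf("%s, %s", seriesLabels[key], strVal)
            }
            seriesLabels[key] = strVal
        }
        fmt.Println(seriesLabels[key]) // prod, eu-west1
    }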
func parseTimeSeriesResponse(queryRes *backend.DataResponse,
    response cloudMonitoringResponse, executedQueryString string, query cloudMonitoringQueryExecutor, params url.Values, groupBys []string) error {
    frames := data.Frames{}

    for _, series := range response.TimeSeries {
        seriesLabels := data.Labels{}
        defaultMetricName := series.Metric.Type
        labels := make(map[string]string)
        labels["resource.type"] = series.Resource.Type
        seriesLabels["resource.type"] = series.Resource.Type
        groupBysMap := make(map[string]bool)
        for _, groupBy := range groupBys {
            groupBysMap[groupBy] = true
        }

        seriesLabels, defaultMetricName := extractTimeSeriesLabels(series, groupBys)
        frame := data.NewFrameOfFieldTypes("", len(series.Points), data.FieldTypeTime, data.FieldTypeFloat64)
        frame.RefID = query.getRefID()
        frame.Meta = &data.FrameMeta{
            ExecutedQueryString: executedQueryString,
            Custom: map[string]interface{}{
                "alignmentPeriod":  params.Get("aggregation.alignmentPeriod"),
                "perSeriesAligner": params.Get("aggregation.perSeriesAligner"),
                "labels":           seriesLabels,
                "groupBys":         groupBys,
            },
        }

        for key, value := range series.Metric.Labels {
            labels["metric.label."+key] = value
            seriesLabels["metric.label."+key] = value

            if len(groupBys) == 0 || groupBysMap["metric.label."+key] {
                defaultMetricName += " " + value
            }
        }

        for key, value := range series.Resource.Labels {
            labels["resource.label."+key] = value
            seriesLabels["resource.label."+key] = value

            if groupBysMap["resource.label."+key] {
                defaultMetricName += " " + value
            }
        }

        for labelType, labelTypeValues := range series.MetaData {
            for labelKey, labelValue := range labelTypeValues {
                key := xstrings.ToSnakeCase(fmt.Sprintf("metadata.%s.%s", labelType, labelKey))

                switch v := labelValue.(type) {
                case string:
                    labels[key] = v
                    seriesLabels[key] = v
                case bool:
                    strVal := strconv.FormatBool(v)
                    labels[key] = strVal
                    seriesLabels[key] = strVal
                case []interface{}:
                    for _, v := range v {
                        strVal := v.(string)
                        labels[key] = strVal
                        if len(seriesLabels[key]) > 0 {
                            strVal = fmt.Sprintf("%s, %s", seriesLabels[key], strVal)
                        }
                        seriesLabels[key] = strVal
                    }
                }
            }
        }

        customFrameMeta := map[string]interface{}{}
        customFrameMeta["alignmentPeriod"] = params.Get("aggregation.alignmentPeriod")
        customFrameMeta["perSeriesAligner"] = params.Get("aggregation.perSeriesAligner")
        customFrameMeta["labels"] = labels
        customFrameMeta["groupBys"] = groupBys
        if frame.Meta != nil {
            frame.Meta.Custom = customFrameMeta
        } else {
            frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
        }

        // reverse the order to be ascending
        if series.ValueType != "DISTRIBUTION" {
            handleNonDistributionSeries(series, defaultMetricName, seriesLabels, frame, query)
            frames = append(frames, frame)
            continue
        }
        buckets := make(map[int]*data.Frame)
        for i := len(series.Points) - 1; i >= 0; i-- {
            point := series.Points[i]
            if len(point.Value.DistributionValue.BucketCounts) == 0 {
                continue
            }
            for i := 0; i < len(point.Value.DistributionValue.BucketCounts); i++ {
                value, err := strconv.ParseFloat(point.Value.DistributionValue.BucketCounts[i], 64)
                if err != nil {
                    return err
                }
                if _, ok := buckets[i]; !ok {
                    // set lower bounds
                    // https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
                    bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
                    additionalLabels := map[string]string{"bucket": bucketBound}

                    timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
                    valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})

                    frameName := formatLegendKeys(series.Metric.Type, defaultMetricName, nil, additionalLabels, query)
                    valueField.Name = frameName
                    valueField.Labels = seriesLabels
                    setDisplayNameAsFieldName(valueField)

                    buckets[i] = &data.Frame{
                        Name: frameName,
                        Fields: []*data.Field{
                            timeField,
                            valueField,
                        },
                        RefID: query.getRefID(),
                        Meta: &data.FrameMeta{
                            ExecutedQueryString: executedQueryString,
                        },
                    }
                }
                buckets[i].AppendRow(point.Interval.EndTime, value)
            }
        }
        for i := 0; i < len(buckets); i++ {
            buckets[i].Meta.Custom = customFrameMeta
            frames = append(frames, buckets[i])
        }
        if len(buckets) == 0 {
            frames = append(frames, frame)
            var err error
            frames, err = appendFrames(frames, series, 0, defaultMetricName, seriesLabels, frame, query)
            if err != nil {
                return err
            }
        }
    if len(response.TimeSeries) > 0 {
@@ -165,36 +109,6 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesList) parseResponse(queryRes *b
    return parseTimeSeriesResponse(queryRes, response, executedQueryString, timeSeriesFilter, timeSeriesFilter.params, timeSeriesFilter.parameters.GroupBys)
}

func handleNonDistributionSeries(series timeSeries,
    defaultMetricName string, seriesLabels map[string]string, frame *data.Frame, query cloudMonitoringQueryExecutor) {
    for i := 0; i < len(series.Points); i++ {
        point := series.Points[i]
        value := point.Value.DoubleValue

        if series.ValueType == "INT64" {
            parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
            if err == nil {
                value = parsedValue
            }
        }

        if series.ValueType == "BOOL" {
            if point.Value.BoolValue {
                value = 1
            } else {
                value = 0
            }
        }
        frame.SetRow(len(series.Points)-1-i, point.Interval.EndTime, value)
    }

    metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, seriesLabels, nil, query)
    dataField := frame.Fields[1]
    dataField.Name = metricName
    dataField.Labels = seriesLabels
    setDisplayNameAsFieldName(dataField)
}
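handleNonDistributionSeries (removed here and recreated in the shared utils further down) folds every value type into float64. A standalone sketch of just that conversion, assuming the JSON encoding Cloud Monitoring uses (INT64 arrives as a string, BOOL as a bool):

    package main

    import (
        "fmt"
        "strconv"
    )

    // toFloat mirrors the conversion above: INT64 is parsed from its string
    // form, BOOL maps to 1 or 0, anything else keeps the double value.
    func toFloat(valueType, intStr string, boolVal bool, doubleVal float64) float64 {
        value := doubleVal
        if valueType == "INT64" {
            if parsed, err := strconv.ParseFloat(intStr, 64); err == nil {
                value = parsed
            }
        }
        if valueType == "BOOL" {
            if boolVal {
                value = 1
            } else {
                value = 0
            }
        }
        return value
    }

    func main() {
        fmt.Println(toFloat("INT64", "42", false, 0)) // 42
        fmt.Println(toFloat("BOOL", "", true, 0))     // 1
    }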
func (timeSeriesFilter *cloudMonitoringTimeSeriesList) buildDeepLink() string {
    filter := timeSeriesFilter.params.Get("filter")
    if !strings.Contains(filter, "resource.type=") {
@@ -203,69 +117,35 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesList) buildDeepLink() string {
            filter = fmt.Sprintf(`resource.type="%s" %s`, resourceType, filter)
        }
    }

    u, err := url.Parse("https://console.cloud.google.com/monitoring/metrics-explorer")
    if err != nil {
        slog.Error("Failed to generate deep link: unable to parse metrics explorer URL", "ProjectName",
            timeSeriesFilter.parameters.ProjectName, "query", timeSeriesFilter.refID)
        return ""
    }

    rawQuery := u.Query()
    rawQuery.Set("project", timeSeriesFilter.parameters.ProjectName)
    rawQuery.Set("Grafana_deeplink", "true")

    pageState := map[string]interface{}{
        "xyChart": map[string]interface{}{
            "constantLines": []string{},
            "dataSets": []map[string]interface{}{
                {
                    "timeSeriesFilter": map[string]interface{}{
                        "aggregations":       []string{},
                        "crossSeriesReducer": timeSeriesFilter.params.Get("aggregation.crossSeriesReducer"),
                        "filter":             filter,
                        "groupByFields":      timeSeriesFilter.params["aggregation.groupByFields"],
                        "minAlignmentPeriod": strings.TrimPrefix(timeSeriesFilter.params.Get("aggregation.alignmentPeriod"), "+"), // get rid of leading +
                        "perSeriesAligner":   timeSeriesFilter.params.Get("aggregation.perSeriesAligner"),
                        "secondaryGroupByFields": []string{},
                        "unitOverride":           "1",
                    },
                },
    dataSets := []map[string]interface{}{
        {
            "timeSeriesFilter": map[string]interface{}{
                "aggregations":       []string{},
                "crossSeriesReducer": timeSeriesFilter.params.Get("aggregation.crossSeriesReducer"),
                "filter":             filter,
                "groupByFields":      timeSeriesFilter.params["aggregation.groupByFields"],
                "minAlignmentPeriod": strings.TrimPrefix(timeSeriesFilter.params.Get("aggregation.alignmentPeriod"), "+"), // get rid of leading +
                "perSeriesAligner":   timeSeriesFilter.params.Get("aggregation.perSeriesAligner"),
                "secondaryGroupByFields": []string{},
                "unitOverride":           "1",
            },
            "timeshiftDuration": "0s",
            "y1Axis": map[string]string{
                "label": "y1Axis",
                "scale": "LINEAR",
            },
        },
        "timeSelection": map[string]string{
            "timeRange": "custom",
            "start":     timeSeriesFilter.params.Get("interval.startTime"),
            "end":       timeSeriesFilter.params.Get("interval.endTime"),
        },
    }
    }}

    blob, err := json.Marshal(pageState)
    link, err := generateLink(
        timeSeriesFilter.parameters.ProjectName,
        dataSets,
        timeSeriesFilter.params.Get("interval.startTime"),
        timeSeriesFilter.params.Get("interval.endTime"),
    )
    if err != nil {
        slog.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesFilter.parameters.ProjectName,
            "query", timeSeriesFilter.refID)
        return ""
        slog.Error(
            "Failed to generate deep link: unable to parse metrics explorer URL",
            "ProjectName", timeSeriesFilter.parameters.ProjectName,
            "error", err,
        )
    }

    rawQuery.Set("pageState", string(blob))
    u.RawQuery = rawQuery.Encode()

    accountChooserURL, err := url.Parse("https://accounts.google.com/AccountChooser")
    if err != nil {
        slog.Error("Failed to generate deep link: unable to parse account chooser URL", "ProjectName",
            timeSeriesFilter.parameters.ProjectName, "query", timeSeriesFilter.refID)
        return ""
    }
    accountChooserQuery := accountChooserURL.Query()
    accountChooserQuery.Set("continue", u.String())
    accountChooserURL.RawQuery = accountChooserQuery.Encode()

    return accountChooserURL.String()
    return link
}

func setDisplayNameAsFieldName(f *data.Field) {
@@ -371,7 +371,7 @@ func TestTimeSeriesFilter(t *testing.T) {
        frames := res.Frames
        custom, ok := frames[0].Meta.Custom.(map[string]interface{})
        require.True(t, ok)
        labels, ok := custom["labels"].(map[string]string)
        labels, ok := custom["labels"].(sdkdata.Labels)
        require.True(t, ok)
        assert.Equal(t, "114250375703598695", labels["resource.label.instance_id"])
    })
@@ -406,7 +406,7 @@ func TestTimeSeriesFilter(t *testing.T) {
            Custom: map[string]interface{}{
                "groupBys":        []string{"test_group_by"},
                "alignmentPeriod": "",
                "labels": map[string]string{
                "labels": sdkdata.Labels{
                    "resource.label.project_id": "grafana-prod",
                    "resource.type":             "https_lb_rule",
                },
@@ -429,7 +429,7 @@ func TestTimeSeriesFilter(t *testing.T) {
            Custom: map[string]interface{}{
                "groupBys":        []string{"test_group_by"},
                "alignmentPeriod": "",
                "labels": map[string]string{
                "labels": sdkdata.Labels{
                    "resource.label.project_id": "grafana-demo",
                    "resource.type":             "global",
                },
@@ -452,7 +452,7 @@ func TestTimeSeriesFilter(t *testing.T) {
            Custom: map[string]interface{}{
                "groupBys":        []string{"test_group_by"},
                "alignmentPeriod": "",
                "labels": map[string]string{
                "labels": sdkdata.Labels{
                    "resource.label.project_id": "grafana-prod",
                    "resource.type":             "https_lb_rule",
                },
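The test change follows from the production change: labels are now stored as the plugin SDK's data.Labels (imported as sdkdata in the tests), which is a named map[string]string type, so the type assertion must name it exactly. A runnable sketch of why the old assertion stops matching:

    package main

    import (
        "fmt"

        "github.com/grafana/grafana-plugin-sdk-go/data"
    )

    func main() {
        var custom interface{} = data.Labels{"resource.type": "https_lb_rule"}

        // The old assertion now fails: the dynamic type is data.Labels,
        // not the unnamed map[string]string.
        _, ok := custom.(map[string]string)
        fmt.Println(ok) // false

        labels, ok := custom.(data.Labels)
        fmt.Println(ok, labels["resource.type"]) // true https_lb_rule
    }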
@@ -2,9 +2,7 @@ package cloudmonitoring

import (
    "context"
    "encoding/json"
    "fmt"
    "net/url"
    "strconv"
    "strings"
    "time"
@@ -44,37 +42,34 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) run(ctx context.Context,
    return runTimeSeriesRequest(ctx, timeSeriesQuery.logger, req, s, dsInfo, tracer, timeSeriesQuery.parameters.ProjectName, nil, requestBody)
}

func extractTimeSeriesDataLabels(response cloudMonitoringResponse, series timeSeriesData) map[string]string {
    seriesLabels := make(map[string]string)
    for n, d := range response.TimeSeriesDescriptor.LabelDescriptors {
        key := xstrings.ToSnakeCase(d.Key)
        key = strings.Replace(key, ".", ".label.", 1)

        labelValue := series.LabelValues[n]
        switch d.ValueType {
        case "BOOL":
            strVal := strconv.FormatBool(labelValue.BoolValue)
            seriesLabels[key] = strVal
        case "INT64":
            seriesLabels[key] = labelValue.Int64Value
        default:
            seriesLabels[key] = labelValue.StringValue
        }
    }
    return seriesLabels
}
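extractTimeSeriesDataLabels derives label keys from the descriptor key: snake-case it, then splice ".label." in after the first segment. A standalone sketch (the descriptor key is hypothetical; an already-snake-cased key passes through ToSnakeCase unchanged):

    package main

    import (
        "fmt"
        "strings"

        "github.com/huandu/xstrings"
    )

    func main() {
        key := xstrings.ToSnakeCase("resource.project_id") // hypothetical descriptor key
        key = strings.Replace(key, ".", ".label.", 1)
        fmt.Println(key) // resource.label.project_id
    }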
func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *backend.DataResponse,
    response cloudMonitoringResponse, executedQueryString string) error {
    frames := data.Frames{}

    for _, series := range response.TimeSeriesData {
        seriesLabels := make(map[string]string)
        frame := data.NewFrameOfFieldTypes("", len(series.PointData), data.FieldTypeTime, data.FieldTypeFloat64)
        frame.RefID = timeSeriesQuery.refID
        frame.Meta = &data.FrameMeta{
            ExecutedQueryString: executedQueryString,
        }
        labels := make(map[string]string)

        for n, d := range response.TimeSeriesDescriptor.LabelDescriptors {
            key := xstrings.ToSnakeCase(d.Key)
            key = strings.Replace(key, ".", ".label.", 1)

            labelValue := series.LabelValues[n]
            switch d.ValueType {
            case "BOOL":
                strVal := strconv.FormatBool(labelValue.BoolValue)
                labels[key] = strVal
                seriesLabels[key] = strVal
            case "INT64":
                labels[key] = labelValue.Int64Value
                seriesLabels[key] = labelValue.Int64Value
            default:
                labels[key] = labelValue.StringValue
                seriesLabels[key] = labelValue.StringValue
            }
        }
        seriesLabels := extractTimeSeriesDataLabels(response, series)

        for n, d := range response.TimeSeriesDescriptor.PointDescriptors {
            // If more than 1 pointdescriptor was returned, three aggregations are returned per time series - min, mean and max.
@@ -85,136 +80,23 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
                continue
            }

            labels["metric.name"] = d.Key
            seriesLabels["metric.name"] = d.Key
            defaultMetricName := d.Key

            // process non-distribution series
            if d.ValueType != "DISTRIBUTION" {
                // reverse the order to be ascending
                for i := len(series.PointData) - 1; i >= 0; i-- {
                    point := series.PointData[i]
                    value := point.Values[n].DoubleValue

                    if d.ValueType == "INT64" {
                        parsedValue, err := strconv.ParseFloat(point.Values[n].Int64Value, 64)
                        if err == nil {
                            value = parsedValue
                        }
                    } else if d.ValueType == "BOOL" {
                        if point.Values[n].BoolValue {
                            value = 1
                        } else {
                            value = 0
                        }
                    }

                    frame.SetRow(len(series.PointData)-1-i, series.PointData[i].TimeInterval.EndTime, value)
                }

                metricName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, nil,
                    &cloudMonitoringTimeSeriesList{
                        parameters: &timeSeriesList{
                            ProjectName: timeSeriesQuery.parameters.ProjectName,
                        },
                        aliasBy: timeSeriesQuery.aliasBy,
                        logger:  timeSeriesQuery.logger,
                    })
                dataField := frame.Fields[1]
                dataField.Name = metricName
                dataField.Labels = seriesLabels
                setDisplayNameAsFieldName(dataField)

                frames = append(frames, frame)
                continue
            customFrameMeta := map[string]interface{}{}
            customFrameMeta["labels"] = seriesLabels
            frameMeta := &data.FrameMeta{
                ExecutedQueryString: executedQueryString,
                Custom:              customFrameMeta,
            }
            frame.Meta = frameMeta

            // process distribution series
            buckets := make(map[int]*data.Frame)
            // reverse the order to be ascending
            for i := len(series.PointData) - 1; i >= 0; i-- {
                point := series.PointData[i]
                if len(point.Values[n].DistributionValue.BucketCounts) == 0 {
                    continue
                }
                maxKey := 0
                for i := 0; i < len(point.Values[n].DistributionValue.BucketCounts); i++ {
                    value, err := strconv.ParseFloat(point.Values[n].DistributionValue.BucketCounts[i], 64)
                    if err != nil {
                        continue
                    }
                    if _, ok := buckets[i]; !ok {
                        // set lower bounds
                        // https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
                        bucketBound := calcBucketBound(point.Values[n].DistributionValue.BucketOptions, i)
                        additionalLabels := map[string]string{"bucket": bucketBound}

                        timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
                        valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})

                        frameName := formatLegendKeys(d.Key, defaultMetricName, nil, additionalLabels, &cloudMonitoringTimeSeriesList{
                            parameters: &timeSeriesList{ProjectName: timeSeriesQuery.parameters.ProjectName},
                            aliasBy:    timeSeriesQuery.aliasBy,
                            logger:     timeSeriesQuery.logger,
                        })
                        valueField.Name = frameName
                        valueField.Labels = seriesLabels
                        setDisplayNameAsFieldName(valueField)

                        buckets[i] = &data.Frame{
                            Name: frameName,
                            Fields: []*data.Field{
                                timeField,
                                valueField,
                            },
                            RefID: timeSeriesQuery.refID,
                        }

                        if maxKey < i {
                            maxKey = i
                        }
                    }
                    buckets[i].AppendRow(point.TimeInterval.EndTime, value)
                }

                // fill empty bucket
                for i := 0; i < maxKey; i++ {
                    if _, ok := buckets[i]; !ok {
                        bucketBound := calcBucketBound(point.Values[n].DistributionValue.BucketOptions, i)
                        additionalLabels := data.Labels{"bucket": bucketBound}
                        timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
                        valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})
                        frameName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, additionalLabels, &cloudMonitoringTimeSeriesList{
                            parameters: &timeSeriesList{ProjectName: timeSeriesQuery.parameters.ProjectName},
                            aliasBy:    timeSeriesQuery.aliasBy,
                            logger:     timeSeriesQuery.logger,
                        })
                        valueField.Name = frameName
                        valueField.Labels = seriesLabels
                        setDisplayNameAsFieldName(valueField)

                        buckets[i] = &data.Frame{
                            Name: frameName,
                            Fields: []*data.Field{
                                timeField,
                                valueField,
                            },
                            RefID: timeSeriesQuery.refID,
                        }
                    }
                }
            }
            var err error
            iterator := timeSeriesDataIterator{series, d}
            frames, err = appendFrames(frames, iterator, n, defaultMetricName, seriesLabels, frame, timeSeriesQuery)
            if err != nil {
                return err
            }
            for i := 0; i < len(buckets); i++ {
                frames = append(frames, buckets[i])
            }
        }

        customFrameMeta := map[string]interface{}{}
        customFrameMeta["labels"] = labels
        if frame.Meta != nil {
            frame.Meta.Custom = customFrameMeta
        } else {
            frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
        }
    }
    if len(response.TimeSeriesData) > 0 {
@@ -228,58 +110,29 @@ func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *b
}

func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) buildDeepLink() string {
    u, err := url.Parse("https://console.cloud.google.com/monitoring/metrics-explorer")
    if err != nil {
        timeSeriesQuery.logger.Error("Failed to generate deep link: unable to parse metrics explorer URL", "projectName", timeSeriesQuery.parameters.ProjectName, "query", timeSeriesQuery.refID)
        return ""
    }

    q := u.Query()
    q.Set("project", timeSeriesQuery.parameters.ProjectName)
    q.Set("Grafana_deeplink", "true")

    pageState := map[string]interface{}{
        "xyChart": map[string]interface{}{
            "constantLines": []string{},
            "dataSets": []map[string]interface{}{
                {
                    "timeSeriesQuery": timeSeriesQuery.parameters.Query,
                    "targetAxis":      "Y1",
                    "plotType":        "LINE",
                },
            },
            "timeshiftDuration": "0s",
            "y1Axis": map[string]string{
                "label": "y1Axis",
                "scale": "LINEAR",
            },
        },
        "timeSelection": map[string]string{
            "timeRange": "custom",
            "start":     timeSeriesQuery.timeRange.From.Format(time.RFC3339Nano),
            "end":       timeSeriesQuery.timeRange.To.Format(time.RFC3339Nano),
    dataSets := []map[string]interface{}{
        {
            "timeSeriesQuery": timeSeriesQuery.parameters.Query,
            "targetAxis":      "Y1",
            "plotType":        "LINE",
        },
    }

    blob, err := json.Marshal(pageState)
    link, err := generateLink(
        timeSeriesQuery.parameters.ProjectName,
        dataSets,
        timeSeriesQuery.timeRange.From.Format(time.RFC3339Nano),
        timeSeriesQuery.timeRange.To.Format(time.RFC3339Nano),
    )
    if err != nil {
        timeSeriesQuery.logger.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesQuery.parameters.ProjectName, "query", timeSeriesQuery.refID)
        return ""
        slog.Error(
            "Failed to generate deep link: unable to parse metrics explorer URL",
            "ProjectName", timeSeriesQuery.parameters.Query,
            "error", err,
        )
    }

    q.Set("pageState", string(blob))
    u.RawQuery = q.Encode()

    accountChooserURL, err := url.Parse("https://accounts.google.com/AccountChooser")
    if err != nil {
        timeSeriesQuery.logger.Error("Failed to generate deep link: unable to parse account chooser URL", "ProjectName", timeSeriesQuery.parameters.ProjectName, "query", timeSeriesQuery.refID)
        return ""
    }
    accountChooserQuery := accountChooserURL.Query()
    accountChooserQuery.Set("continue", u.String())
    accountChooserURL.RawQuery = accountChooserQuery.Encode()

    return accountChooserURL.String()
    return link
}

func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) getRefID() string {
@@ -115,59 +115,108 @@ type (
    cloudMonitoringResponse struct {
        TimeSeries           []timeSeries         `json:"timeSeries"`
        TimeSeriesDescriptor timeSeriesDescriptor `json:"timeSeriesDescriptor"`
        TimeSeriesData       timeSeriesData       `json:"timeSeriesData"`
        TimeSeriesData       []timeSeriesData     `json:"timeSeriesData"`
        Unit                 string               `json:"unit"`
        NextPageToken        string               `json:"nextPageToken"`
    }
)

type pointIterator interface {
    length() int
    getPoint(index int) point
    metricType() string
    valueType() string
}

type point interface {
    doubleValue(descriptorIndex int) float64
    int64Value(descriptorIndex int) string
    boolValue(descriptorIndex int) bool
    bucketCounts(descriptorIndex int) []string
    bucketValue(descriptorIndex int, bucketCountIndex int) string
    bucketOptions(descriptorIndex int) cloudMonitoringBucketOptions
    endTime() time.Time
}
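pointIterator and point abstract over the two response shapes (REST list vs. MQL query) so the shared helpers can consume either. Go satisfies interfaces implicitly; a compile-time guard like the following (not part of the commit, shown only to document the relationship) would pin down which concrete types below are expected to implement them:

    // Sketch: blank assignments fail to compile if a type stops
    // implementing the interface it is assigned to.
    var (
        _ pointIterator = timeSeries{}
        _ pointIterator = timeSeriesDataIterator{}
        _ point         = &timeSeriesPoint{}
        _ point         = &timeSeriesPointData{}
    )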
type timeSeriesDescriptor struct {
    LabelDescriptors []struct {
        Key         string `json:"key"`
        ValueType   string `json:"valueType"`
        Description string `json:"description"`
    } `json:"labelDescriptors"`
    PointDescriptors []struct {
        Key        string `json:"key"`
        ValueType  string `json:"valueType"`
        MetricKind string `json:"metricKind"`
    } `json:"pointDescriptors"`
    PointDescriptors []timeSeriesPointDescriptor `json:"pointDescriptors"`
}

type timeSeriesData []struct {
type timeSeriesPointDescriptor struct {
    Key        string `json:"key"`
    ValueType  string `json:"valueType"`
    MetricKind string `json:"metricKind"`
}

func (ts timeSeriesPointDescriptor) metricType() string {
    return ts.Key
}

func (ts timeSeriesPointDescriptor) valueType() string {
    return ts.ValueType
}

type timeSeriesData struct {
    LabelValues []struct {
        BoolValue   bool   `json:"boolValue"`
        Int64Value  string `json:"int64Value"`
        StringValue string `json:"stringValue"`
    } `json:"labelValues"`
    PointData []struct {
        Values []struct {
            BoolValue   bool    `json:"boolValue"`
            Int64Value  string  `json:"int64Value"`
            DoubleValue float64 `json:"doubleValue"`
            StringValue string  `json:"stringValue"`
            DistributionValue struct {
                Count                 string  `json:"count"`
                Mean                  float64 `json:"mean"`
                SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
                Range struct {
                    Min int `json:"min"`
                    Max int `json:"max"`
                } `json:"range"`
                BucketOptions cloudMonitoringBucketOptions `json:"bucketOptions"`
                BucketCounts  []string                     `json:"bucketCounts"`
                Examplars []struct {
                    Value     float64 `json:"value"`
                    Timestamp string  `json:"timestamp"`
                    // attachments
                } `json:"examplars"`
            } `json:"distributionValue"`
        } `json:"values"`
        TimeInterval struct {
            EndTime   time.Time `json:"endTime"`
            StartTime time.Time `json:"startTime"`
        } `json:"timeInterval"`
    } `json:"pointData"`
    PointData []timeSeriesPointData `json:"pointData"`
}

func (ts timeSeriesData) length() int {
    return len(ts.PointData)
}

func (ts timeSeriesData) getPoint(index int) point {
    return &ts.PointData[index]
}

type timeSeriesDataIterator struct {
    timeSeriesData
    timeSeriesPointDescriptor
}
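timeSeriesDataIterator stitches the MQL response shape together: the points live on timeSeriesData (length, getPoint), while the metric name and value type live on the point descriptor (metricType, valueType). Embedding both merges the method sets, so the composite value satisfies pointIterator with no forwarding code. Its one use, from parseResponse above:

    // One iterator per point descriptor d of the series.
    iterator := timeSeriesDataIterator{series, d}
    frames, err = appendFrames(frames, iterator, n, defaultMetricName, seriesLabels, frame, timeSeriesQuery)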
type timeSeriesPointData struct {
    Values []timeSeriesPointValue `json:"values"`
    TimeInterval struct {
        EndTime   time.Time `json:"endTime"`
        StartTime time.Time `json:"startTime"`
    } `json:"timeInterval"`
}

func (point timeSeriesPointData) doubleValue(descriptorIndex int) float64 {
    return point.Values[descriptorIndex].DoubleValue
}

func (point timeSeriesPointData) int64Value(descriptorIndex int) string {
    return point.Values[descriptorIndex].IntValue
}

func (point timeSeriesPointData) boolValue(descriptorIndex int) bool {
    return point.Values[descriptorIndex].BoolValue
}

func (point timeSeriesPointData) bucketCounts(descriptorIndex int) []string {
    return point.Values[descriptorIndex].DistributionValue.BucketCounts
}

func (point timeSeriesPointData) bucketValue(descriptorIndex int, bucketCountIndex int) string {
    return point.Values[descriptorIndex].DistributionValue.BucketCounts[bucketCountIndex]
}

func (point timeSeriesPointData) bucketOptions(descriptorIndex int) cloudMonitoringBucketOptions {
    return point.Values[descriptorIndex].DistributionValue.BucketOptions
}

func (point timeSeriesPointData) endTime() time.Time {
    return point.TimeInterval.EndTime
}

type timeSeries struct {
@@ -182,34 +231,82 @@ type timeSeries struct {
    MetaData   map[string]map[string]interface{} `json:"metadata"`
    MetricKind string                            `json:"metricKind"`
    ValueType  string                            `json:"valueType"`
    Points []struct {
        Interval struct {
            StartTime time.Time `json:"startTime"`
            EndTime   time.Time `json:"endTime"`
        } `json:"interval"`
        Value struct {
            DoubleValue float64 `json:"doubleValue"`
            StringValue string  `json:"stringValue"`
            BoolValue   bool    `json:"boolValue"`
            IntValue    string  `json:"int64Value"`
            DistributionValue struct {
                Count                 string  `json:"count"`
                Mean                  float64 `json:"mean"`
                SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
                Range struct {
                    Min int `json:"min"`
                    Max int `json:"max"`
                } `json:"range"`
                BucketOptions cloudMonitoringBucketOptions `json:"bucketOptions"`
                BucketCounts  []string                     `json:"bucketCounts"`
                Examplars []struct {
                    Value     float64 `json:"value"`
                    Timestamp string  `json:"timestamp"`
                    // attachments
                } `json:"examplars"`
            } `json:"distributionValue"`
        } `json:"value"`
    } `json:"points"`
    Points []timeSeriesPoint `json:"points"`
}

func (ts timeSeries) length() int {
    return len(ts.Points)
}

func (ts timeSeries) getPoint(index int) point {
    return &ts.Points[index]
}

func (ts timeSeries) metricType() string {
    return ts.Metric.Type
}

func (ts timeSeries) valueType() string {
    return ts.ValueType
}

type timeSeriesPoint struct {
    Interval struct {
        StartTime time.Time `json:"startTime"`
        EndTime   time.Time `json:"endTime"`
    } `json:"interval"`
    Value timeSeriesPointValue `json:"value"`
}

type timeSeriesPointValue struct {
    DoubleValue float64 `json:"doubleValue"`
    StringValue string  `json:"stringValue"`
    BoolValue   bool    `json:"boolValue"`
    IntValue    string  `json:"int64Value"`
    DistributionValue struct {
        Count                 string  `json:"count"`
        Mean                  float64 `json:"mean"`
        SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
        Range struct {
            Min int `json:"min"`
            Max int `json:"max"`
        } `json:"range"`
        BucketOptions cloudMonitoringBucketOptions `json:"bucketOptions"`
        BucketCounts  []string                     `json:"bucketCounts"`
        Examplars []struct {
            Value     float64 `json:"value"`
            Timestamp string  `json:"timestamp"`
            // attachments
        } `json:"examplars"`
    } `json:"distributionValue"`
}

func (point timeSeriesPoint) doubleValue(descriptorIndex int) float64 {
    return point.Value.DoubleValue
}

func (point timeSeriesPoint) int64Value(descriptorIndex int) string {
    return point.Value.IntValue
}

func (point timeSeriesPoint) boolValue(descriptorIndex int) bool {
    return point.Value.BoolValue
}

func (point timeSeriesPoint) bucketCounts(descriptorIndex int) []string {
    return point.Value.DistributionValue.BucketCounts
}

func (point timeSeriesPoint) bucketValue(descriptorIndex int, bucketCountIndex int) string {
    return point.Value.DistributionValue.BucketCounts[bucketCountIndex]
}

func (point timeSeriesPoint) bucketOptions(descriptorIndex int) cloudMonitoringBucketOptions {
    return point.Value.DistributionValue.BucketOptions
}

func (point timeSeriesPoint) endTime() time.Time {
    return point.Interval.EndTime
}

type metricDescriptorResponse struct {
@@ -9,7 +9,9 @@ import (
    "net/http"
    "net/url"
    "path"
    "strconv"
    "strings"
    "time"

    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/data"
@@ -155,3 +157,193 @@ func runTimeSeriesRequest(ctx context.Context, logger log.Logger, req *backend.Q

    return dr, d, r.URL.RawQuery, nil
}

func bucketFrame(
    bucketOptions cloudMonitoringBucketOptions,
    bucketBoundIndex int,
    metricType string,
    defaultMetricName string,
    query cloudMonitoringQueryExecutor,
    seriesLabels map[string]string,
    frameMeta *data.FrameMeta,
) *data.Frame {
    // set lower bounds
    // https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
    bucketBound := calcBucketBound(bucketOptions, bucketBoundIndex)
    additionalLabels := map[string]string{"bucket": bucketBound}

    timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
    valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})

    frameName := formatLegendKeys(metricType, defaultMetricName, nil, additionalLabels, query)
    valueField.Name = frameName
    valueField.Labels = seriesLabels
    setDisplayNameAsFieldName(valueField)

    return &data.Frame{
        Name: frameName,
        Fields: []*data.Field{
            timeField,
            valueField,
        },
        RefID: query.getRefID(),
        Meta:  frameMeta,
    }
}

func handleDistributionSeries(
    it pointIterator,
    descriptorIndex int,
    defaultMetricName string,
    query cloudMonitoringQueryExecutor,
    seriesLabels map[string]string,
    frameMeta *data.FrameMeta,
) (map[int]*data.Frame, error) {
    buckets := make(map[int]*data.Frame)
    for i := it.length() - 1; i >= 0; i-- {
        point := it.getPoint(i)
        if len(point.bucketCounts(descriptorIndex)) == 0 {
            continue
        }
        maxKey := 0
        for i := 0; i < len(point.bucketCounts(descriptorIndex)); i++ {
            value, err := strconv.ParseFloat(point.bucketValue(descriptorIndex, i), 64)
            if err != nil {
                return nil, err
            }
            if _, ok := buckets[i]; !ok {
                buckets[i] = bucketFrame(
                    point.bucketOptions(descriptorIndex), i,
                    it.metricType(), defaultMetricName, query,
                    seriesLabels,
                    frameMeta,
                )
                if maxKey < i {
                    maxKey = i
                }
            }
            buckets[i].AppendRow(point.endTime(), value)
        }

        // fill empty bucket
        for i := 0; i < maxKey; i++ {
            if _, ok := buckets[i]; !ok {
                buckets[i] = bucketFrame(
                    point.bucketOptions(descriptorIndex), i,
                    it.metricType(), defaultMetricName, query,
                    seriesLabels,
                    frameMeta,
                )
            }
        }
    }
    return buckets, nil
}

func handleNonDistributionSeries(
    series pointIterator,
    descriptorIndex int,
    defaultMetricName string,
    seriesLabels map[string]string,
    frame *data.Frame,
    query cloudMonitoringQueryExecutor,
) {
    for i := 0; i < series.length(); i++ {
        point := series.getPoint(i)
        value := point.doubleValue(descriptorIndex)

        if series.valueType() == "INT64" {
            parsedValue, err := strconv.ParseFloat(point.int64Value(descriptorIndex), 64)
            if err == nil {
                value = parsedValue
            }
        }

        if series.valueType() == "BOOL" {
            if point.boolValue(descriptorIndex) {
                value = 1
            } else {
                value = 0
            }
        }
        frame.SetRow(series.length()-1-i, point.endTime(), value)
    }

    metricName := formatLegendKeys(series.metricType(), defaultMetricName, seriesLabels, nil, query)
    dataField := frame.Fields[1]
    dataField.Name = metricName
    dataField.Labels = seriesLabels
    setDisplayNameAsFieldName(dataField)
}

func appendFrames(
    frames data.Frames,
    series pointIterator,
    descriptorIndex int,
    defaultMetricName string,
    seriesLabels map[string]string,
    frame *data.Frame,
    query cloudMonitoringQueryExecutor,
) (data.Frames, error) {
    if series.valueType() != "DISTRIBUTION" {
        handleNonDistributionSeries(series, descriptorIndex, defaultMetricName, seriesLabels, frame, query)
        return append(frames, frame), nil
    }
    buckets, err := handleDistributionSeries(series, descriptorIndex, defaultMetricName, query, seriesLabels, frame.Meta)
    if err != nil {
        return nil, err
    }
    for i := 0; i < len(buckets); i++ {
        frames = append(frames, buckets[i])
    }
    if len(buckets) == 0 {
        frames = append(frames, frame)
    }
    return frames, nil
}
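appendFrames is the single entry point the two parsers now share: a non-distribution series is written into the pre-allocated frame, a distribution series fans out into one frame per bucket via handleDistributionSeries, and an empty bucket map falls back to the plain frame. The two call sites in this diff reduce to:

    // REST list responses carry one implicit point descriptor, so the index is 0.
    frames, err = appendFrames(frames, series, 0, defaultMetricName, seriesLabels, frame, query)

    // MQL responses are handled once per point descriptor n.
    frames, err = appendFrames(frames, timeSeriesDataIterator{series, d}, n, defaultMetricName, seriesLabels, frame, timeSeriesQuery)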
func generateLink(projectName string, dataSets []map[string]interface{}, start, end string) (string, error) {
    u, err := url.Parse("https://console.cloud.google.com/monitoring/metrics-explorer")
    if err != nil {
        return "", err
    }

    rawQuery := u.Query()
    rawQuery.Set("project", projectName)
    rawQuery.Set("Grafana_deeplink", "true")

    pageState := map[string]interface{}{
        "xyChart": map[string]interface{}{
            "constantLines":     []string{},
            "dataSets":          dataSets,
            "timeshiftDuration": "0s",
            "y1Axis": map[string]string{
                "label": "y1Axis",
                "scale": "LINEAR",
            },
        },
        "timeSelection": map[string]string{
            "timeRange": "custom",
            "start":     start,
            "end":       end,
        },
    }

    blob, err := json.Marshal(pageState)
    if err != nil {
        return "", err
    }

    rawQuery.Set("pageState", string(blob))
    u.RawQuery = rawQuery.Encode()

    accountChooserURL, err := url.Parse("https://accounts.google.com/AccountChooser")
    if err != nil {
        return "", err
    }
    accountChooserQuery := accountChooserURL.Query()
    accountChooserQuery.Set("continue", u.String())
    accountChooserURL.RawQuery = accountChooserQuery.Encode()

    return accountChooserURL.String(), nil
}
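generateLink wraps the Metrics Explorer URL (with the marshaled pageState) in an AccountChooser redirect. A hypothetical call, with placeholder project and time range, and the shape of the result:

    link, err := generateLink(
        "my-project", // hypothetical project name
        dataSets,
        "2022-01-01T00:00:00Z",
        "2022-01-02T00:00:00Z",
    )
    // link: https://accounts.google.com/AccountChooser?continue=<url-encoded metrics-explorer URL>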