2021-01-18 06:48:43 -06:00
package cloudmonitoring
import (
"bytes"
"context"
"encoding/json"
"fmt"
"net/url"
"path"
"strconv"
"strings"
"time"
2022-01-20 04:10:12 -06:00
"go.opentelemetry.io/otel/attribute"
2021-10-08 07:46:35 -05:00
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
2022-01-20 04:10:12 -06:00
"github.com/grafana/grafana/pkg/infra/tracing"
2021-10-08 07:46:35 -05:00
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
2021-01-18 06:48:43 -06:00
)
2022-05-09 06:43:10 -05:00
// appendGraphPeriod returns an MQL " | graph_period <period>" suffix for the
// query. A GraphPeriod of "disabled" suppresses the clause entirely; "auto"
// or an empty value resolves the period from the first query's time range and
// max data points via the interval calculator.
func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) appendGraphPeriod(req *backend.QueryDataRequest) string {
	// GraphPeriod needs to be explicitly disabled; otherwise a value
	// (explicit or automatic) is always appended.
	if timeSeriesQuery.GraphPeriod == "disabled" {
		return ""
	}

	period := timeSeriesQuery.GraphPeriod
	if period == "" || period == "auto" {
		// Derive a period from the panel's time range and resolution.
		// NOTE(review): IntervalMS/1000 truncates sub-second intervals to 0 —
		// confirm the calculator's minimum-interval handling covers that case.
		calc := intervalv2.NewCalculator(intervalv2.CalculatorOptions{})
		minInterval := time.Duration(timeSeriesQuery.IntervalMS/1000) * time.Second
		period = calc.Calculate(req.Queries[0].TimeRange, minInterval, req.Queries[0].MaxDataPoints).Text
	}
	return fmt.Sprintf(" | graph_period %s", period)
}
2021-10-08 07:46:35 -05:00
// run executes the MQL query against the Cloud Monitoring
// projects.timeSeries:query API and returns the data response, the decoded
// API payload, and the final query string that was sent.
//
// Note the error-handling convention: failures are reported through
// dr.Error on the returned *backend.DataResponse, while the error return
// value is always nil.
func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, req *backend.QueryDataRequest,
	s *Service, dsInfo datasourceInfo, tracer tracing.Tracer) (*backend.DataResponse, cloudMonitoringResponse, string, error) {
	dr := &backend.DataResponse{}
	projectName := timeSeriesQuery.ProjectName

	if projectName == "" {
		// Fall back to the datasource's default GCP project when the query
		// does not name one explicitly.
		var err error
		projectName, err = s.getDefaultProject(ctx, dsInfo)
		if err != nil {
			dr.Error = err
			return dr, cloudMonitoringResponse{}, "", nil
		}
		slog.Info("No project name set on query, using project name from datasource", "projectName", projectName)
	}

	// Append the graph_period clause (unless disabled) and a "within" clause
	// covering the first query's time range to the user's MQL text.
	timeSeriesQuery.Query += timeSeriesQuery.appendGraphPeriod(req)
	from := req.Queries[0].TimeRange.From
	to := req.Queries[0].TimeRange.To
	timeFormat := "2006/01/02-15:04:05"
	timeSeriesQuery.Query += fmt.Sprintf(" | within d'%s', d'%s'", from.UTC().Format(timeFormat), to.UTC().Format(timeFormat))

	// The API expects a JSON body of the form {"query": "<MQL>"}.
	buf, err := json.Marshal(map[string]interface{}{
		"query": timeSeriesQuery.Query,
	})
	if err != nil {
		dr.Error = err
		return dr, cloudMonitoringResponse{}, "", nil
	}

	r, err := s.createRequest(ctx, &dsInfo, path.Join("/v3/projects", projectName, "timeSeries:query"), bytes.NewBuffer(buf))
	if err != nil {
		dr.Error = err
		return dr, cloudMonitoringResponse{}, "", nil
	}

	// Trace the outgoing request; query text and time range are recorded as
	// span attributes for debugging.
	ctx, span := tracer.Start(ctx, "cloudMonitoring MQL query")
	span.SetAttributes("query", timeSeriesQuery.Query, attribute.Key("query").String(timeSeriesQuery.Query))
	span.SetAttributes("from", req.Queries[0].TimeRange.From, attribute.Key("from").String(req.Queries[0].TimeRange.From.String()))
	span.SetAttributes("until", req.Queries[0].TimeRange.To, attribute.Key("until").String(req.Queries[0].TimeRange.To.String()))

	defer span.End()
	// Propagate the trace context to the downstream service via HTTP headers.
	tracer.Inject(ctx, r.Header, span)

	r = r.WithContext(ctx)
	res, err := dsInfo.services[cloudMonitor].client.Do(r)
	if err != nil {
		dr.Error = err
		return dr, cloudMonitoringResponse{}, "", nil
	}

	// unmarshalResponse decodes the HTTP response body into the typed
	// cloudMonitoringResponse structure.
	d, err := unmarshalResponse(res)
	if err != nil {
		dr.Error = err
		return dr, cloudMonitoringResponse{}, "", nil
	}

	return dr, d, timeSeriesQuery.Query, nil
}
2021-10-08 07:46:35 -05:00
// parseResponse converts an MQL API response into data frames on queryRes.
// Non-distribution series become one two-field (time, float64) frame each;
// distribution series are exploded into one frame per histogram bucket.
// The executed query string is recorded in each frame's metadata, and the
// collected label values are attached as custom frame metadata. Always
// returns nil.
func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *backend.DataResponse,
	response cloudMonitoringResponse, executedQueryString string) error {
	frames := data.Frames{}

	for _, series := range response.TimeSeriesData {
		seriesLabels := make(map[string]string)
		frame := data.NewFrameOfFieldTypes("", len(series.PointData), data.FieldTypeTime, data.FieldTypeFloat64)
		frame.RefID = timeSeriesQuery.RefID
		frame.Meta = &data.FrameMeta{
			ExecutedQueryString: executedQueryString,
		}

		// labels mirrors seriesLabels but is exposed via frame metadata
		// (Meta.Custom["labels"]) rather than on the value field.
		labels := make(map[string]string)
		for n, d := range response.TimeSeriesDescriptor.LabelDescriptors {
			key := toSnakeCase(d.Key)
			// e.g. "metric.foo" -> "metric.label.foo" (first dot only).
			key = strings.Replace(key, ".", ".label.", 1)
			labelValue := series.LabelValues[n]
			// Label values arrive in a typed union; normalize everything to a
			// string representation.
			switch d.ValueType {
			case "BOOL":
				strVal := strconv.FormatBool(labelValue.BoolValue)
				labels[key] = strVal
				seriesLabels[key] = strVal
			case "INT64":
				// Int64Value is already a string in the decoded payload.
				labels[key] = labelValue.Int64Value
				seriesLabels[key] = labelValue.Int64Value
			default:
				labels[key] = labelValue.StringValue
				seriesLabels[key] = labelValue.StringValue
			}
		}
		for n, d := range response.TimeSeriesDescriptor.PointDescriptors {
			// If more than 1 pointdescriptor was returned, three aggregations are returned per time series - min, mean and max.
			// This is a because the period for the given table is less than half the duration which is used in the graph_period MQL function.
			// See https://cloud.google.com/monitoring/mql/reference#graph_period-tabop
			// When this is the case, we'll just ignore the min and max and use the mean value in the frame
			if len(response.TimeSeriesDescriptor.PointDescriptors) > 1 && !strings.HasSuffix(d.Key, ".mean") {
				continue
			}

			labels["metric.name"] = d.Key
			seriesLabels["metric.name"] = d.Key
			defaultMetricName := d.Key
			// process non-distribution series
			if d.ValueType != "DISTRIBUTION" {
				// reverse the order to be ascending
				for i := len(series.PointData) - 1; i >= 0; i-- {
					point := series.PointData[i]
					value := point.Values[n].DoubleValue
					if d.ValueType == "INT64" {
						// Int64 point values are strings; fall back to the
						// (zero) DoubleValue if parsing fails.
						parsedValue, err := strconv.ParseFloat(point.Values[n].Int64Value, 64)
						if err == nil {
							value = parsedValue
						}
					} else if d.ValueType == "BOOL" {
						// Encode booleans as 1/0 so they plot as a series.
						if point.Values[n].BoolValue {
							value = 1
						} else {
							value = 0
						}
					}
					frame.SetRow(len(series.PointData)-1-i, series.PointData[i].TimeInterval.EndTime, value)
				}

				metricName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, nil,
					&cloudMonitoringTimeSeriesFilter{
						ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy,
					})
				dataField := frame.Fields[1]
				dataField.Name = metricName
				dataField.Labels = seriesLabels
				setDisplayNameAsFieldName(dataField)

				frames = append(frames, frame)
				continue
			}
			// process distribution series
			// buckets maps bucket index -> per-bucket frame, built lazily as
			// points referencing that bucket are seen.
			buckets := make(map[int]*data.Frame)
			// reverse the order to be ascending
			for i := len(series.PointData) - 1; i >= 0; i-- {
				point := series.PointData[i]
				if len(point.Values[n].DistributionValue.BucketCounts) == 0 {
					continue
				}
				// maxKey tracks the highest bucket index seen for THIS point;
				// used below to backfill buckets this point did not populate.
				maxKey := 0
				// NOTE: this inner i shadows the point index above.
				for i := 0; i < len(point.Values[n].DistributionValue.BucketCounts); i++ {
					value, err := strconv.ParseFloat(point.Values[n].DistributionValue.BucketCounts[i], 64)
					if err != nil {
						continue
					}
					if _, ok := buckets[i]; !ok {
						// set lower bounds
						// https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
						bucketBound := calcBucketBound(point.Values[n].DistributionValue.BucketOptions, i)
						additionalLabels := map[string]string{"bucket": bucketBound}
						timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
						valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})
						frameName := formatLegendKeys(d.Key, defaultMetricName, nil, additionalLabels, &cloudMonitoringTimeSeriesFilter{ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy})
						valueField.Name = frameName
						valueField.Labels = seriesLabels
						setDisplayNameAsFieldName(valueField)

						buckets[i] = &data.Frame{
							Name: frameName,
							Fields: []*data.Field{
								timeField,
								valueField,
							},
							RefID: timeSeriesQuery.RefID,
						}
						if maxKey < i {
							maxKey = i
						}
					}
					buckets[i].AppendRow(point.TimeInterval.EndTime, value)
				}
				// fill empty bucket
				// Ensure every bucket index below maxKey has a frame, so the
				// final frames list has no gaps.
				for i := 0; i < maxKey; i++ {
					if _, ok := buckets[i]; !ok {
						bucketBound := calcBucketBound(point.Values[n].DistributionValue.BucketOptions, i)
						additionalLabels := data.Labels{"bucket": bucketBound}
						timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
						valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})
						frameName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, additionalLabels, &cloudMonitoringTimeSeriesFilter{ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy})
						valueField.Name = frameName
						valueField.Labels = seriesLabels
						setDisplayNameAsFieldName(valueField)

						buckets[i] = &data.Frame{
							Name: frameName,
							Fields: []*data.Field{
								timeField,
								valueField,
							},
							RefID: timeSeriesQuery.RefID,
						}
					}
				}
			}
			// Emit bucket frames in ascending bucket order.
			for i := 0; i < len(buckets); i++ {
				frames = append(frames, buckets[i])
			}
		}

		// Attach the accumulated label map as custom frame metadata.
		customFrameMeta := map[string]interface{}{}
		customFrameMeta["labels"] = labels
		if frame.Meta != nil {
			frame.Meta.Custom = customFrameMeta
		} else {
			frame.SetMeta(&data.FrameMeta{Custom: customFrameMeta})
		}
	}

	// Add the console deep link and unit config only when data was returned.
	if len(response.TimeSeriesData) > 0 {
		dl := timeSeriesQuery.buildDeepLink()
		frames = addConfigData(frames, dl, response.Unit)
	}

	queryRes.Frames = frames
	return nil
}
2021-10-08 07:46:35 -05:00
func ( timeSeriesQuery cloudMonitoringTimeSeriesQuery ) parseToAnnotations ( queryRes * backend . DataResponse ,
2021-11-11 06:38:04 -06:00
data cloudMonitoringResponse , title , text string ) error {
2022-05-19 15:52:52 -05:00
annotations := make ( [ ] * annotationEvent , 0 )
2021-01-18 06:48:43 -06:00
for _ , series := range data . TimeSeriesData {
metricLabels := make ( map [ string ] string )
resourceLabels := make ( map [ string ] string )
for n , d := range data . TimeSeriesDescriptor . LabelDescriptors {
key := toSnakeCase ( d . Key )
labelValue := series . LabelValues [ n ]
value := ""
switch d . ValueType {
case "BOOL" :
strVal := strconv . FormatBool ( labelValue . BoolValue )
value = strVal
case "INT64" :
2021-09-22 10:05:36 -05:00
value = labelValue . Int64Value
2021-01-18 06:48:43 -06:00
default :
value = labelValue . StringValue
}
if strings . Index ( key , "metric." ) == 0 {
key = key [ len ( "metric." ) : ]
metricLabels [ key ] = value
} else if strings . Index ( key , "resource." ) == 0 {
key = key [ len ( "resource." ) : ]
resourceLabels [ key ] = value
}
}
for n , d := range data . TimeSeriesDescriptor . PointDescriptors {
// reverse the order to be ascending
for i := len ( series . PointData ) - 1 ; i >= 0 ; i -- {
point := series . PointData [ i ]
value := strconv . FormatFloat ( point . Values [ n ] . DoubleValue , 'f' , 6 , 64 )
if d . ValueType == "STRING" {
value = point . Values [ n ] . StringValue
}
2022-05-19 15:52:52 -05:00
annotations = append ( annotations , & annotationEvent {
Time : point . TimeInterval . EndTime ,
Title : formatAnnotationText ( title , value , d . MetricKind , metricLabels , resourceLabels ) ,
Tags : "" ,
Text : formatAnnotationText ( text , value , d . MetricKind , metricLabels , resourceLabels ) ,
} )
2021-01-18 06:48:43 -06:00
}
}
}
2021-10-08 07:46:35 -05:00
timeSeriesQuery . transformAnnotationToFrame ( annotations , queryRes )
2021-01-18 06:48:43 -06:00
return nil
}
func ( timeSeriesQuery cloudMonitoringTimeSeriesQuery ) buildDeepLink ( ) string {
u , err := url . Parse ( "https://console.cloud.google.com/monitoring/metrics-explorer" )
if err != nil {
slog . Error ( "Failed to generate deep link: unable to parse metrics explorer URL" , "projectName" , timeSeriesQuery . ProjectName , "query" , timeSeriesQuery . RefID )
return ""
}
q := u . Query ( )
q . Set ( "project" , timeSeriesQuery . ProjectName )
q . Set ( "Grafana_deeplink" , "true" )
pageState := map [ string ] interface { } {
"xyChart" : map [ string ] interface { } {
"constantLines" : [ ] string { } ,
"dataSets" : [ ] map [ string ] interface { } {
{
"timeSeriesQuery" : timeSeriesQuery . Query ,
"targetAxis" : "Y1" ,
"plotType" : "LINE" ,
} ,
} ,
"timeshiftDuration" : "0s" ,
"y1Axis" : map [ string ] string {
"label" : "y1Axis" ,
"scale" : "LINEAR" ,
} ,
} ,
"timeSelection" : map [ string ] string {
"timeRange" : "custom" ,
2021-10-08 07:46:35 -05:00
"start" : timeSeriesQuery . timeRange . From . Format ( time . RFC3339Nano ) ,
"end" : timeSeriesQuery . timeRange . To . Format ( time . RFC3339Nano ) ,
2021-01-18 06:48:43 -06:00
} ,
}
blob , err := json . Marshal ( pageState )
if err != nil {
slog . Error ( "Failed to generate deep link" , "pageState" , pageState , "ProjectName" , timeSeriesQuery . ProjectName , "query" , timeSeriesQuery . RefID )
return ""
}
q . Set ( "pageState" , string ( blob ) )
u . RawQuery = q . Encode ( )
accountChooserURL , err := url . Parse ( "https://accounts.google.com/AccountChooser" )
if err != nil {
slog . Error ( "Failed to generate deep link: unable to parse account chooser URL" , "ProjectName" , timeSeriesQuery . ProjectName , "query" , timeSeriesQuery . RefID )
return ""
}
accountChooserQuery := accountChooserURL . Query ( )
accountChooserQuery . Set ( "continue" , u . String ( ) )
accountChooserURL . RawQuery = accountChooserQuery . Encode ( )
return accountChooserURL . String ( )
}
// getRefID returns the query's Grafana reference ID.
func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) getRefID() string {
	return timeSeriesQuery.RefID
}