2020-04-27 10:43:02 -05:00
package azuremonitor
import (
2020-05-11 12:11:03 -05:00
"bytes"
"compress/gzip"
2020-04-27 10:43:02 -05:00
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path"
2021-05-20 03:16:29 -05:00
"regexp"
2021-06-07 07:54:51 -05:00
"time"
2020-04-27 10:43:02 -05:00
2021-06-07 07:54:51 -05:00
"github.com/grafana/grafana-plugin-sdk-go/backend"
2020-06-05 11:32:10 -05:00
"github.com/grafana/grafana-plugin-sdk-go/data"
2020-04-27 10:43:02 -05:00
"github.com/grafana/grafana/pkg/components/simplejson"
2022-01-20 04:10:12 -06:00
"github.com/grafana/grafana/pkg/infra/tracing"
2020-04-27 10:43:02 -05:00
"github.com/grafana/grafana/pkg/util/errutil"
2022-01-20 04:10:12 -06:00
"go.opentelemetry.io/otel/attribute"
2020-04-27 10:43:02 -05:00
"golang.org/x/net/context/ctxhttp"
)
// AzureLogAnalyticsDatasource calls the Azure Log Analytics API's
type AzureLogAnalyticsDatasource struct {
	// proxy forwards raw resource requests (see resourceRequest) to the Azure API.
	proxy serviceProxy
}
2020-04-27 10:43:02 -05:00
// AzureLogAnalyticsQuery is the query request that is built from the saved values
// from the UI.
type AzureLogAnalyticsQuery struct {
	// RefID ties the produced data frames back to the originating query.
	RefID string
	// ResultFormat is the requested shape of the result (e.g. time series or table).
	ResultFormat string
	// URL is the API path for this query (resource- or workspace-centric, see getApiURL).
	URL string
	// JSON is the raw query model as saved by the UI, decoded lazily where needed.
	JSON json.RawMessage
	// Params holds the encoded querystring parameters, including the interpolated KQL.
	Params url.Values
	// Target is the encoded form of Params, used for tracing/labelling.
	Target string
	// TimeRange is the dashboard time range the query covers.
	TimeRange backend.TimeRange
}
// resourceRequest proxies an HTTP resource request (e.g. metadata lookups)
// straight through to the Azure API using the datasource's service proxy.
func (e *AzureLogAnalyticsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
	e.proxy.Do(rw, req, cli)
}
2020-04-27 10:43:02 -05:00
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
2021-06-07 07:54:51 -05:00
// 3. parses the responses for each query into data frames
2022-01-20 04:10:12 -06:00
func ( e * AzureLogAnalyticsDatasource ) executeTimeSeriesQuery ( ctx context . Context , originalQueries [ ] backend . DataQuery , dsInfo datasourceInfo , client * http . Client ,
url string , tracer tracing . Tracer ) ( * backend . QueryDataResponse , error ) {
2021-06-07 07:54:51 -05:00
result := backend . NewQueryDataResponse ( )
2020-04-27 10:43:02 -05:00
2021-06-07 07:54:51 -05:00
queries , err := e . buildQueries ( originalQueries , dsInfo )
2020-04-27 10:43:02 -05:00
if err != nil {
2021-06-07 07:54:51 -05:00
return nil , err
2020-04-27 10:43:02 -05:00
}
for _ , query := range queries {
2022-01-20 04:10:12 -06:00
result . Responses [ query . RefID ] = e . executeQuery ( ctx , query , dsInfo , client , url , tracer )
2020-04-27 10:43:02 -05:00
}
return result , nil
}
2021-05-20 03:16:29 -05:00
func getApiURL ( queryJSONModel logJSONQuery ) string {
// Legacy queries only specify a Workspace GUID, which we need to use the old workspace-centric
// API URL for, and newer queries specifying a resource URI should use resource-centric API.
// However, legacy workspace queries using a `workspaces()` template variable will be resolved
// to a resource URI, so they should use the new resource-centric.
azureLogAnalyticsTarget := queryJSONModel . AzureLogAnalytics
var resourceOrWorkspace string
if azureLogAnalyticsTarget . Resource != "" {
resourceOrWorkspace = azureLogAnalyticsTarget . Resource
} else {
resourceOrWorkspace = azureLogAnalyticsTarget . Workspace
}
matchesResourceURI , _ := regexp . MatchString ( "^/subscriptions/" , resourceOrWorkspace )
if matchesResourceURI {
return fmt . Sprintf ( "v1%s/query" , resourceOrWorkspace )
} else {
return fmt . Sprintf ( "v1/workspaces/%s/query" , resourceOrWorkspace )
}
}
2021-06-07 07:54:51 -05:00
func ( e * AzureLogAnalyticsDatasource ) buildQueries ( queries [ ] backend . DataQuery , dsInfo datasourceInfo ) ( [ ] * AzureLogAnalyticsQuery , error ) {
2020-04-27 10:43:02 -05:00
azureLogAnalyticsQueries := [ ] * AzureLogAnalyticsQuery { }
for _ , query := range queries {
2020-05-26 09:52:33 -05:00
queryJSONModel := logJSONQuery { }
2021-06-07 07:54:51 -05:00
err := json . Unmarshal ( query . JSON , & queryJSONModel )
2020-05-26 09:52:33 -05:00
if err != nil {
return nil , fmt . Errorf ( "failed to decode the Azure Log Analytics query object from JSON: %w" , err )
}
azureLogAnalyticsTarget := queryJSONModel . AzureLogAnalytics
2020-04-27 10:43:02 -05:00
azlog . Debug ( "AzureLogAnalytics" , "target" , azureLogAnalyticsTarget )
2020-05-26 09:52:33 -05:00
resultFormat := azureLogAnalyticsTarget . ResultFormat
2020-04-27 10:43:02 -05:00
if resultFormat == "" {
2021-05-19 03:31:27 -05:00
resultFormat = timeSeries
2020-04-27 10:43:02 -05:00
}
2021-05-20 03:16:29 -05:00
apiURL := getApiURL ( queryJSONModel )
2020-04-27 10:43:02 -05:00
params := url . Values { }
2021-06-07 07:54:51 -05:00
rawQuery , err := KqlInterpolate ( query , dsInfo , azureLogAnalyticsTarget . Query , "TimeGenerated" )
2020-04-27 10:43:02 -05:00
if err != nil {
return nil , err
}
params . Add ( "query" , rawQuery )
azureLogAnalyticsQueries = append ( azureLogAnalyticsQueries , & AzureLogAnalyticsQuery {
2021-03-08 00:02:49 -06:00
RefID : query . RefID ,
2020-04-27 10:43:02 -05:00
ResultFormat : resultFormat ,
URL : apiURL ,
2021-06-07 07:54:51 -05:00
JSON : query . JSON ,
2020-04-27 10:43:02 -05:00
Params : params ,
Target : params . Encode ( ) ,
2021-06-07 07:54:51 -05:00
TimeRange : query . TimeRange ,
2020-04-27 10:43:02 -05:00
} )
}
return azureLogAnalyticsQueries , nil
}
// executeQuery runs a single Log Analytics query: it builds the HTTP request,
// traces it, calls the API, and converts the primary result table into a data
// frame (optionally reshaped to wide/time-series form). Errors are returned
// inside the backend.DataResponse rather than as a Go error so each query in a
// batch fails independently.
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo datasourceInfo, client *http.Client,
	url string, tracer tracing.Tracer) backend.DataResponse {
	dataResponse := backend.DataResponse{}

	// dataResponseErrorWithExecuted records err on the response while still
	// attaching an (empty) frame carrying the executed query string, so the
	// UI can show what was actually sent even on failure.
	dataResponseErrorWithExecuted := func(err error) backend.DataResponse {
		dataResponse.Error = err
		dataResponse.Frames = data.Frames{
			&data.Frame{
				RefID: query.RefID,
				Meta: &data.FrameMeta{
					ExecutedQueryString: query.Params.Get("query"),
				},
			},
		}
		return dataResponse
	}

	// If azureLogAnalyticsSameAs is defined and set to false, return an error
	if sameAs, ok := dsInfo.JSONData["azureLogAnalyticsSameAs"]; ok && !sameAs.(bool) {
		return dataResponseErrorWithExecuted(fmt.Errorf("Log Analytics credentials are no longer supported. Go to the data source configuration to update Azure Monitor credentials")) //nolint:golint,stylecheck
	}

	req, err := e.createRequest(ctx, dsInfo, url)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	// Append the per-query API path and querystring onto the base request.
	req.URL.Path = path.Join(req.URL.Path, query.URL)
	req.URL.RawQuery = query.Params.Encode()

	// Start a tracing span and tag it with the query target, the time range
	// (in milliseconds), and the datasource/org identifiers.
	ctx, span := tracer.Start(ctx, "azure log analytics query")
	span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
	span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
	span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))

	defer span.End()

	// Propagate the trace context into the outgoing request headers.
	tracer.Inject(ctx, req.Header, span)

	azlog.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
	res, err := ctxhttp.Do(ctx, client, req)
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	logResponse, err := e.unmarshalResponse(res)
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	t, err := logResponse.GetPrimaryResultTable()
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	frame, err := ResponseTableToFrame(t)
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	// Re-parse the raw query model to pull out the subscription/workspace for
	// the frame's custom metadata (used by the frontend for deep links).
	model, err := simplejson.NewJson(query.JSON)
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}
	err = setAdditionalFrameMeta(frame,
		query.Params.Get("query"),
		model.Get("subscriptionId").MustString(),
		model.Get("azureLogAnalytics").Get("workspace").MustString())
	if err != nil {
		// Metadata is best-effort: surface a warning notice but keep the data.
		frame.AppendNotices(data.Notice{Severity: data.NoticeSeverityWarning, Text: "could not add custom metadata: " + err.Error()})
		azlog.Warn("failed to add custom metadata to azure log analytics response", err)
	}

	// For time-series requests, convert a "long" table layout into the wide
	// layout Grafana expects; on failure fall back to the raw table with a notice.
	if query.ResultFormat == timeSeries {
		tsSchema := frame.TimeSeriesSchema()
		if tsSchema.Type == data.TimeSeriesTypeLong {
			wideFrame, err := data.LongToWide(frame, nil)
			if err == nil {
				frame = wideFrame
			} else {
				frame.AppendNotices(data.Notice{Severity: data.NoticeSeverityWarning, Text: "could not convert frame to time series, returning raw table: " + err.Error()})
			}
		}
	}

	dataResponse.Frames = data.Frames{frame}
	return dataResponse
}
2021-07-16 05:47:26 -05:00
func ( e * AzureLogAnalyticsDatasource ) createRequest ( ctx context . Context , dsInfo datasourceInfo , url string ) ( * http . Request , error ) {
req , err := http . NewRequest ( http . MethodGet , url , nil )
2020-04-27 10:43:02 -05:00
if err != nil {
azlog . Debug ( "Failed to create request" , "error" , err )
2020-11-05 04:29:39 -06:00
return nil , errutil . Wrap ( "failed to create request" , err )
2020-04-27 10:43:02 -05:00
}
2021-06-11 10:02:24 -05:00
req . URL . Path = "/"
2020-04-27 10:43:02 -05:00
req . Header . Set ( "Content-Type" , "application/json" )
return req , nil
}
2020-06-05 11:32:10 -05:00
// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
// error if there is no table by that name.
2021-05-19 03:31:27 -05:00
func ( ar * AzureLogAnalyticsResponse ) GetPrimaryResultTable ( ) ( * AzureResponseTable , error ) {
2020-06-05 11:32:10 -05:00
for _ , t := range ar . Tables {
if t . Name == "PrimaryResult" {
return & t , nil
}
}
2020-09-22 13:00:59 -05:00
return nil , fmt . Errorf ( "no data as PrimaryResult table is missing from the response" )
2020-06-05 11:32:10 -05:00
}
2020-04-27 10:43:02 -05:00
func ( e * AzureLogAnalyticsDatasource ) unmarshalResponse ( res * http . Response ) ( AzureLogAnalyticsResponse , error ) {
body , err := ioutil . ReadAll ( res . Body )
if err != nil {
return AzureLogAnalyticsResponse { } , err
}
2020-12-15 02:32:06 -06:00
defer func ( ) {
if err := res . Body . Close ( ) ; err != nil {
azlog . Warn ( "Failed to close response body" , "err" , err )
}
} ( )
2020-04-27 10:43:02 -05:00
if res . StatusCode / 100 != 2 {
azlog . Debug ( "Request failed" , "status" , res . Status , "body" , string ( body ) )
2020-11-05 04:29:39 -06:00
return AzureLogAnalyticsResponse { } , fmt . Errorf ( "request failed, status: %s, body: %s" , res . Status , string ( body ) )
2020-04-27 10:43:02 -05:00
}
var data AzureLogAnalyticsResponse
2020-06-05 11:32:10 -05:00
d := json . NewDecoder ( bytes . NewReader ( body ) )
d . UseNumber ( )
err = d . Decode ( & data )
2020-04-27 10:43:02 -05:00
if err != nil {
azlog . Debug ( "Failed to unmarshal Azure Log Analytics response" , "error" , err , "status" , res . Status , "body" , string ( body ) )
return AzureLogAnalyticsResponse { } , err
}
return data , nil
}
// LogAnalyticsMeta is a type for the a Frame's Meta's Custom property.
type LogAnalyticsMeta struct {
	// ColumnTypes holds the Azure column type name for each frame field.
	ColumnTypes []string `json:"azureColumnTypes"`
	// Subscription is the Azure subscription the query ran against.
	Subscription string `json:"subscription"`
	// Workspace is the Log Analytics workspace the query ran against.
	Workspace string `json:"workspace"`
	// EncodedQuery is used for deep links.
	EncodedQuery []byte `json:"encodedQuery"`
}
func setAdditionalFrameMeta ( frame * data . Frame , query , subscriptionID , workspace string ) error {
2020-06-05 11:32:10 -05:00
frame . Meta . ExecutedQueryString = query
2020-06-29 15:20:24 -05:00
la , ok := frame . Meta . Custom . ( * LogAnalyticsMeta )
if ! ok {
return fmt . Errorf ( "unexpected type found for frame's custom metadata" )
}
la . Subscription = subscriptionID
la . Workspace = workspace
2020-06-05 11:32:10 -05:00
encodedQuery , err := encodeQuery ( query )
if err == nil {
2020-06-29 15:20:24 -05:00
la . EncodedQuery = encodedQuery
return nil
2020-04-27 10:43:02 -05:00
}
2020-06-29 15:20:24 -05:00
return fmt . Errorf ( "failed to encode the query into the encodedQuery property" )
2020-04-27 10:43:02 -05:00
}
2020-05-11 12:11:03 -05:00
2020-06-05 11:32:10 -05:00
// encodeQuery encodes the query in gzip so the frontend can build links.
func encodeQuery ( rawQuery string ) ( [ ] byte , error ) {
2020-05-11 12:11:03 -05:00
var b bytes . Buffer
gz := gzip . NewWriter ( & b )
if _ , err := gz . Write ( [ ] byte ( rawQuery ) ) ; err != nil {
2020-06-05 11:32:10 -05:00
return nil , err
2020-05-11 12:11:03 -05:00
}
if err := gz . Close ( ) ; err != nil {
2020-06-05 11:32:10 -05:00
return nil , err
2020-05-11 12:11:03 -05:00
}
2020-06-05 11:32:10 -05:00
return b . Bytes ( ) , nil
2020-05-11 12:11:03 -05:00
}