package loganalytics

import (
	"bytes"
	"compress/gzip"
	"context"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"path"
	"regexp"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"go.opentelemetry.io/otel/attribute"

	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/infra/tracing"
	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)

// AzureLogAnalyticsDatasource calls the Azure Log Analytics API.
type AzureLogAnalyticsDatasource struct {
	Proxy types.ServiceProxy
}

// AzureLogAnalyticsQuery is the query request that is built from the saved values
// from the UI.
type AzureLogAnalyticsQuery struct {
	RefID        string
	ResultFormat string
	URL          string
	JSON         json.RawMessage
	TimeRange    backend.TimeRange
	Query        string
	Resources    []string
}
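
// ResourceRequest proxies a resource request to the Azure API using the datasource's service proxy and HTTP client.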
func (e *AzureLogAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
	e.Proxy.Do(rw, req, cli)
}

// ExecuteTimeSeriesQuery does the following:
// 1. builds the AzureMonitor URL and query string for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureLogAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
	result := backend.NewQueryDataResponse()
	ctxLogger := logger.FromContext(ctx)
	queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
	}

	return result, nil
}
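
// getApiURL determines whether the query targets a resource URI or a legacy workspace GUID
// and returns the matching Log Analytics API path.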
func getApiURL(queryJSONModel types.LogJSONQuery) string {
	// Legacy queries only specify a Workspace GUID, which we need to use the old workspace-centric
	// API URL for, and newer queries specifying a resource URI should use the resource-centric API.
	// However, legacy workspace queries using a `workspaces()` template variable will be resolved
	// to a resource URI, so they should use the new resource-centric API.
	azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
	var resourceOrWorkspace string

	if len(azureLogAnalyticsTarget.Resources) > 0 {
		resourceOrWorkspace = azureLogAnalyticsTarget.Resources[0]
	} else if azureLogAnalyticsTarget.Resource != "" {
		resourceOrWorkspace = azureLogAnalyticsTarget.Resource
	} else {
		resourceOrWorkspace = azureLogAnalyticsTarget.Workspace
	}

	matchesResourceURI, _ := regexp.MatchString("^/subscriptions/", resourceOrWorkspace)

	if matchesResourceURI {
		return fmt.Sprintf("v1%s/query", resourceOrWorkspace)
	} else {
		return fmt.Sprintf("v1/workspaces/%s/query", resourceOrWorkspace)
	}
}
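
// buildQueries converts the backend data queries into AzureLogAnalyticsQuery objects,
// interpolating Kusto macros and resolving the API URL and target resources for each query.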
func (e *AzureLogAnalyticsDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureLogAnalyticsQuery, error) {
	azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}

	for _, query := range queries {
		queryJSONModel := types.LogJSONQuery{}
		err := json.Unmarshal(query.JSON, &queryJSONModel)
		if err != nil {
			return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
		}

		azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
		logger.Debug("AzureLogAnalytics", "target", azureLogAnalyticsTarget)

		resultFormat := azureLogAnalyticsTarget.ResultFormat
		if resultFormat == "" {
			resultFormat = types.TimeSeries
		}

		apiURL := getApiURL(queryJSONModel)

		rawQuery, err := macros.KqlInterpolate(logger, query, dsInfo, azureLogAnalyticsTarget.Query, "TimeGenerated")
		if err != nil {
			return nil, err
		}

		resources := []string{}
		if len(azureLogAnalyticsTarget.Resources) > 0 {
			resources = azureLogAnalyticsTarget.Resources
		} else if azureLogAnalyticsTarget.Resource != "" {
			resources = []string{azureLogAnalyticsTarget.Resource}
		}

		azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, &AzureLogAnalyticsQuery{
			RefID:        query.RefID,
			ResultFormat: resultFormat,
			URL:          apiURL,
			JSON:         query.JSON,
			TimeRange:    query.TimeRange,
			Query:        rawQuery,
			Resources:    resources,
		})
	}

	return azureLogAnalyticsQueries, nil
}
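
// executeQuery runs a single Log Analytics query against the API and converts the response into a data frame.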
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, logger log.Logger, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client,
	url string, tracer tracing.Tracer) backend.DataResponse {
	dataResponse := backend.DataResponse{}

	dataResponseErrorWithExecuted := func(err error) backend.DataResponse {
		dataResponse.Error = err
		dataResponse.Frames = data.Frames{
			&data.Frame{
				RefID: query.RefID,
				Meta: &data.FrameMeta{
					ExecutedQueryString: query.Query,
				},
			},
		}
		return dataResponse
	}

	// If azureLogAnalyticsSameAs is defined and set to false, return an error
	if sameAs, ok := dsInfo.JSONData["azureLogAnalyticsSameAs"]; ok && !sameAs.(bool) {
		return dataResponseErrorWithExecuted(fmt.Errorf("credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials"))
	}

	req, err := e.createRequest(ctx, logger, url, query)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}
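
	// Record query metadata (target, time range, datasource and org IDs) on the tracing span.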
	ctx, span := tracer.Start(ctx, "azure log analytics query")
	span.SetAttributes("target", query.Query, attribute.Key("target").String(query.Query))
	span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
	span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))

	defer span.End()

	tracer.Inject(ctx, req.Header, span)

	logger.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
	res, err := client.Do(req)
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	defer func() {
		err := res.Body.Close()
		if err != nil {
			logger.Warn("failed to close response body", "error", err)
		}
	}()

	logResponse, err := e.unmarshalResponse(logger, res)
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	t, err := logResponse.GetPrimaryResultTable()
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	frame, err := ResponseTableToFrame(t, query.RefID, query.Query)
	if err != nil {
		return dataResponseErrorWithExecuted(err)
	}

	frame = appendErrorNotice(frame, logResponse.Error)
	if frame == nil {
		return dataResponse
	}
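
	// Build a deep link to this query in the Azure Portal so it can be attached to the frame below.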
	azurePortalBaseUrl, err := GetAzurePortalUrl(dsInfo.Cloud)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	queryUrl, err := getQueryUrl(query.Query, query.Resources, azurePortalBaseUrl)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}
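
	// For the time series result format, try to convert long-format tables to wide frames;
	// fall back to the raw table with a warning notice if the conversion fails.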
	if query.ResultFormat == types.TimeSeries {
		tsSchema := frame.TimeSeriesSchema()
		if tsSchema.Type == data.TimeSeriesTypeLong {
			wideFrame, err := data.LongToWide(frame, nil)
			if err == nil {
				frame = wideFrame
			} else {
				frame.AppendNotices(data.Notice{Severity: data.NoticeSeverityWarning, Text: "could not convert frame to time series, returning raw table: " + err.Error()})
			}
		}
	}

	AddConfigLinks(*frame, queryUrl)
	dataResponse.Frames = data.Frames{frame}
	return dataResponse
}
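
// appendErrorNotice attaches the API error (if any) to the frame as a notice, creating an empty frame when needed.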
func appendErrorNotice(frame *data.Frame, err *AzureLogAnalyticsAPIError) *data.Frame {
	if err == nil {
		return frame
	}
	if frame == nil {
		frame = &data.Frame{}
	}
	frame.AppendNotices(apiErrorToNotice(err))
	return frame
}
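
// createRequest builds the POST request for the Log Analytics query API, sending the KQL query
// and any additional resources in the JSON body.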
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, logger log.Logger, queryURL string, query *AzureLogAnalyticsQuery) (*http.Request, error) {
	body := map[string]interface{}{
		"query": query.Query,
	}
	if len(query.Resources) > 1 {
		body["resources"] = query.Resources
	}
	jsonValue, err := json.Marshal(body)
	if err != nil {
		return nil, fmt.Errorf("%v: %w", "failed to create request", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost, queryURL, bytes.NewBuffer(jsonValue))
	if err != nil {
		logger.Debug("Failed to create request", "error", err)
		return nil, fmt.Errorf("%v: %w", "failed to create request", err)
	}
	req.URL.Path = "/"
	req.Header.Set("Content-Type", "application/json")
	req.URL.Path = path.Join(req.URL.Path, query.URL)

	return req, nil
}

type AzureLogAnalyticsURLResources struct {
	Resources []AzureLogAnalyticsURLResource `json:"resources"`
}

type AzureLogAnalyticsURLResource struct {
	ResourceID string `json:"resourceId"`
}
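
// getQueryUrl builds an Azure Portal deep link that opens the given query against the given resources in the Logs blade.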
func getQueryUrl(query string, resources []string, azurePortalUrl string) (string, error) {
	encodedQuery, err := encodeQuery(query)
	if err != nil {
		return "", fmt.Errorf("failed to encode the query: %s", err)
	}

	portalUrl := azurePortalUrl
	portalUrl += "/#blade/Microsoft_OperationsManagementSuite_Workspace/AnalyticsBlade/initiator/AnalyticsShareLinkToQuery/isQueryEditorVisible/true/scope/"

	resourcesJson := AzureLogAnalyticsURLResources{
		Resources: make([]AzureLogAnalyticsURLResource, 0),
	}
	for _, resource := range resources {
		resourcesJson.Resources = append(resourcesJson.Resources, AzureLogAnalyticsURLResource{
			ResourceID: resource,
		})
	}
	resourcesMarshalled, err := json.Marshal(resourcesJson)
	if err != nil {
		return "", fmt.Errorf("failed to marshal log analytics resources: %s", err)
	}

	portalUrl += url.QueryEscape(string(resourcesMarshalled))
	portalUrl += "/query/" + url.PathEscape(encodedQuery) + "/isQueryBase64Compressed/true/timespanInIsoFormat/P1D"
	return portalUrl, nil
}

// Error definition has been inferred from real data and other model definitions like
// https://github.com/Azure/azure-sdk-for-go/blob/3640559afddbad452d265b54fb1c20b30be0b062/services/preview/virtualmachineimagebuilder/mgmt/2019-05-01-preview/virtualmachineimagebuilder/models.go
type AzureLogAnalyticsAPIError struct {
	Details *[]AzureLogAnalyticsAPIErrorBase `json:"details,omitempty"`
	Code    *string                          `json:"code,omitempty"`
	Message *string                          `json:"message,omitempty"`
}

type AzureLogAnalyticsAPIErrorBase struct {
	Code       *string                      `json:"code,omitempty"`
	Message    *string                      `json:"message,omitempty"`
	Innererror *AzureLogAnalyticsInnerError `json:"innererror,omitempty"`
}

type AzureLogAnalyticsInnerError struct {
	Code         *string `json:"code,omitempty"`
	Message      *string `json:"message,omitempty"`
	Severity     *int    `json:"severity,omitempty"`
	SeverityName *string `json:"severityName,omitempty"`
}

// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
	Tables []types.AzureResponseTable `json:"tables"`
	Error  *AzureLogAnalyticsAPIError `json:"error,omitempty"`
}

// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
// error if there is no table by that name.
func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*types.AzureResponseTable, error) {
	for _, t := range ar.Tables {
		if t.Name == "PrimaryResult" {
			return &t, nil
		}
	}
	return nil, fmt.Errorf("no data as PrimaryResult table is missing from the response")
}
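
// unmarshalResponse reads the HTTP response body and decodes it into an AzureLogAnalyticsResponse,
// returning an error for non-2xx status codes.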
func (e *AzureLogAnalyticsDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (AzureLogAnalyticsResponse, error) {
	body, err := io.ReadAll(res.Body)
	if err != nil {
		return AzureLogAnalyticsResponse{}, err
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Warn("Failed to close response body", "err", err)
		}
	}()

	if res.StatusCode/100 != 2 {
		logger.Debug("Request failed", "status", res.Status, "body", string(body))
		return AzureLogAnalyticsResponse{}, fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body))
	}

	var data AzureLogAnalyticsResponse
	d := json.NewDecoder(bytes.NewReader(body))
	d.UseNumber()
	err = d.Decode(&data)
	if err != nil {
		logger.Debug("Failed to unmarshal Azure Log Analytics response", "error", err, "status", res.Status, "body", string(body))
		return AzureLogAnalyticsResponse{}, err
	}

	return data, nil
}

// LogAnalyticsMeta is a type for a Frame's Meta's Custom property.
type LogAnalyticsMeta struct {
	ColumnTypes []string `json:"azureColumnTypes"`
}

// encodeQuery compresses the query with gzip and base64-encodes it so links to the query can be built.
func encodeQuery(rawQuery string) (string, error) {
	var b bytes.Buffer
	gz := gzip.NewWriter(&b)
	if _, err := gz.Write([]byte(rawQuery)); err != nil {
		return "", err
	}

	if err := gz.Close(); err != nil {
		return "", err
	}

	return base64.StdEncoding.EncodeToString(b.Bytes()), nil
}