package loganalytics

import (
	"bytes"
	"compress/gzip"
	"context"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"path"
	"regexp"
	"strings"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/trace"

	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/utils"
)

// ResourceRequest handles resource calls for the Azure Log Analytics datasource.
// Requests to /usage/basiclogs are handled by GetBasicLogsUsage; all other requests
// are proxied straight to the Azure API.
func (e *AzureLogAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) (http.ResponseWriter, error) {
	if req.URL.Path == "/usage/basiclogs" {
		newUrl := &url.URL{
			Scheme: req.URL.Scheme,
			Host:   req.URL.Host,
			Path:   "/v1/query",
		}
		return e.GetBasicLogsUsage(req.Context(), newUrl.String(), cli, rw, req.Body)
	}

	return e.Proxy.Do(rw, req, cli)
}

// GetBasicLogsUsage builds and executes a new query request that gets the data ingested
// for the given table in the basic logs query.
func (e *AzureLogAnalyticsDatasource) GetBasicLogsUsage(ctx context.Context, url string, client *http.Client, rw http.ResponseWriter, reqBody io.ReadCloser) (http.ResponseWriter, error) {
	// read the full body
	originalPayload, readErr := io.ReadAll(reqBody)
	if readErr != nil {
		return rw, fmt.Errorf("failed to read request body: %w", readErr)
	}

	var payload BasicLogsUsagePayload
	jsonErr := json.Unmarshal(originalPayload, &payload)
	if jsonErr != nil {
		return rw, fmt.Errorf("error decoding basic logs table usage payload: %w", jsonErr)
	}
	table := payload.Table

	from, fromErr := ConvertTime(payload.From)
	if fromErr != nil {
		return rw, fmt.Errorf("failed to convert from time: %w", fromErr)
	}

	to, toErr := ConvertTime(payload.To)
	if toErr != nil {
		return rw, fmt.Errorf("failed to convert to time: %w", toErr)
	}

	// Basic logs queries only show data for the last 8 days or less.
	// The data volume query should likewise only cover the last 8 days if the time range exceeds that.
	diff := to.Sub(from).Hours()
	if diff > float64(MaxHoursBasicLogs) {
		from = to.Add(-time.Duration(MaxHoursBasicLogs) * time.Hour)
	}

	dataVolumeQueryRaw := GetDataVolumeRawQuery(table)
	dataVolumeQuery := &AzureLogAnalyticsQuery{
		Query:         dataVolumeQueryRaw,
		DashboardTime: true, // necessary to ensure the TimeRange property is used, since the query has no in-query time filter
		TimeRange: backend.TimeRange{
			From: from,
			To:   to,
		},
		TimeColumn: "TimeGenerated",
		Resources:  []string{payload.Resource},
		QueryType:  dataquery.AzureQueryTypeLogAnalytics,
		URL:        getApiURL(payload.Resource, false, false),
	}

	req, err := e.createRequest(ctx, url, dataVolumeQuery)
	if err != nil {
		return rw, err
	}

	_, span := tracing.DefaultTracer().Start(ctx, "azure basic logs usage query", trace.WithAttributes(
		attribute.String("target", dataVolumeQuery.Query),
		attribute.String("table", table),
		attribute.Int64("from", dataVolumeQuery.TimeRange.From.UnixNano()/int64(time.Millisecond)),
		attribute.Int64("until", dataVolumeQuery.TimeRange.To.UnixNano()/int64(time.Millisecond)),
	))
	defer span.End()

	resp, err := client.Do(req)
	if err != nil {
		return rw, err
	}

	defer func() {
		if err := resp.Body.Close(); err != nil {
			e.Logger.Warn("Failed to close response body for data volume request", "err", err)
		}
	}()

	logResponse, err := e.unmarshalResponse(resp)
	if err != nil {
		return rw, err
	}

	t, err := logResponse.GetPrimaryResultTable()
	if err != nil {
		return rw, err
	}
	// Guard against an empty result or a non-numeric value so the type assertion below cannot panic.
	if len(t.Rows) == 0 || len(t.Rows[0]) == 0 {
		return rw, fmt.Errorf("data volume query returned no rows")
	}
	num, ok := t.Rows[0][0].(json.Number)
	if !ok {
		return rw, fmt.Errorf("data volume query returned an unexpected value of type %T", t.Rows[0][0])
	}
	value, err := num.Float64()
	if err != nil {
		return rw, err
	}

	_, err = rw.Write([]byte(fmt.Sprintf("%f", value)))
	if err != nil {
		return rw, err
	}

	return rw, err
}
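
// For reference, a minimal sketch of the round trip handled above. The JSON keys are
// assumptions based on BasicLogsUsagePayload's field names and the values are placeholders,
// not taken from a real request:
//
//	POST <plugin resource URL>/usage/basiclogs
//	{"table": "ContainerLogV2", "resource": "/subscriptions/.../workspaces/my-ws", "from": "...", "to": "..."}
//
// The handler responds with a single ingested-data figure formatted with %f, e.g. "1.234567".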

// ExecuteTimeSeriesQuery does the following:
// 1. builds the AzureMonitor URL and query string for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureLogAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, fromAlert bool) (*backend.QueryDataResponse, error) {
	result := backend.NewQueryDataResponse()

	for _, query := range originalQueries {
		logsQuery, err := e.buildQuery(ctx, query, dsInfo, fromAlert)
		if err != nil {
			result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(err)
			continue
		}
		res, err := e.executeQuery(ctx, logsQuery, dsInfo, client, url)
		if err != nil {
			result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(err)
			continue
		}
		result.Responses[query.RefID] = *res
	}

	return result, nil
}

// buildLogAnalyticsQuery parses the raw query JSON into an AzureLogAnalyticsQuery for the Log Analytics query type.
func buildLogAnalyticsQuery(query backend.DataQuery, dsInfo types.DatasourceInfo, appInsightsRegExp *regexp.Regexp, fromAlert bool) (*AzureLogAnalyticsQuery, error) {
	queryJSONModel := types.LogJSONQuery{}
	err := json.Unmarshal(query.JSON, &queryJSONModel)
	if err != nil {
		return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
	}

	var queryString string
	appInsightsQuery := false
	dashboardTime := false
	timeColumn := ""
	azureLogAnalyticsTarget := queryJSONModel.AzureLogAnalytics
	basicLogsQuery := false
	basicLogsEnabled := false

	resultFormat := ParseResultFormat(azureLogAnalyticsTarget.ResultFormat, dataquery.AzureQueryTypeLogAnalytics)

	basicLogsQueryFlag := false
	if azureLogAnalyticsTarget.BasicLogsQuery != nil {
		basicLogsQueryFlag = *azureLogAnalyticsTarget.BasicLogsQuery
	}

	resources, resourceOrWorkspace := retrieveResources(azureLogAnalyticsTarget)
	appInsightsQuery = appInsightsRegExp.Match([]byte(resourceOrWorkspace))

	if value, ok := dsInfo.JSONData["basicLogsEnabled"].(bool); ok {
		basicLogsEnabled = value
	}

	if basicLogsQueryFlag {
		if meetsBasicLogsCriteria, meetsBasicLogsCriteriaErr := meetsBasicLogsCriteria(resources, fromAlert, basicLogsEnabled); meetsBasicLogsCriteriaErr != nil {
			return nil, meetsBasicLogsCriteriaErr
		} else {
			basicLogsQuery = meetsBasicLogsCriteria
		}
	}

	if azureLogAnalyticsTarget.Query != nil {
		queryString = *azureLogAnalyticsTarget.Query
	}

	if azureLogAnalyticsTarget.DashboardTime != nil {
		dashboardTime = *azureLogAnalyticsTarget.DashboardTime
		if dashboardTime {
			if azureLogAnalyticsTarget.TimeColumn != nil {
				timeColumn = *azureLogAnalyticsTarget.TimeColumn
			} else {
				// Final fallback to TimeGenerated if no column is provided
				timeColumn = "TimeGenerated"
			}
		}
	}

	apiURL := getApiURL(resourceOrWorkspace, appInsightsQuery, basicLogsQuery)

	rawQuery, err := macros.KqlInterpolate(query, dsInfo, queryString, "TimeGenerated")
	if err != nil {
		return nil, err
	}

	return &AzureLogAnalyticsQuery{
		RefID:            query.RefID,
		ResultFormat:     resultFormat,
		URL:              apiURL,
		JSON:             query.JSON,
		TimeRange:        query.TimeRange,
		Query:            rawQuery,
		Resources:        resources,
		QueryType:        dataquery.AzureQueryType(query.QueryType),
		AppInsightsQuery: appInsightsQuery,
		DashboardTime:    dashboardTime,
		TimeColumn:       timeColumn,
		BasicLogs:        basicLogsQuery,
	}, nil
}
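
// For illustration, the per-query JSON decoded by buildLogAnalyticsQuery is assumed to look
// roughly like the following (field names follow the dataquery.AzureLogsQuery kind; the
// values are placeholders, not taken from a real dashboard):
//
//	{
//	  "azureLogAnalytics": {
//	    "query": "AzureActivity | take 10",
//	    "resources": ["/subscriptions/.../providers/Microsoft.OperationalInsights/workspaces/my-ws"],
//	    "resultFormat": "time_series",
//	    "dashboardTime": true,
//	    "timeColumn": "TimeGenerated",
//	    "basicLogsQuery": false
//	  }
//	}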

func (e *AzureLogAnalyticsDatasource) buildQuery(ctx context.Context, query backend.DataQuery, dsInfo types.DatasourceInfo, fromAlert bool) (*AzureLogAnalyticsQuery, error) {
	var azureLogAnalyticsQuery *AzureLogAnalyticsQuery
	appInsightsRegExp, err := regexp.Compile("(?i)providers/microsoft.insights/components")
	if err != nil {
		return nil, fmt.Errorf("failed to compile Application Insights regex")
	}

	if query.QueryType == string(dataquery.AzureQueryTypeLogAnalytics) {
		azureLogAnalyticsQuery, err = buildLogAnalyticsQuery(query, dsInfo, appInsightsRegExp, fromAlert)
		if err != nil {
			errorMessage := fmt.Errorf("failed to build azure log analytics query: %w", err)
			return nil, utils.ApplySourceFromError(errorMessage, err)
		}
	}

	if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) || query.QueryType == string(dataquery.AzureQueryTypeTraceExemplar) {
		if query.QueryType == string(dataquery.AzureQueryTypeTraceExemplar) {
			cfg := backend.GrafanaConfigFromContext(ctx)
			hasPromExemplarsToggle := cfg.FeatureToggles().IsEnabled("azureMonitorPrometheusExemplars")
			if !hasPromExemplarsToggle {
				return nil, backend.DownstreamError(fmt.Errorf("query type unsupported as azureMonitorPrometheusExemplars feature toggle is not enabled"))
			}
		}
		azureAppInsightsQuery, err := buildAppInsightsQuery(ctx, query, dsInfo, appInsightsRegExp, e.Logger)
		if err != nil {
			errorMessage := fmt.Errorf("failed to build azure application insights query: %w", err)
			return nil, utils.ApplySourceFromError(errorMessage, err)
		}
		azureLogAnalyticsQuery = azureAppInsightsQuery
	}

	return azureLogAnalyticsQuery, nil
}

func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string) (*backend.DataResponse, error) {
	// If azureLogAnalyticsSameAs is defined and set to false, return an error
	if sameAs, ok := dsInfo.JSONData["azureLogAnalyticsSameAs"]; ok && !sameAs.(bool) {
		return nil, backend.DownstreamError(fmt.Errorf("credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials"))
	}

	queryJSONModel := dataquery.AzureMonitorQuery{}
	err := json.Unmarshal(query.JSON, &queryJSONModel)
	if err != nil {
		return nil, err
	}

	if query.QueryType == dataquery.AzureQueryTypeAzureTraces {
		if query.ResultFormat == dataquery.ResultFormatTrace && query.Query == "" {
			return nil, backend.DownstreamError(fmt.Errorf("cannot visualise trace events using the trace visualiser"))
		}
	}

	req, err := e.createRequest(ctx, url, query)
	if err != nil {
		return nil, err
	}

	_, span := tracing.DefaultTracer().Start(ctx, "azure log analytics query", trace.WithAttributes(
		attribute.String("target", query.Query),
		attribute.Bool("basic_logs", query.BasicLogs),
		attribute.Int64("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond)),
		attribute.Int64("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond)),
		attribute.Int64("datasource_id", dsInfo.DatasourceID),
		attribute.Int64("org_id", dsInfo.OrgID),
	))
	defer span.End()

	res, err := client.Do(req)
	if err != nil {
		return nil, backend.DownstreamError(err)
	}

	defer func() {
		if err := res.Body.Close(); err != nil {
			e.Logger.Warn("Failed to close response body", "err", err)
		}
	}()

	logResponse, err := e.unmarshalResponse(res)
	if err != nil {
		return nil, err
	}

	t, err := logResponse.GetPrimaryResultTable()
	if err != nil {
		return nil, err
	}

	logLimitDisabled := backend.GrafanaConfigFromContext(ctx).FeatureToggles().IsEnabled("azureMonitorDisableLogLimit")

	frame, err := ResponseTableToFrame(t, query.RefID, query.Query, query.QueryType, query.ResultFormat, logLimitDisabled)
	if err != nil {
		return nil, err
	}

	frame = appendErrorNotice(frame, logResponse.Error)
	if frame == nil {
		return &backend.DataResponse{}, nil
	}

	// Ensure Meta.Custom is initialized
	if frame.Meta.Custom == nil {
		frame.Meta.Custom = &LogAnalyticsMeta{
			ColumnTypes: make([]string, 0),
		}
	}

	queryUrl, err := getQueryUrl(query.Query, query.Resources, dsInfo.Routes["Azure Portal"].URL, query.TimeRange)
	if err != nil {
		return nil, err
	}

	// Set the preferred visualization
	switch query.ResultFormat {
	case dataquery.ResultFormatTrace:
		if query.QueryType == dataquery.AzureQueryTypeAzureTraces || query.QueryType == dataquery.AzureQueryTypeTraceExemplar {
			frame.Meta.PreferredVisualization = data.VisTypeTrace
		}
	case dataquery.ResultFormatTable:
		frame.Meta.PreferredVisualization = data.VisTypeTable
	case dataquery.ResultFormatLogs:
		frame.Meta.PreferredVisualization = data.VisTypeLogs
		if logMeta, ok := frame.Meta.Custom.(*LogAnalyticsMeta); ok {
			frame.Meta.Custom = &LogAnalyticsMeta{
				ColumnTypes:     logMeta.ColumnTypes,
				AzurePortalLink: queryUrl,
			}
		} else {
			frame.Meta.Custom = &LogAnalyticsMeta{
				AzurePortalLink: queryUrl,
			}
		}
	case dataquery.ResultFormatTimeSeries:
		tsSchema := frame.TimeSeriesSchema()
		if tsSchema.Type == data.TimeSeriesTypeLong {
			wideFrame, err := data.LongToWide(frame, nil)
			if err == nil {
				frame = wideFrame
			} else {
				frame.AppendNotices(data.Notice{
					Severity: data.NoticeSeverityWarning,
					Text:     "could not convert frame to time series, returning raw table: " + err.Error(),
				})
			}
		}
	}

	// Use the parent span query for the parent span data link
	err = addDataLinksToFields(query, dsInfo.Routes["Azure Portal"].URL, frame, dsInfo, queryUrl)
	if err != nil {
		return nil, err
	}

	dataResponse := backend.DataResponse{Frames: data.Frames{frame}}
	return &dataResponse, nil
}

func addDataLinksToFields(query *AzureLogAnalyticsQuery, azurePortalBaseUrl string, frame *data.Frame, dsInfo types.DatasourceInfo, queryUrl string) error {
	if query.QueryType == dataquery.AzureQueryTypeAzureTraces {
		err := addTraceDataLinksToFields(query, azurePortalBaseUrl, frame, dsInfo)
		if err != nil {
			return err
		}

		return nil
	}

	if query.ResultFormat == dataquery.ResultFormatLogs {
		return nil
	}

	AddConfigLinks(*frame, queryUrl, nil)

	return nil
}

func addTraceDataLinksToFields(query *AzureLogAnalyticsQuery, azurePortalBaseUrl string, frame *data.Frame, dsInfo types.DatasourceInfo) error {
	tracesUrl, err := getTracesQueryUrl(azurePortalBaseUrl)
	if err != nil {
		return err
	}

	queryJSONModel := dataquery.AzureMonitorQuery{}
	err = json.Unmarshal(query.JSON, &queryJSONModel)
	if err != nil {
		return err
	}

	traceIdVariable := "${__data.fields.traceID}"
	resultFormat := dataquery.ResultFormatTrace
	queryJSONModel.AzureTraces.ResultFormat = &resultFormat
	queryJSONModel.AzureTraces.Query = &query.TraceExploreQuery
	if queryJSONModel.AzureTraces.OperationId == nil || *queryJSONModel.AzureTraces.OperationId == "" {
		queryJSONModel.AzureTraces.OperationId = &traceIdVariable
	}

	logsQueryType := string(dataquery.AzureQueryTypeLogAnalytics)
	logsJSONModel := dataquery.AzureMonitorQuery{
		QueryType: &logsQueryType,
		AzureLogAnalytics: &dataquery.AzureLogsQuery{
			Query:     &query.TraceLogsExploreQuery,
			Resources: []string{queryJSONModel.AzureTraces.Resources[0]},
		},
	}

	if query.ResultFormat == dataquery.ResultFormatTable {
		AddCustomDataLink(*frame, data.DataLink{
			Title: "Explore Trace: ${__data.fields.traceID}",
			URL:   "",
			Internal: &data.InternalDataLink{
				DatasourceUID:  dsInfo.DatasourceUID,
				DatasourceName: dsInfo.DatasourceName,
				Query:          queryJSONModel,
			},
		})

		queryJSONModel.AzureTraces.Query = &query.TraceParentExploreQuery
		AddCustomDataLink(*frame, data.DataLink{
			Title: "Explore Parent Span: ${__data.fields.parentSpanID}",
			URL:   "",
			Internal: &data.InternalDataLink{
				DatasourceUID:  dsInfo.DatasourceUID,
				DatasourceName: dsInfo.DatasourceName,
				Query:          queryJSONModel,
			},
		})

		linkTitle := "Explore Trace in Azure Portal"
		AddConfigLinks(*frame, tracesUrl, &linkTitle)
	}

	AddCustomDataLink(*frame, data.DataLink{
		Title: "Explore Trace Logs",
		URL:   "",
		Internal: &data.InternalDataLink{
			DatasourceUID:  dsInfo.DatasourceUID,
			DatasourceName: dsInfo.DatasourceName,
			Query:          logsJSONModel,
		},
	})

	return nil
}

func appendErrorNotice(frame *data.Frame, err *AzureLogAnalyticsAPIError) *data.Frame {
	if err == nil {
		return frame
	}
	if frame == nil {
		frame = &data.Frame{}
	}
	frame.AppendNotices(apiErrorToNotice(err))
	return frame
}

func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, queryURL string, query *AzureLogAnalyticsQuery) (*http.Request, error) {
	body := map[string]interface{}{
		"query": query.Query,
	}

	if query.DashboardTime {
		from := query.TimeRange.From.Format(time.RFC3339)
		to := query.TimeRange.To.Format(time.RFC3339)
		timespan := fmt.Sprintf("%s/%s", from, to)
		body["timespan"] = timespan
		body["query_datetimescope_from"] = from
		body["query_datetimescope_to"] = to
		body["query_datetimescope_column"] = query.TimeColumn
	}

	if len(query.Resources) > 1 && query.QueryType == dataquery.AzureQueryTypeLogAnalytics && !query.AppInsightsQuery {
		str := strings.ToLower(query.Resources[0])
		if strings.Contains(str, "microsoft.operationalinsights/workspaces") {
			body["workspaces"] = query.Resources
		} else {
			body["resources"] = query.Resources
		}
	}

	if query.AppInsightsQuery {
		body["applications"] = []string{query.Resources[0]}
	}

	jsonValue, err := json.Marshal(body)
	if err != nil {
		return nil, fmt.Errorf("%v: %w", "failed to create request", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost, queryURL, bytes.NewBuffer(jsonValue))
	if err != nil {
		return nil, fmt.Errorf("%v: %w", "failed to create request", err)
	}

	req.URL.Path = "/"
	req.Header.Set("Content-Type", "application/json")
	req.URL.Path = path.Join(req.URL.Path, query.URL)

	return req, nil
}
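
// For illustration, a dashboard-time query against two workspaces is assumed to serialize to
// a request body along these lines (all values are placeholders, not taken from a real query):
//
//	{
//	  "query": "AzureActivity | summarize count() by bin(TimeGenerated, 1h)",
//	  "timespan": "2024-01-01T00:00:00Z/2024-01-02T00:00:00Z",
//	  "query_datetimescope_from": "2024-01-01T00:00:00Z",
//	  "query_datetimescope_to": "2024-01-02T00:00:00Z",
//	  "query_datetimescope_column": "TimeGenerated",
//	  "workspaces": ["/subscriptions/.../workspaces/ws-1", "/subscriptions/.../workspaces/ws-2"]
//	}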

type AzureLogAnalyticsURLResources struct {
	Resources []AzureLogAnalyticsURLResource `json:"resources"`
}

type AzureLogAnalyticsURLResource struct {
	ResourceID string `json:"resourceId"`
}

func getQueryUrl(query string, resources []string, azurePortalUrl string, timeRange backend.TimeRange) (string, error) {
	encodedQuery, err := encodeQuery(query)
	if err != nil {
		return "", fmt.Errorf("failed to encode the query: %s", err)
	}

	portalUrl := azurePortalUrl + "/#blade/Microsoft_OperationsManagementSuite_Workspace/AnalyticsBlade/initiator/AnalyticsShareLinkToQuery/isQueryEditorVisible/true/scope/"
	resourcesJson := AzureLogAnalyticsURLResources{
		Resources: make([]AzureLogAnalyticsURLResource, 0),
	}
	for _, resource := range resources {
		resourcesJson.Resources = append(resourcesJson.Resources, AzureLogAnalyticsURLResource{
			ResourceID: resource,
		})
	}
	resourcesMarshalled, err := json.Marshal(resourcesJson)
	if err != nil {
		return "", fmt.Errorf("failed to marshal log analytics resources: %s", err)
	}

	from := timeRange.From.Format(time.RFC3339)
	to := timeRange.To.Format(time.RFC3339)
	timespan := url.QueryEscape(fmt.Sprintf("%s/%s", from, to))

	portalUrl += url.QueryEscape(string(resourcesMarshalled))
	portalUrl += "/query/" + url.PathEscape(encodedQuery) + "/isQueryBase64Compressed/true/timespan/" + timespan

	return portalUrl, nil
}
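
// The resulting deep link has roughly this shape, assembled purely from the pieces above
// (wrapped across lines for readability; the actual link is a single string and the
// placeholders are illustrative):
//
//	<azurePortalUrl>/#blade/Microsoft_OperationsManagementSuite_Workspace/AnalyticsBlade/
//	  initiator/AnalyticsShareLinkToQuery/isQueryEditorVisible/true/
//	  scope/<query-escaped {"resources":[{"resourceId":"..."}]}>/
//	  query/<base64 of the gzip-compressed KQL>/isQueryBase64Compressed/true/timespan/<from>%2F<to>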

func getTracesQueryUrl(azurePortalUrl string) (string, error) {
	portalUrl := azurePortalUrl
	portalUrl += "/#view/AppInsightsExtension/DetailsV2Blade/ComponentId~/"
	resource := "%7B%22ResourceId%22:%22${__data.fields.resource:percentencode}%22%7D"
	portalUrl += resource
	portalUrl += "/DataModel~/"

	// We're making use of data link variables to select the necessary fields in the frontend
	eventId := "%22eventId%22%3A%22${__data.fields.itemId}%22%2C"
	timestamp := "%22timestamp%22%3A%22${__data.fields.startTime}%22%2C"
	eventTable := "%22eventTable%22%3A%22${__data.fields.itemType}%22"
	traceObject := fmt.Sprintf("%%7B%s%s%s%%7D", eventId, timestamp, eventTable)

	portalUrl += traceObject

	return portalUrl, nil
}

func getCorrelationWorkspaces(ctx context.Context, baseResource string, resourcesMap map[string]bool, dsInfo types.DatasourceInfo, operationId string) (map[string]bool, error) {
	azMonService := dsInfo.Services["Azure Monitor"]
	correlationUrl := azMonService.URL + fmt.Sprintf("%s/providers/microsoft.insights/transactions/%s", baseResource, operationId)

	callCorrelationAPI := func(url string) (AzureCorrelationAPIResponse, error) {
		req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer([]byte{}))
		if err != nil {
			return AzureCorrelationAPIResponse{}, fmt.Errorf("%v: %w", "failed to create request", err)
		}
		req.URL.Path = url
		req.Header.Set("Content-Type", "application/json")
		values := req.URL.Query()
		values.Add("api-version", "2019-10-17-preview")
		req.URL.RawQuery = values.Encode()
		req.Method = "GET"

		_, span := tracing.DefaultTracer().Start(ctx, "azure traces correlation request", trace.WithAttributes(
			attribute.String("target", req.URL.String()),
			attribute.Int64("datasource_id", dsInfo.DatasourceID),
			attribute.Int64("org_id", dsInfo.OrgID),
		))
		defer span.End()

		res, err := azMonService.HTTPClient.Do(req)
		if err != nil {
			return AzureCorrelationAPIResponse{}, backend.DownstreamError(err)
		}
		body, err := io.ReadAll(res.Body)
		if err != nil {
			return AzureCorrelationAPIResponse{}, backend.DownstreamError(err)
		}

		defer func() {
			if err := res.Body.Close(); err != nil {
				azMonService.Logger.Warn("Failed to close response body", "err", err)
			}
		}()

		if res.StatusCode/100 != 2 {
			if res.StatusCode == 404 {
				return AzureCorrelationAPIResponse{}, backend.DownstreamError(fmt.Errorf("requested trace not found by Application Insights indexing. Select the relevant Application Insights resource to search for the Operation ID directly"))
			}
			return AzureCorrelationAPIResponse{}, utils.CreateResponseErrorFromStatusCode(res.StatusCode, res.Status, body)
		}

		var data AzureCorrelationAPIResponse
		d := json.NewDecoder(bytes.NewReader(body))
		d.UseNumber()
		err = d.Decode(&data)
		if err != nil {
			return AzureCorrelationAPIResponse{}, err
		}

		for _, resource := range data.Properties.Resources {
			lowerCaseResource := strings.ToLower(resource)
			if _, ok := resourcesMap[lowerCaseResource]; !ok {
				resourcesMap[lowerCaseResource] = true
			}
		}
		return data, nil
	}
	var nextLink *string
	var correlationResponse AzureCorrelationAPIResponse

	correlationResponse, err := callCorrelationAPI(correlationUrl)
	if err != nil {
		return nil, err
	}
	nextLink = correlationResponse.Properties.NextLink

	// Follow nextLink to page through any additional correlated resources.
	for nextLink != nil {
		correlationResponse, err := callCorrelationAPI(*nextLink)
		if err != nil {
			return nil, err
		}
		nextLink = correlationResponse.Properties.NextLink
	}

	// Remove the base element as that's where the query is run anyway
	delete(resourcesMap, strings.ToLower(baseResource))
	return resourcesMap, nil
}
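
// For reference, each call above hits the correlation endpoint roughly as follows (the API
// version is taken from the code; the resource path is a placeholder):
//
//	GET <Azure Monitor URL>/<baseResource>/providers/microsoft.insights/transactions/<operationId>?api-version=2019-10-17-preview
//
// The response's properties are expected to carry the correlated resource IDs plus an
// optional nextLink used for paging.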

// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
// error if there is no table by that name.
func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*types.AzureResponseTable, error) {
	for _, t := range ar.Tables {
		if t.Name == "PrimaryResult" {
			return &t, nil
		}
	}
	return nil, fmt.Errorf("no data as PrimaryResult table is missing from the response")
}

func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (AzureLogAnalyticsResponse, error) {
	body, err := io.ReadAll(res.Body)
	if err != nil {
		return AzureLogAnalyticsResponse{}, err
	}

	defer func() {
		if err := res.Body.Close(); err != nil {
			e.Logger.Warn("Failed to close response body", "err", err)
		}
	}()

	if res.StatusCode/100 != 2 {
		return AzureLogAnalyticsResponse{}, utils.CreateResponseErrorFromStatusCode(res.StatusCode, res.Status, body)
	}

	var data AzureLogAnalyticsResponse
	d := json.NewDecoder(bytes.NewReader(body))
	d.UseNumber()
	err = d.Decode(&data)
	if err != nil {
		return AzureLogAnalyticsResponse{}, err
	}

	return data, nil
}

// LogAnalyticsMeta is a type for a Frame's Meta's Custom property.
type LogAnalyticsMeta struct {
	ColumnTypes     []string `json:"azureColumnTypes"`
	AzurePortalLink string   `json:"azurePortalLink,omitempty"`
}
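
// When attached to a frame's custom metadata, the struct above marshals to JSON roughly like
// the following (values are illustrative only):
//
//	{"azureColumnTypes": ["datetime", "string", "long"], "azurePortalLink": "https://portal.azure.com/#blade/..."}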

// encodeQuery gzip-compresses and base64-encodes the raw query so the frontend can build links from it.
func encodeQuery(rawQuery string) (string, error) {
	var b bytes.Buffer
	gz := gzip.NewWriter(&b)
	if _, err := gz.Write([]byte(rawQuery)); err != nil {
		return "", err
	}

	if err := gz.Close(); err != nil {
		return "", err
	}

	return base64.StdEncoding.EncodeToString(b.Bytes()), nil
}
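
// A minimal sketch of the inverse operation (assumed to happen in the frontend; shown here
// only to document the encoding, errors elided for brevity, not used by this package):
//
//	raw, _ := base64.StdEncoding.DecodeString(encoded)
//	zr, _ := gzip.NewReader(bytes.NewReader(raw))
//	kql, _ := io.ReadAll(zr) // original query text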