AzureMonitor: Use plugin SDK contracts (#34729)

This commit is contained in:
Andres Martinez Gotor 2021-06-07 14:54:51 +02:00 committed by GitHub
parent e8bc48a796
commit d225323049
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 652 additions and 735 deletions

View File

@ -13,8 +13,10 @@ import (
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/securejsondata"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
@ -26,8 +28,6 @@ import (
// ApplicationInsightsDatasource calls the application insights query API.
type ApplicationInsightsDatasource struct {
httpClient *http.Client
dsInfo *models.DataSource
pluginManager plugins.Manager
cfg *setting.Cfg
}
@ -37,6 +37,7 @@ type ApplicationInsightsDatasource struct {
// used to parse the response.
type ApplicationInsightsQuery struct {
RefID string
TimeRange backend.TimeRange
// Text based raw query options.
ApiURL string
@ -50,45 +51,31 @@ type ApplicationInsightsQuery struct {
aggregation string
}
// nolint:staticcheck // plugins.DataQueryResult deprecated
func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context,
originalQueries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) (plugins.DataResponse, error) {
result := plugins.DataResponse{
Results: map[string]plugins.DataQueryResult{},
}
originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
queries, err := e.buildQueries(originalQueries, timeRange)
queries, err := e.buildQueries(originalQueries)
if err != nil {
return plugins.DataResponse{}, err
return nil, err
}
for _, query := range queries {
queryRes, err := e.executeQuery(ctx, query)
queryRes, err := e.executeQuery(ctx, query, dsInfo)
if err != nil {
return plugins.DataResponse{}, err
return nil, err
}
result.Results[query.RefID] = queryRes
result.Responses[query.RefID] = queryRes
}
return result, nil
}
func (e *ApplicationInsightsDatasource) buildQueries(queries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) ([]*ApplicationInsightsQuery, error) {
func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery) ([]*ApplicationInsightsQuery, error) {
applicationInsightsQueries := []*ApplicationInsightsQuery{}
startTime, err := timeRange.ParseFrom()
if err != nil {
return nil, err
}
endTime, err := timeRange.ParseTo()
if err != nil {
return nil, err
}
for _, query := range queries {
queryBytes, err := query.Model.Encode()
queryBytes, err := query.JSON.MarshalJSON()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Application Insights query into JSON: %w", err)
}
@ -108,14 +95,14 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []plugins.DataSubQu
// Previous versions of the query model don't specify a time grain, so we
// need to fallback to a default value
if timeGrain == "auto" || timeGrain == "" {
timeGrain, err = setAutoTimeGrain(query.IntervalMS, timeGrains)
timeGrain, err = setAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
if timeGrain != "none" {
params.Add("interval", timeGrain)
}
@ -131,6 +118,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []plugins.DataSubQu
}
applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
RefID: query.RefID,
TimeRange: query.TimeRange,
ApiURL: azureURL,
Params: params,
Alias: insightsJSONModel.Alias,
@ -144,15 +132,14 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []plugins.DataSubQu
return applicationInsightsQueries, nil
}
// nolint:staticcheck // plugins.DataQueryResult deprecated
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery) (
plugins.DataQueryResult, error) {
queryResult := plugins.DataQueryResult{Meta: simplejson.New(), RefID: query.RefID}
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo datasourceInfo) (
backend.DataResponse, error) {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, e.dsInfo)
req, err := e.createRequest(ctx, dsInfo)
if err != nil {
queryResult.Error = err
return queryResult, nil
dataResponse.Error = err
return dataResponse, nil
}
req.URL.Path = path.Join(req.URL.Path, query.ApiURL)
@ -160,8 +147,10 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
span, ctx := opentracing.StartSpanFromContext(ctx, "application insights query")
span.SetTag("target", query.Target)
span.SetTag("datasource_id", e.dsInfo.Id)
span.SetTag("org_id", e.dsInfo.OrgId)
span.SetTag("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond))
span.SetTag("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond))
span.SetTag("datasource_id", dsInfo.DatasourceID)
span.SetTag("org_id", dsInfo.OrgID)
defer span.Finish()
@ -175,10 +164,10 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
}
azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
res, err := ctxhttp.Do(ctx, e.httpClient, req)
res, err := ctxhttp.Do(ctx, dsInfo.HTTPClient, req)
if err != nil {
queryResult.Error = err
return queryResult, nil
dataResponse.Error = err
return dataResponse, nil
}
body, err := ioutil.ReadAll(res.Body)
@ -188,48 +177,47 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
}
}()
if err != nil {
return plugins.DataQueryResult{}, err
return backend.DataResponse{}, err
}
if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return plugins.DataQueryResult{}, fmt.Errorf("request failed, status: %s", res.Status)
return backend.DataResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
mr := MetricsResult{}
err = json.Unmarshal(body, &mr)
if err != nil {
return plugins.DataQueryResult{}, err
return backend.DataResponse{}, err
}
frame, err := InsightsMetricsResultToFrame(mr, query.metricName, query.aggregation, query.dimensions)
if err != nil {
queryResult.Error = err
return queryResult, nil
dataResponse.Error = err
return dataResponse, nil
}
applyInsightsMetricAlias(frame, query.Alias)
queryResult.Dataframes = plugins.NewDecodedDataFrames(data.Frames{frame})
return queryResult, nil
dataResponse.Frames = data.Frames{frame}
return dataResponse, nil
}
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
// find plugin
plugin := e.pluginManager.GetDataSource(dsInfo.Type)
plugin := e.pluginManager.GetDataSource(dsName)
if plugin == nil {
return nil, errors.New("unable to find datasource plugin Azure Application Insights")
}
appInsightsRoute, routeName, err := e.getPluginRoute(plugin)
appInsightsRoute, routeName, err := e.getPluginRoute(plugin, dsInfo)
if err != nil {
return nil, err
}
appInsightsAppID := dsInfo.JsonData.Get("appInsightsAppId").MustString()
proxyPass := fmt.Sprintf("%s/v1/apps/%s", routeName, appInsightsAppID)
appInsightsAppID := dsInfo.Settings.AppInsightsAppId
u, err := url.Parse(dsInfo.Url)
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, err
}
@ -241,13 +229,18 @@ func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInf
return nil, errutil.Wrap("Failed to create request", err)
}
pluginproxy.ApplyRoute(ctx, req, proxyPass, appInsightsRoute, dsInfo, e.cfg)
// TODO: Use backend authentication instead
proxyPass := fmt.Sprintf("%s/v1/apps/%s", routeName, appInsightsAppID)
pluginproxy.ApplyRoute(ctx, req, proxyPass, appInsightsRoute, &models.DataSource{
JsonData: simplejson.NewFromAny(dsInfo.JSONData),
SecureJsonData: securejsondata.GetEncryptedJsonData(dsInfo.DecryptedSecureJSONData),
}, e.cfg)
return req, nil
}
func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, e.dsInfo.JsonData)
func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, dsInfo datasourceInfo) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, dsInfo)
if err != nil {
return nil, "", err
}

View File

@ -2,14 +2,12 @@ package azuremonitor
import (
"encoding/json"
"fmt"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
@ -23,33 +21,28 @@ func TestApplicationInsightsDatasource(t *testing.T) {
Convey("Parse queries from frontend and build AzureMonitor API queries", func() {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
tsdbQuery := plugins.DataQuery{
TimeRange: &plugins.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
Queries: []plugins.DataSubQuery{
tsdbQuery := []backend.DataQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
Model: simplejson.NewFromAny(map[string]interface{}{
"appInsights": map[string]interface{}{
JSON: []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "PT1M",
"aggregation": "Average",
"metricName": "server/exceptions",
"alias": "testalias",
"queryType": "Application Insights",
},
}),
"queryType": "Application Insights"
}
}`),
RefID: "A",
IntervalMS: 1234,
},
Interval: 1234,
},
}
Convey("and is a normal query", func() {
queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange)
queries, err := datasource.buildQueries(tsdbQuery)
So(err, ShouldBeNil)
So(len(queries), ShouldEqual, 1)
@ -64,66 +57,68 @@ func TestApplicationInsightsDatasource(t *testing.T) {
})
Convey("and has a time grain set to auto", func() {
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
"appInsights": map[string]interface{}{
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "auto",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
},
})
tsdbQuery.Queries[0].IntervalMS = 400000
"queryType": "Application Insights"
}
}`)
var err error
tsdbQuery[0].Interval, err = time.ParseDuration("400s")
require.NoError(t, err)
queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange)
queries, err := datasource.buildQueries(tsdbQuery)
So(err, ShouldBeNil)
So(queries[0].Params["interval"][0], ShouldEqual, "PT15M")
})
Convey("and has an empty time grain", func() {
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
"appInsights": map[string]interface{}{
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
},
})
tsdbQuery.Queries[0].IntervalMS = 400000
"queryType": "Application Insights"
}
}`)
tsdbQuery[0].Interval, _ = time.ParseDuration("400s")
queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange)
queries, err := datasource.buildQueries(tsdbQuery)
So(err, ShouldBeNil)
So(queries[0].Params["interval"][0], ShouldEqual, "PT15M")
})
Convey("and has a time grain set to auto and the metric has a limited list of allowed time grains", func() {
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
"appInsights": map[string]interface{}{
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "auto",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
"allowedTimeGrainsMs": []int64{60000, 300000},
},
})
tsdbQuery.Queries[0].IntervalMS = 400000
"allowedTimeGrainsMs": [60000, 300000]
}
}`)
tsdbQuery[0].Interval, _ = time.ParseDuration("400s")
queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange)
queries, err := datasource.buildQueries(tsdbQuery)
So(err, ShouldBeNil)
So(queries[0].Params["interval"][0], ShouldEqual, "PT5M")
})
Convey("and has a dimension filter", func() {
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
"appInsights": map[string]interface{}{
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "PT1M",
"aggregation": "Average",
@ -131,11 +126,11 @@ func TestApplicationInsightsDatasource(t *testing.T) {
"alias": "testalias",
"queryType": "Application Insights",
"dimension": "blob",
"dimensionFilter": "blob eq '*'",
},
})
"dimensionFilter": "blob eq '*'"
}
}`)
queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange)
queries, err := datasource.buildQueries(tsdbQuery)
So(err, ShouldBeNil)
So(queries[0].Target, ShouldEqual, "aggregation=Average&filter=blob+eq+%27%2A%27&interval=PT1M&segment=blob&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
@ -143,19 +138,19 @@ func TestApplicationInsightsDatasource(t *testing.T) {
})
Convey("and has a dimension filter set to None", func() {
tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
"appInsights": map[string]interface{}{
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "PT1M",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
"dimension": "None",
},
})
"dimension": "None"
}
}`)
queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange)
queries, err := datasource.buildQueries(tsdbQuery)
So(err, ShouldBeNil)
So(queries[0].Target, ShouldEqual, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
@ -198,20 +193,21 @@ func TestAppInsightsPluginRoutes(t *testing.T) {
tests := []struct {
name string
datasource *ApplicationInsightsDatasource
dsInfo datasourceInfo
expectedRouteName string
expectedRouteURL string
Err require.ErrorAssertionFunc
}{
{
name: "plugin proxy route for the Azure public cloud",
dsInfo: datasourceInfo{
Settings: azureMonitorSettings{
AzureAuthType: AzureAuthClientSecret,
CloudName: "azuremonitor",
},
},
datasource: &ApplicationInsightsDatasource{
cfg: cfg,
dsInfo: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"azureAuthType": AzureAuthClientSecret,
"cloudName": "azuremonitor",
}),
},
},
expectedRouteName: "appinsights",
expectedRouteURL: "https://api.applicationinsights.io",
@ -219,14 +215,14 @@ func TestAppInsightsPluginRoutes(t *testing.T) {
},
{
name: "plugin proxy route for the Azure China cloud",
dsInfo: datasourceInfo{
Settings: azureMonitorSettings{
AzureAuthType: AzureAuthClientSecret,
CloudName: "chinaazuremonitor",
},
},
datasource: &ApplicationInsightsDatasource{
cfg: cfg,
dsInfo: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"azureAuthType": AzureAuthClientSecret,
"cloudName": "chinaazuremonitor",
}),
},
},
expectedRouteName: "chinaappinsights",
expectedRouteURL: "https://api.applicationinsights.azure.cn",
@ -236,7 +232,7 @@ func TestAppInsightsPluginRoutes(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
route, routeName, err := tt.datasource.getPluginRoute(plugin)
route, routeName, err := tt.datasource.getPluginRoute(plugin, tt.dsInfo)
tt.Err(t, err)
if diff := cmp.Diff(tt.expectedRouteURL, route.URL, cmpopts.EquateNaNs()); diff != "" {

View File

@ -12,9 +12,12 @@ import (
"net/url"
"path"
"regexp"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/securejsondata"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
@ -26,8 +29,6 @@ import (
// AzureLogAnalyticsDatasource calls the Azure Log Analytics API's
type AzureLogAnalyticsDatasource struct {
httpClient *http.Client
dsInfo *models.DataSource
pluginManager plugins.Manager
cfg *setting.Cfg
}
@ -38,29 +39,26 @@ type AzureLogAnalyticsQuery struct {
RefID string
ResultFormat string
URL string
Model *simplejson.Json
JSON json.RawMessage
Params url.Values
Target string
TimeRange backend.TimeRange
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into the timeseries format
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) (plugins.DataResponse, error) {
result := plugins.DataResponse{
Results: map[string]plugins.DataQueryResult{},
}
// 3. parses the responses for each query into data frames
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
queries, err := e.buildQueries(originalQueries, timeRange)
queries, err := e.buildQueries(originalQueries, dsInfo)
if err != nil {
return plugins.DataResponse{}, err
return nil, err
}
for _, query := range queries {
result.Results[query.RefID] = e.executeQuery(ctx, query, originalQueries, timeRange)
result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo)
}
return result, nil
@ -89,18 +87,12 @@ func getApiURL(queryJSONModel logJSONQuery) string {
}
}
func (e *AzureLogAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) ([]*AzureLogAnalyticsQuery, error) {
func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureLogAnalyticsQuery, error) {
azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
for _, query := range queries {
queryBytes, err := query.Model.Encode()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Log Analytics query into JSON: %w", err)
}
queryJSONModel := logJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
}
@ -116,7 +108,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuer
apiURL := getApiURL(queryJSONModel)
params := url.Values{}
rawQuery, err := KqlInterpolate(query, timeRange, azureLogAnalyticsTarget.Query, "TimeGenerated")
rawQuery, err := KqlInterpolate(query, dsInfo, azureLogAnalyticsTarget.Query, "TimeGenerated")
if err != nil {
return nil, err
}
@ -126,23 +118,22 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuer
RefID: query.RefID,
ResultFormat: resultFormat,
URL: apiURL,
Model: query.Model,
JSON: query.JSON,
Params: params,
Target: params.Encode(),
TimeRange: query.TimeRange,
})
}
return azureLogAnalyticsQueries, nil
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery,
queries []plugins.DataSubQuery, timeRange plugins.DataTimeRange) plugins.DataQueryResult {
queryResult := plugins.DataQueryResult{RefID: query.RefID}
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo datasourceInfo) backend.DataResponse {
dataResponse := backend.DataResponse{}
queryResultErrorWithExecuted := func(err error) plugins.DataQueryResult {
queryResult.Error = err
frames := data.Frames{
dataResponseErrorWithExecuted := func(err error) backend.DataResponse {
dataResponse.Error = err
dataResponse.Frames = data.Frames{
&data.Frame{
RefID: query.RefID,
Meta: &data.FrameMeta{
@ -150,14 +141,13 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
},
},
}
queryResult.Dataframes = plugins.NewDecodedDataFrames(frames)
return queryResult
return dataResponse
}
req, err := e.createRequest(ctx, e.dsInfo)
req, err := e.createRequest(ctx, dsInfo)
if err != nil {
queryResult.Error = err
return queryResult
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
@ -165,10 +155,10 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
span, ctx := opentracing.StartSpanFromContext(ctx, "azure log analytics query")
span.SetTag("target", query.Target)
span.SetTag("from", timeRange.From)
span.SetTag("until", timeRange.To)
span.SetTag("datasource_id", e.dsInfo.Id)
span.SetTag("org_id", e.dsInfo.OrgId)
span.SetTag("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond))
span.SetTag("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond))
span.SetTag("datasource_id", dsInfo.DatasourceID)
span.SetTag("org_id", dsInfo.OrgID)
defer span.Finish()
@ -176,34 +166,39 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
azlog.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
res, err := ctxhttp.Do(ctx, e.httpClient, req)
res, err := ctxhttp.Do(ctx, dsInfo.HTTPClient, req)
if err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
logResponse, err := e.unmarshalResponse(res)
if err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
t, err := logResponse.GetPrimaryResultTable()
if err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
frame, err := ResponseTableToFrame(t)
if err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
model, err := simplejson.NewJson(query.JSON)
if err != nil {
return dataResponseErrorWithExecuted(err)
}
err = setAdditionalFrameMeta(frame,
query.Params.Get("query"),
query.Model.Get("subscriptionId").MustString(),
query.Model.Get("azureLogAnalytics").Get("workspace").MustString())
model.Get("subscriptionId").MustString(),
model.Get("azureLogAnalytics").Get("workspace").MustString())
if err != nil {
frame.AppendNotices(data.Notice{Severity: data.NoticeSeverityWarning, Text: "could not add custom metadata: " + err.Error()})
azlog.Warn("failed to add custom metadata to azure log analytics response", err)
@ -220,13 +215,23 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
}
}
}
frames := data.Frames{frame}
queryResult.Dataframes = plugins.NewDecodedDataFrames(frames)
return queryResult
dataResponse.Frames = data.Frames{frame}
return dataResponse
}
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
u, err := url.Parse(dsInfo.Url)
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
// find plugin
plugin := e.pluginManager.GetDataSource(dsName)
if plugin == nil {
return nil, errors.New("unable to find datasource plugin Azure Monitor")
}
logAnalyticsRoute, routeName, err := e.getPluginRoute(plugin, dsInfo)
if err != nil {
return nil, err
}
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, err
}
@ -240,24 +245,17 @@ func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo
req.Header.Set("Content-Type", "application/json")
// find plugin
plugin := e.pluginManager.GetDataSource(dsInfo.Type)
if plugin == nil {
return nil, errors.New("unable to find datasource plugin Azure Monitor")
}
logAnalyticsRoute, routeName, err := e.getPluginRoute(plugin)
if err != nil {
return nil, err
}
pluginproxy.ApplyRoute(ctx, req, routeName, logAnalyticsRoute, dsInfo, e.cfg)
// TODO: Use backend authentication instead
pluginproxy.ApplyRoute(ctx, req, routeName, logAnalyticsRoute, &models.DataSource{
JsonData: simplejson.NewFromAny(dsInfo.JSONData),
SecureJsonData: securejsondata.GetEncryptedJsonData(dsInfo.DecryptedSecureJSONData),
}, e.cfg)
return req, nil
}
func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, e.dsInfo.JsonData)
func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, dsInfo datasourceInfo) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, dsInfo)
if err != nil {
return nil, "", err
}

View File

@ -8,8 +8,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
@ -18,36 +17,28 @@ import (
func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
datasource := &AzureLogAnalyticsDatasource{}
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
timeRange := plugins.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
}
timeRange := backend.TimeRange{From: fromStart, To: fromStart.Add(34 * time.Minute)}
tests := []struct {
name string
queryModel []plugins.DataSubQuery
timeRange plugins.DataTimeRange
queryModel []backend.DataQuery
azureLogAnalyticsQueries []*AzureLogAnalyticsQuery
Err require.ErrorAssertionFunc
}{
{
name: "Query with macros should be interpolated",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
queryModel: []backend.DataQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"azureLogAnalytics": {
"resource": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": timeSeries,
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
RefID: "A",
TimeRange: timeRange,
},
},
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
@ -55,15 +46,17 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
RefID: "A",
ResultFormat: timeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureLogAnalytics": map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": {
"resource": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": timeSeries,
"workspace": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
Params: url.Values{"query": {"query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer"}},
Target: "query=query%3DPerf+%7C+where+%5B%27TimeGenerated%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27TimeGenerated%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+where+%5B%27Computer%27%5D+in+%28%27comp1%27%2C%27comp2%27%29+%7C+summarize+avg%28CounterValue%29+by+bin%28TimeGenerated%2C+34000ms%29%2C+Computer",
TimeRange: timeRange,
},
},
Err: require.NoError,
@ -71,20 +64,16 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
{
name: "Legacy queries with a workspace GUID should use workspace-centric url",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
queryModel: []backend.DataQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"azureLogAnalytics": {
"workspace": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
RefID: "A",
},
},
@ -93,13 +82,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
RefID: "A",
ResultFormat: timeSeries,
URL: "v1/workspaces/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureLogAnalytics": map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": {
"workspace": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
@ -109,20 +99,16 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
{
name: "Legacy workspace queries with a resource URI (from a template variable) should use resource-centric url",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
queryModel: []backend.DataQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"azureLogAnalytics": {
"workspace": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
RefID: "A",
},
},
@ -131,13 +117,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
RefID: "A",
ResultFormat: timeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureLogAnalytics": map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": {
"workspace": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
@ -147,20 +134,16 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
{
name: "Queries with a Resource should use resource-centric url",
timeRange: timeRange,
queryModel: []plugins.DataSubQuery{
queryModel: []backend.DataQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": map[string]interface{}{
"azureLogAnalytics": {
"resource": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
RefID: "A",
},
},
@ -169,13 +152,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
RefID: "A",
ResultFormat: timeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureLogAnalytics": map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
"azureLogAnalytics": {
"resource": "/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace",
"query": "query=Perf",
"resultFormat": timeSeries,
},
}),
"resultFormat": "%s"
}
}`, timeSeries)),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
@ -186,9 +170,9 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
queries, err := datasource.buildQueries(tt.queryModel, tt.timeRange)
queries, err := datasource.buildQueries(tt.queryModel, datasourceInfo{})
tt.Err(t, err)
if diff := cmp.Diff(tt.azureLogAnalyticsQueries, queries, cmpopts.IgnoreUnexported(simplejson.Json{})); diff != "" {
if diff := cmp.Diff(tt.azureLogAnalyticsQueries[0], queries[0]); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
@ -234,6 +218,7 @@ func TestPluginRoutes(t *testing.T) {
tests := []struct {
name string
dsInfo datasourceInfo
datasource *AzureLogAnalyticsDatasource
expectedProxypass string
expectedRouteURL string
@ -241,14 +226,14 @@ func TestPluginRoutes(t *testing.T) {
}{
{
name: "plugin proxy route for the Azure public cloud",
dsInfo: datasourceInfo{
Settings: azureMonitorSettings{
AzureAuthType: AzureAuthClientSecret,
CloudName: "azuremonitor",
},
},
datasource: &AzureLogAnalyticsDatasource{
cfg: cfg,
dsInfo: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"azureAuthType": AzureAuthClientSecret,
"cloudName": "azuremonitor",
}),
},
},
expectedProxypass: "loganalyticsazure",
expectedRouteURL: "https://api.loganalytics.io/",
@ -256,14 +241,14 @@ func TestPluginRoutes(t *testing.T) {
},
{
name: "plugin proxy route for the Azure China cloud",
dsInfo: datasourceInfo{
Settings: azureMonitorSettings{
AzureAuthType: AzureAuthClientSecret,
CloudName: "chinaazuremonitor",
},
},
datasource: &AzureLogAnalyticsDatasource{
cfg: cfg,
dsInfo: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"azureAuthType": AzureAuthClientSecret,
"cloudName": "chinaazuremonitor",
}),
},
},
expectedProxypass: "chinaloganalyticsazure",
expectedRouteURL: "https://api.loganalytics.azure.cn/",
@ -271,14 +256,14 @@ func TestPluginRoutes(t *testing.T) {
},
{
name: "plugin proxy route for the Azure Gov cloud",
dsInfo: datasourceInfo{
Settings: azureMonitorSettings{
AzureAuthType: AzureAuthClientSecret,
CloudName: "govazuremonitor",
},
},
datasource: &AzureLogAnalyticsDatasource{
cfg: cfg,
dsInfo: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"azureAuthType": AzureAuthClientSecret,
"cloudName": "govazuremonitor",
}),
},
},
expectedProxypass: "govloganalyticsazure",
expectedRouteURL: "https://api.loganalytics.us/",
@ -288,7 +273,7 @@ func TestPluginRoutes(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
route, proxypass, err := tt.datasource.getPluginRoute(plugin)
route, proxypass, err := tt.datasource.getPluginRoute(plugin, tt.dsInfo)
tt.Err(t, err)
if diff := cmp.Diff(tt.expectedRouteURL, route.URL, cmpopts.EquateNaNs()); diff != "" {

View File

@ -2,6 +2,7 @@ package azuremonitor
import (
"bytes"
"time"
"context"
"encoding/json"
@ -15,6 +16,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/securejsondata"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
@ -26,8 +28,6 @@ import (
// AzureResourceGraphDatasource calls the Azure Resource Graph API's
type AzureResourceGraphDatasource struct {
httpClient *http.Client
dsInfo *models.DataSource
pluginManager plugins.Manager
cfg *setting.Cfg
}
@ -38,8 +38,9 @@ type AzureResourceGraphQuery struct {
RefID string
ResultFormat string
URL string
Model *simplejson.Json
JSON json.RawMessage
InterpolatedQuery string
TimeRange backend.TimeRange
}
const argAPIVersion = "2018-09-01-preview"
@ -48,37 +49,30 @@ const argQueryProviderName = "/providers/Microsoft.ResourceGraph/resources"
// executeTimeSeriesQuery does the following:
// 1. builds the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into the timeseries format
func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) (backend.QueryDataResponse, error) {
result := backend.QueryDataResponse{
// 3. parses the responses for each query into data frames
func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
result := &backend.QueryDataResponse{
Responses: map[string]backend.DataResponse{},
}
queries, err := e.buildQueries(originalQueries, timeRange)
queries, err := e.buildQueries(originalQueries, dsInfo)
if err != nil {
return backend.QueryDataResponse{}, err
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, query, timeRange)
result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo)
}
return result, nil
}
func (e *AzureResourceGraphDatasource) buildQueries(queries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) ([]*AzureResourceGraphQuery, error) {
func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureResourceGraphQuery, error) {
var azureResourceGraphQueries []*AzureResourceGraphQuery
for _, query := range queries {
queryBytes, err := query.Model.Encode()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Resource Graph query into JSON: %w", err)
}
queryJSONModel := argJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Resource Graph query object from JSON: %w", err)
}
@ -91,7 +85,7 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []plugins.DataSubQue
resultFormat = "table"
}
interpolatedQuery, err := KqlInterpolate(query, timeRange, azureResourceGraphTarget.Query)
interpolatedQuery, err := KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query)
if err != nil {
return nil, err
@ -100,23 +94,23 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []plugins.DataSubQue
azureResourceGraphQueries = append(azureResourceGraphQueries, &AzureResourceGraphQuery{
RefID: query.RefID,
ResultFormat: resultFormat,
Model: query.Model,
JSON: query.JSON,
InterpolatedQuery: interpolatedQuery,
TimeRange: query.TimeRange,
})
}
return azureResourceGraphQueries, nil
}
func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery,
timeRange plugins.DataTimeRange) backend.DataResponse {
queryResult := backend.DataResponse{}
func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo datasourceInfo) backend.DataResponse {
dataResponse := backend.DataResponse{}
params := url.Values{}
params.Add("api-version", argAPIVersion)
queryResultErrorWithExecuted := func(err error) backend.DataResponse {
queryResult = backend.DataResponse{Error: err}
dataResponseErrorWithExecuted := func(err error) backend.DataResponse {
dataResponse = backend.DataResponse{Error: err}
frames := data.Frames{
&data.Frame{
RefID: query.RefID,
@ -125,25 +119,31 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
},
},
}
queryResult.Frames = frames
return queryResult
dataResponse.Frames = frames
return dataResponse
}
model, err := simplejson.NewJson(query.JSON)
if err != nil {
dataResponse.Error = err
return dataResponse
}
reqBody, err := json.Marshal(map[string]interface{}{
"subscriptions": query.Model.Get("subscriptions").MustStringArray(),
"subscriptions": model.Get("subscriptions").MustStringArray(),
"query": query.InterpolatedQuery,
})
if err != nil {
queryResult.Error = err
return queryResult
dataResponse.Error = err
return dataResponse
}
req, err := e.createRequest(ctx, e.dsInfo, reqBody)
req, err := e.createRequest(ctx, dsInfo, reqBody)
if err != nil {
queryResult.Error = err
return queryResult
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, argQueryProviderName)
@ -151,10 +151,10 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
span, ctx := opentracing.StartSpanFromContext(ctx, "azure resource graph query")
span.SetTag("interpolated_query", query.InterpolatedQuery)
span.SetTag("from", timeRange.From)
span.SetTag("until", timeRange.To)
span.SetTag("datasource_id", e.dsInfo.Id)
span.SetTag("org_id", e.dsInfo.OrgId)
span.SetTag("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond))
span.SetTag("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond))
span.SetTag("datasource_id", dsInfo.DatasourceID)
span.SetTag("org_id", dsInfo.OrgID)
defer span.Finish()
@ -162,35 +162,46 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
azlog.Debug("AzureResourceGraph", "Request ApiURL", req.URL.String())
res, err := ctxhttp.Do(ctx, e.httpClient, req)
res, err := ctxhttp.Do(ctx, dsInfo.HTTPClient, req)
if err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
argResponse, err := e.unmarshalResponse(res)
if err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
frame, err := ResponseTableToFrame(&argResponse.Data)
if err != nil {
return queryResultErrorWithExecuted(err)
return dataResponseErrorWithExecuted(err)
}
if frame.Meta == nil {
frame.Meta = &data.FrameMeta{}
}
frame.Meta.ExecutedQueryString = req.URL.RawQuery
queryResult.Frames = data.Frames{frame}
return queryResult
dataResponse.Frames = data.Frames{frame}
return dataResponse
}
func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource, reqBody []byte) (*http.Request, error) {
u, err := url.Parse(dsInfo.Url)
func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, reqBody []byte) (*http.Request, error) {
// find plugin
plugin := e.pluginManager.GetDataSource(dsName)
if plugin == nil {
return nil, errors.New("unable to find datasource plugin Azure Monitor")
}
argRoute, routeName, err := e.getPluginRoute(plugin, dsInfo)
if err != nil {
return nil, err
}
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, err
}
@ -204,24 +215,17 @@ func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo
req.Header.Set("Content-Type", "application/json")
req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
// find plugin
plugin := e.pluginManager.GetDataSource(dsInfo.Type)
if plugin == nil {
return nil, errors.New("unable to find datasource plugin Azure Monitor")
}
argRoute, routeName, err := e.getPluginRoute(plugin)
if err != nil {
return nil, err
}
pluginproxy.ApplyRoute(ctx, req, routeName, argRoute, dsInfo, e.cfg)
// TODO: Use backend authentication instead
pluginproxy.ApplyRoute(ctx, req, routeName, argRoute, &models.DataSource{
JsonData: simplejson.NewFromAny(dsInfo.JSONData),
SecureJsonData: securejsondata.GetEncryptedJsonData(dsInfo.DecryptedSecureJSONData),
}, e.cfg)
return req, nil
}
func (e *AzureResourceGraphDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, e.dsInfo.JsonData)
func (e *AzureResourceGraphDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, dsInfo datasourceInfo) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, dsInfo)
if err != nil {
return nil, "", err
}

View File

@ -7,8 +7,8 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/stretchr/testify/require"
)
@ -19,7 +19,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
tests := []struct {
name string
queryModel []plugins.DataSubQuery
queryModel []backend.DataQuery
timeRange plugins.DataTimeRange
azureResourceGraphQueries []*AzureResourceGraphQuery
Err require.ErrorAssertionFunc
@ -30,18 +30,15 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
queryModel: []plugins.DataSubQuery{
queryModel: []backend.DataQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
JSON: []byte(`{
"queryType": "Azure Resource Graph",
"azureResourceGraph": map[string]interface{}{
"azureResourceGraph": {
"query": "resources | where $__contains(name,'res1','res2')",
"resultFormat": "table",
},
}),
"resultFormat": "table"
}
}`),
RefID: "A",
},
},
@ -50,12 +47,13 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
RefID: "A",
ResultFormat: "table",
URL: "",
Model: simplejson.NewFromAny(map[string]interface{}{
"azureResourceGraph": map[string]interface{}{
JSON: []byte(`{
"queryType": "Azure Resource Graph",
"azureResourceGraph": {
"query": "resources | where $__contains(name,'res1','res2')",
"resultFormat": "table",
},
}),
"resultFormat": "table"
}
}`),
InterpolatedQuery: "resources | where ['name'] in ('res1','res2')",
},
},
@ -65,7 +63,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
queries, err := datasource.buildQueries(tt.queryModel, tt.timeRange)
queries, err := datasource.buildQueries(tt.queryModel, datasourceInfo{})
tt.Err(t, err)
if diff := cmp.Diff(tt.azureResourceGraphQueries, queries, cmpopts.IgnoreUnexported(simplejson.Json{})); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)

View File

@ -13,8 +13,11 @@ import (
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/securejsondata"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
@ -25,8 +28,6 @@ import (
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
httpClient *http.Client
dsInfo *models.DataSource
pluginManager plugins.Manager
cfg *setting.Cfg
}
@ -41,58 +42,40 @@ const azureMonitorAPIVersion = "2018-01-01"
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into the timeseries format
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) (plugins.DataResponse, error) {
result := plugins.DataResponse{
Results: map[string]plugins.DataQueryResult{},
}
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
queries, err := e.buildQueries(originalQueries, timeRange)
queries, err := e.buildQueries(originalQueries, dsInfo)
if err != nil {
return plugins.DataResponse{}, err
return nil, err
}
for _, query := range queries {
queryRes, resp, err := e.executeQuery(ctx, query, originalQueries, timeRange)
queryRes, resp, err := e.executeQuery(ctx, query, dsInfo)
if err != nil {
return plugins.DataResponse{}, err
return nil, err
}
frames, err := e.parseResponse(resp, query)
if err != nil {
queryRes.Error = err
} else {
queryRes.Dataframes = frames
queryRes.Frames = frames
}
result.Results[query.RefID] = queryRes
result.Responses[query.RefID] = queryRes
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(queries []plugins.DataSubQuery, timeRange plugins.DataTimeRange) ([]*AzureMonitorQuery, error) {
func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureMonitorQuery, error) {
azureMonitorQueries := []*AzureMonitorQuery{}
startTime, err := timeRange.ParseFrom()
if err != nil {
return nil, err
}
endTime, err := timeRange.ParseTo()
if err != nil {
return nil, err
}
for _, query := range queries {
var target string
queryBytes, err := query.Model.Encode()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Monitor query into JSON: %w", err)
}
queryJSONModel := azureMonitorJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
@ -106,7 +89,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []plugins.DataSubQuery, ti
urlComponents["resourceName"] = azJSONModel.ResourceName
ub := urlBuilder{
DefaultSubscription: query.DataSource.JsonData.Get("subscriptionId").MustString(),
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: queryJSONModel.AzureMonitor.ResourceGroup,
MetricDefinition: azJSONModel.MetricDefinition,
@ -119,7 +102,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []plugins.DataSubQuery, ti
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = setAutoTimeGrain(query.IntervalMS, timeGrains)
timeGrain, err = setAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
@ -127,7 +110,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []plugins.DataSubQuery, ti
params := url.Values{}
params.Add("api-version", azureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName) // MetricName or MetricNames ?
@ -168,21 +151,20 @@ func (e *AzureMonitorDatasource) buildQueries(queries []plugins.DataSubQuery, ti
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
})
}
return azureMonitorQueries, nil
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, queries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) (plugins.DataQueryResult, AzureMonitorResponse, error) {
queryResult := plugins.DataQueryResult{RefID: query.RefID}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, dsInfo datasourceInfo) (backend.DataResponse, AzureMonitorResponse, error) {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, e.dsInfo)
req, err := e.createRequest(ctx, dsInfo)
if err != nil {
queryResult.Error = err
return queryResult, AzureMonitorResponse{}, nil
dataResponse.Error = err
return dataResponse, AzureMonitorResponse{}, nil
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
@ -190,10 +172,10 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
span, ctx := opentracing.StartSpanFromContext(ctx, "azuremonitor query")
span.SetTag("target", query.Target)
span.SetTag("from", timeRange.From)
span.SetTag("until", timeRange.To)
span.SetTag("datasource_id", e.dsInfo.Id)
span.SetTag("org_id", e.dsInfo.OrgId)
span.SetTag("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond))
span.SetTag("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond))
span.SetTag("datasource_id", dsInfo.DatasourceID)
span.SetTag("org_id", dsInfo.OrgID)
defer span.Finish()
@ -201,16 +183,16 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
queryResult.Error = err
return queryResult, AzureMonitorResponse{}, nil
dataResponse.Error = err
return dataResponse, AzureMonitorResponse{}, nil
}
azlog.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
azlog.Debug("AzureMonitor", "Target", query.Target)
res, err := ctxhttp.Do(ctx, e.httpClient, req)
res, err := ctxhttp.Do(ctx, dsInfo.HTTPClient, req)
if err != nil {
queryResult.Error = err
return queryResult, AzureMonitorResponse{}, nil
dataResponse.Error = err
return dataResponse, AzureMonitorResponse{}, nil
}
defer func() {
if err := res.Body.Close(); err != nil {
@ -220,28 +202,26 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
data, err := e.unmarshalResponse(res)
if err != nil {
queryResult.Error = err
return queryResult, AzureMonitorResponse{}, nil
dataResponse.Error = err
return dataResponse, AzureMonitorResponse{}, nil
}
return queryResult, data, nil
return dataResponse, data, nil
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
// find plugin
plugin := e.pluginManager.GetDataSource(dsInfo.Type)
plugin := e.pluginManager.GetDataSource(dsName)
if plugin == nil {
return nil, errors.New("unable to find datasource plugin Azure Monitor")
}
azureMonitorRoute, routeName, err := e.getPluginRoute(plugin)
azureMonitorRoute, routeName, err := e.getPluginRoute(plugin, dsInfo)
if err != nil {
return nil, err
}
proxyPass := fmt.Sprintf("%s/subscriptions", routeName)
u, err := url.Parse(dsInfo.Url)
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, err
}
@ -255,13 +235,18 @@ func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *mode
req.Header.Set("Content-Type", "application/json")
pluginproxy.ApplyRoute(ctx, req, proxyPass, azureMonitorRoute, dsInfo, e.cfg)
// TODO: Use backend authentication instead
proxyPass := fmt.Sprintf("%s/subscriptions", routeName)
pluginproxy.ApplyRoute(ctx, req, proxyPass, azureMonitorRoute, &models.DataSource{
JsonData: simplejson.NewFromAny(dsInfo.JSONData),
SecureJsonData: securejsondata.GetEncryptedJsonData(dsInfo.DecryptedSecureJSONData),
}, e.cfg)
return req, nil
}
func (e *AzureMonitorDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, e.dsInfo.JsonData)
func (e *AzureMonitorDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, dsInfo datasourceInfo) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, dsInfo)
if err != nil {
return nil, "", err
}
@ -304,7 +289,7 @@ func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMon
}
func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *AzureMonitorQuery) (
plugins.DataFrames, error) {
data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
@ -364,7 +349,7 @@ func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *
frames = append(frames, frame)
}
return plugins.NewDecodedDataFrames(frames), nil
return frames, nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name

View File

@ -11,24 +11,30 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/stretchr/testify/require"
ptr "github.com/xorcare/pointer"
)
func TestAzureMonitorBuildQueries(t *testing.T) {
datasource := &AzureMonitorDatasource{}
dsInfo := datasourceInfo{
Settings: azureMonitorSettings{
SubscriptionId: "default-subscription",
},
}
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
duration, _ := time.ParseDuration("400s")
tests := []struct {
name string
azureMonitorVariedProperties map[string]interface{}
azureMonitorQueryTarget string
expectedInterval string
queryIntervalMS int64
queryInterval time.Duration
}{
{
name: "Parse queries from frontend and build AzureMonitor API queries",
@ -45,7 +51,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
"timeGrain": "auto",
"top": "10",
},
queryIntervalMS: 400000,
queryInterval: duration,
expectedInterval: "PT15M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2018-01-01&interval=PT15M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
@ -56,7 +62,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
"allowedTimeGrainsMs": []int64{60000, 300000},
"top": "10",
},
queryIntervalMS: 400000,
queryInterval: duration,
expectedInterval: "PT5M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2018-01-01&interval=PT5M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
@ -68,7 +74,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
"dimensionFilter": "*",
"top": "30",
},
queryIntervalMS: 400000,
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "%24filter=blob+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
},
@ -80,7 +86,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
"dimensionFilter": "*",
"top": "10",
},
queryIntervalMS: 400000,
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
@ -91,7 +97,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
"dimensionFilters": []azureMonitorDimensionFilter{{"blob", "eq", "*"}},
"top": "30",
},
queryIntervalMS: 400000,
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "%24filter=blob+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
},
@ -102,7 +108,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
"dimensionFilters": []azureMonitorDimensionFilter{{"blob", "eq", "*"}, {"tier", "eq", "*"}},
"top": "30",
},
queryIntervalMS: 400000,
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "%24filter=blob+eq+%27%2A%27+and+tier+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
},
@ -125,25 +131,18 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
for k, v := range commonAzureModelProps {
tt.azureMonitorVariedProperties[k] = v
}
tsdbQuery := plugins.DataQuery{
TimeRange: &plugins.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
Queries: []plugins.DataSubQuery{
azureMonitorJSON, _ := json.Marshal(tt.azureMonitorVariedProperties)
tsdbQuery := []backend.DataQuery{
{
DataSource: &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"subscriptionId": "default-subscription",
}),
},
Model: simplejson.NewFromAny(map[string]interface{}{
JSON: []byte(fmt.Sprintf(`{
"subscription": "12345678-aaaa-bbbb-cccc-123456789abc",
"azureMonitor": tt.azureMonitorVariedProperties,
},
),
"azureMonitor": %s
}`, string(azureMonitorJSON))),
RefID: "A",
IntervalMS: tt.queryIntervalMS,
Interval: tt.queryInterval,
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
},
}
@ -159,9 +158,13 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
Target: tt.azureMonitorQueryTarget,
RefID: "A",
Alias: "testalias",
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
}
queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange)
queries, err := datasource.buildQueries(tsdbQuery, dsInfo)
require.NoError(t, err)
if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(AzureMonitorQuery{}, "Params")); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
@ -433,16 +436,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
azData := loadTestFile(t, "azuremonitor/"+tt.responseFile)
//nolint: staticcheck // plugins.DataPlugin deprecated
res := plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"}
require.NotNil(t, res)
dframes, err := datasource.parseResponse(azData, tt.mockQuery)
require.NoError(t, err)
require.NotNil(t, dframes)
frames, err := dframes.Decoded()
require.NoError(t, err)
if diff := cmp.Diff(tt.expectedFrames, frames, data.FrameTestCompareOptions()...); diff != "" {
if diff := cmp.Diff(tt.expectedFrames, dframes, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})

View File

@ -2,19 +2,27 @@ package azuremonitor
import (
"context"
"encoding/json"
"fmt"
"net/http"
"regexp"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/setting"
)
const timeSeries = "time_series"
const (
timeSeries = "time_series"
dsName = "grafana-azure-monitor-datasource"
)
var (
azlog = log.New("tsdb.azuremonitor")
@ -33,144 +41,108 @@ type Service struct {
PluginManager plugins.Manager `inject:""`
HTTPClientProvider httpclient.Provider `inject:""`
Cfg *setting.Cfg `inject:""`
BackendPluginManager backendplugin.Manager `inject:""`
}
func (s *Service) Init() error {
return nil
// azureMonitorSettings mirrors the JSONData payload saved for an Azure
// Monitor datasource instance (as configured in the datasource settings UI).
// It covers all four services: Azure Monitor, Application Insights,
// Log Analytics and Resource Graph.
type azureMonitorSettings struct {
AppInsightsAppId string `json:"appInsightsAppId"`
AzureLogAnalyticsSameAs bool `json:"azureLogAnalyticsSameAs"`
ClientId string `json:"clientId"`
CloudName string `json:"cloudName"`
LogAnalyticsClientId string `json:"logAnalyticsClientId"`
LogAnalyticsDefaultWorkspace string `json:"logAnalyticsDefaultWorkspace"`
LogAnalyticsSubscriptionId string `json:"logAnalyticsSubscriptionId"`
LogAnalyticsTenantId string `json:"logAnalyticsTenantId"`
SubscriptionId string `json:"subscriptionId"`
TenantId string `json:"tenantId"`
// AzureAuthType, when set, explicitly selects the authentication method;
// otherwise it is derived from the presence of TenantId/ClientId.
AzureAuthType string `json:"azureAuthType,omitempty"`
}
// AzureMonitorExecutor executes queries for the Azure Monitor datasource - all four services
type AzureMonitorExecutor struct {
httpClient *http.Client
dsInfo *models.DataSource
pluginManager plugins.Manager
cfg *setting.Cfg
// datasourceInfo is the per-instance state shared by all Azure Monitor
// service executors. It is built once per datasource instance by
// NewInstanceSettings and handed to each query execution.
type datasourceInfo struct {
// Settings is the typed view of the instance's JSONData.
Settings azureMonitorSettings
// HTTPClient is the instance-scoped client built from the datasource's
// HTTP options.
HTTPClient *http.Client
URL string
// JSONData keeps the raw (untyped) settings; used when proxy routes need
// the original simplejson representation.
JSONData map[string]interface{}
DecryptedSecureJSONData map[string]string
DatasourceID int64
// OrgID is not known at instance-creation time; it is filled in
// per-request from the plugin context (see newExecutor).
OrgID int64
}
// NewAzureMonitorExecutor initializes a http client
//nolint: staticcheck // plugins.DataPlugin deprecated
func (s *Service) NewExecutor(dsInfo *models.DataSource) (plugins.DataPlugin, error) {
httpClient, err := dsInfo.GetHTTPClient(s.HTTPClientProvider)
func NewInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc {
return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
opts, err := settings.HTTPClientOptions()
if err != nil {
return nil, err
}
return &AzureMonitorExecutor{
httpClient: httpClient,
dsInfo: dsInfo,
pluginManager: s.PluginManager,
cfg: s.Cfg,
}, nil
client, err := httpClientProvider.New(opts)
if err != nil {
return nil, err
}
jsonData := map[string]interface{}{}
err = json.Unmarshal(settings.JSONData, &jsonData)
if err != nil {
return nil, fmt.Errorf("error reading settings: %w", err)
}
azMonitorSettings := azureMonitorSettings{}
err = json.Unmarshal(settings.JSONData, &azMonitorSettings)
if err != nil {
return nil, fmt.Errorf("error reading settings: %w", err)
}
model := datasourceInfo{
Settings: azMonitorSettings,
HTTPClient: client,
URL: settings.URL,
JSONData: jsonData,
DecryptedSecureJSONData: settings.DecryptedSecureJSONData,
DatasourceID: settings.ID,
}
return model, nil
}
}
// Query takes in the frontend queries, parses them into the query format
// expected by chosen Azure Monitor service (Azure Monitor, App Insights etc.)
// executes the queries against the API and parses the response into
// the right format
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *AzureMonitorExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) {
var err error
var azureMonitorQueries []plugins.DataSubQuery
var applicationInsightsQueries []plugins.DataSubQuery
var azureLogAnalyticsQueries []plugins.DataSubQuery
var insightsAnalyticsQueries []plugins.DataSubQuery
var azureResourceGraphQueries []plugins.DataSubQuery
for _, query := range tsdbQuery.Queries {
queryType := query.Model.Get("queryType").MustString("")
switch queryType {
case "Azure Monitor":
azureMonitorQueries = append(azureMonitorQueries, query)
case "Application Insights":
applicationInsightsQueries = append(applicationInsightsQueries, query)
case "Azure Log Analytics":
azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, query)
case "Insights Analytics":
insightsAnalyticsQueries = append(insightsAnalyticsQueries, query)
case "Azure Resource Graph":
azureResourceGraphQueries = append(azureResourceGraphQueries, query)
default:
return plugins.DataResponse{}, fmt.Errorf("alerting not supported for %q", queryType)
}
}
azDatasource := &AzureMonitorDatasource{
httpClient: e.httpClient,
dsInfo: e.dsInfo,
pluginManager: e.pluginManager,
cfg: e.cfg,
}
aiDatasource := &ApplicationInsightsDatasource{
httpClient: e.httpClient,
dsInfo: e.dsInfo,
pluginManager: e.pluginManager,
cfg: e.cfg,
}
alaDatasource := &AzureLogAnalyticsDatasource{
httpClient: e.httpClient,
dsInfo: e.dsInfo,
pluginManager: e.pluginManager,
cfg: e.cfg,
}
iaDatasource := &InsightsAnalyticsDatasource{
httpClient: e.httpClient,
dsInfo: e.dsInfo,
pluginManager: e.pluginManager,
cfg: e.cfg,
}
argDatasource := &AzureResourceGraphDatasource{
httpClient: e.httpClient,
dsInfo: e.dsInfo,
pluginManager: e.pluginManager,
}
azResult, err := azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, *tsdbQuery.TimeRange)
if err != nil {
return plugins.DataResponse{}, err
}
aiResult, err := aiDatasource.executeTimeSeriesQuery(ctx, applicationInsightsQueries, *tsdbQuery.TimeRange)
if err != nil {
return plugins.DataResponse{}, err
}
alaResult, err := alaDatasource.executeTimeSeriesQuery(ctx, azureLogAnalyticsQueries, *tsdbQuery.TimeRange)
if err != nil {
return plugins.DataResponse{}, err
}
iaResult, err := iaDatasource.executeTimeSeriesQuery(ctx, insightsAnalyticsQueries, *tsdbQuery.TimeRange)
if err != nil {
return plugins.DataResponse{}, err
}
argResult, err := argDatasource.executeTimeSeriesQuery(ctx, azureResourceGraphQueries, *tsdbQuery.TimeRange)
if err != nil {
return plugins.DataResponse{}, err
}
for k, v := range aiResult.Results {
azResult.Results[k] = v
}
for k, v := range alaResult.Results {
azResult.Results[k] = v
}
for k, v := range iaResult.Results {
azResult.Results[k] = v
}
for k, v := range argResult.Responses {
azResult.Results[k] = plugins.DataQueryResult{Error: v.Error, Dataframes: plugins.NewDecodedDataFrames(v.Frames)}
}
return azResult, nil
// azDatasourceExecutor is implemented by each Azure Monitor service
// (Azure Monitor, Application Insights, Log Analytics, Insights Analytics,
// Resource Graph) to execute the batch of queries routed to it.
type azDatasourceExecutor interface {
executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error)
}
// newExecutor builds a QueryTypeMux that routes each supported query type
// ("Azure Monitor", "Application Insights", ...) to its dedicated service
// executor. Per-request, the datasource instance settings (datasourceInfo)
// are resolved through the instance manager from the plugin context.
//
// NOTE(review): httpC is currently unused here — instance HTTP clients are
// created in NewInstanceSettings; the parameter is kept for signature
// stability with the service wiring.
func newExecutor(im instancemgmt.InstanceManager, pm plugins.Manager, httpC httpclient.Provider, cfg *setting.Cfg) *datasource.QueryTypeMux {
	mux := datasource.NewQueryTypeMux()
	executors := map[string]azDatasourceExecutor{
		"Azure Monitor":        &AzureMonitorDatasource{pm, cfg},
		"Application Insights": &ApplicationInsightsDatasource{pm, cfg},
		"Azure Log Analytics":  &AzureLogAnalyticsDatasource{pm, cfg},
		"Insights Analytics":   &InsightsAnalyticsDatasource{pm, cfg},
		"Azure Resource Graph": &AzureResourceGraphDatasource{pm, cfg},
	}
	for dsType, ds := range executors {
		// Capture fresh copies: the handler closure outlives this iteration,
		// and capturing the value also avoids a map lookup on every request.
		dsType, ds := dsType, ds
		mux.HandleFunc(dsType, func(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
			i, err := im.Get(req.PluginContext)
			if err != nil {
				return nil, err
			}
			// Guarded assertion: a mismatched instance type is reported as an
			// error instead of panicking the plugin host.
			dsInfo, ok := i.(datasourceInfo)
			if !ok {
				return nil, fmt.Errorf("unexpected instance type %T for query type %q", i, dsType)
			}
			dsInfo.OrgID = req.PluginContext.OrgID
			return ds.executeTimeSeriesQuery(ctx, req.Queries, dsInfo)
		})
	}
	return mux
}
// Init wires the Azure Monitor datasource into the backend plugin manager:
// it creates the instance manager (which parses per-datasource settings),
// builds the query-type mux, and registers the resulting core plugin under
// dsName. A registration failure is logged but does not abort startup.
func (s *Service) Init() error {
	instanceManager := datasource.NewInstanceManager(NewInstanceSettings(s.HTTPClientProvider))
	queryMux := newExecutor(instanceManager, s.PluginManager, s.HTTPClientProvider, s.Cfg)
	factory := coreplugin.New(backend.ServeOpts{QueryDataHandler: queryMux})
	if err := s.BackendPluginManager.Register(dsName, factory); err != nil {
		azlog.Error("Failed to register plugin", "error", err)
	}
	return nil
}

View File

@ -3,7 +3,6 @@ package azuremonitor
import (
"fmt"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/setting"
)
@ -20,12 +19,12 @@ const (
azureMonitorGermany = "germanyazuremonitor"
)
func getAuthType(cfg *setting.Cfg, pluginData *simplejson.Json) string {
if authType := pluginData.Get("azureAuthType").MustString(); authType != "" {
return authType
func getAuthType(cfg *setting.Cfg, dsInfo datasourceInfo) string {
if dsInfo.Settings.AzureAuthType != "" {
return dsInfo.Settings.AzureAuthType
} else {
tenantId := pluginData.Get("tenantId").MustString()
clientId := pluginData.Get("clientId").MustString()
tenantId := dsInfo.Settings.TenantId
clientId := dsInfo.Settings.ClientId
// If authentication type isn't explicitly specified and datasource has client credentials,
// then this is existing datasource which is configured for app registration (client secret)
@ -59,15 +58,15 @@ func getDefaultAzureCloud(cfg *setting.Cfg) (string, error) {
}
}
func getAzureCloud(cfg *setting.Cfg, pluginData *simplejson.Json) (string, error) {
authType := getAuthType(cfg, pluginData)
func getAzureCloud(cfg *setting.Cfg, dsInfo datasourceInfo) (string, error) {
authType := getAuthType(cfg, dsInfo)
switch authType {
case AzureAuthManagedIdentity:
// In case of managed identity, the cloud is always same as where Grafana is hosted
return getDefaultAzureCloud(cfg)
case AzureAuthClientSecret:
if cloud := pluginData.Get("cloudName").MustString(); cloud != "" {
return cloud, nil
if dsInfo.Settings.CloudName != "" {
return dsInfo.Settings.CloudName, nil
} else {
return getDefaultAzureCloud(cfg)
}

View File

@ -11,8 +11,11 @@ import (
"net/url"
"path"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/securejsondata"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
@ -22,8 +25,6 @@ import (
)
type InsightsAnalyticsDatasource struct {
httpClient *http.Client
dsInfo *models.DataSource
pluginManager plugins.Manager
cfg *setting.Cfg
}
@ -40,38 +41,29 @@ type InsightsAnalyticsQuery struct {
Target string
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context,
originalQueries []plugins.DataSubQuery, timeRange plugins.DataTimeRange) (plugins.DataResponse, error) {
result := plugins.DataResponse{
Results: map[string]plugins.DataQueryResult{},
}
originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
queries, err := e.buildQueries(originalQueries, timeRange)
queries, err := e.buildQueries(originalQueries, dsInfo)
if err != nil {
return plugins.DataResponse{}, err
return nil, err
}
for _, query := range queries {
result.Results[query.RefID] = e.executeQuery(ctx, query)
result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo)
}
return result, nil
}
func (e *InsightsAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuery,
timeRange plugins.DataTimeRange) ([]*InsightsAnalyticsQuery, error) {
func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*InsightsAnalyticsQuery, error) {
iaQueries := []*InsightsAnalyticsQuery{}
for _, query := range queries {
queryBytes, err := query.Model.Encode()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Application Insights Analytics query into JSON: %w", err)
}
qm := InsightsAnalyticsQuery{}
queryJSONModel := insightsAnalyticsJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Application Insights Analytics query object from JSON: %w", err)
}
@ -84,7 +76,7 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuer
return nil, fmt.Errorf("query is missing query string property")
}
qm.InterpolatedQuery, err = KqlInterpolate(query, timeRange, qm.RawQuery)
qm.InterpolatedQuery, err = KqlInterpolate(query, dsInfo, qm.RawQuery)
if err != nil {
return nil, err
}
@ -98,26 +90,25 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuer
return iaQueries, nil
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery) plugins.DataQueryResult {
queryResult := plugins.DataQueryResult{RefID: query.RefID}
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo datasourceInfo) backend.DataResponse {
dataResponse := backend.DataResponse{}
queryResultError := func(err error) plugins.DataQueryResult {
queryResult.Error = err
return queryResult
dataResponseError := func(err error) backend.DataResponse {
dataResponse.Error = err
return dataResponse
}
req, err := e.createRequest(ctx, e.dsInfo)
req, err := e.createRequest(ctx, dsInfo)
if err != nil {
return queryResultError(err)
return dataResponseError(err)
}
req.URL.Path = path.Join(req.URL.Path, "query")
req.URL.RawQuery = query.Params.Encode()
span, ctx := opentracing.StartSpanFromContext(ctx, "application insights analytics query")
span.SetTag("target", query.Target)
span.SetTag("datasource_id", e.dsInfo.Id)
span.SetTag("org_id", e.dsInfo.OrgId)
span.SetTag("datasource_id", dsInfo.DatasourceID)
span.SetTag("org_id", dsInfo.OrgID)
defer span.Finish()
@ -131,14 +122,14 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
}
azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
res, err := ctxhttp.Do(ctx, e.httpClient, req)
res, err := ctxhttp.Do(ctx, dsInfo.HTTPClient, req)
if err != nil {
return queryResultError(err)
return dataResponseError(err)
}
body, err := ioutil.ReadAll(res.Body)
if err != nil {
return queryResultError(err)
return dataResponseError(err)
}
defer func() {
if err := res.Body.Close(); err != nil {
@ -148,24 +139,24 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return queryResultError(fmt.Errorf("request failed, status: %s, body: %s", res.Status, body))
return dataResponseError(fmt.Errorf("request failed, status: %s, body: %s", res.Status, body))
}
var logResponse AzureLogAnalyticsResponse
d := json.NewDecoder(bytes.NewReader(body))
d.UseNumber()
err = d.Decode(&logResponse)
if err != nil {
return queryResultError(err)
return dataResponseError(err)
}
t, err := logResponse.GetPrimaryResultTable()
if err != nil {
return queryResultError(err)
return dataResponseError(err)
}
frame, err := ResponseTableToFrame(t)
if err != nil {
return queryResultError(err)
return dataResponseError(err)
}
if query.ResultFormat == timeSeries {
@ -182,28 +173,26 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
}
}
}
frames := data.Frames{frame}
queryResult.Dataframes = plugins.NewDecodedDataFrames(frames)
dataResponse.Frames = data.Frames{frame}
return queryResult
return dataResponse
}
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
// find plugin
plugin := e.pluginManager.GetDataSource(dsInfo.Type)
plugin := e.pluginManager.GetDataSource(dsName)
if plugin == nil {
return nil, errors.New("unable to find datasource plugin Azure Application Insights")
}
appInsightsRoute, routeName, err := e.getPluginRoute(plugin)
appInsightsRoute, routeName, err := e.getPluginRoute(plugin, dsInfo)
if err != nil {
return nil, err
}
appInsightsAppID := dsInfo.JsonData.Get("appInsightsAppId").MustString()
proxyPass := fmt.Sprintf("%s/v1/apps/%s", routeName, appInsightsAppID)
appInsightsAppID := dsInfo.Settings.AppInsightsAppId
u, err := url.Parse(dsInfo.Url)
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, fmt.Errorf("unable to parse url for Application Insights Analytics datasource: %w", err)
}
@ -215,13 +204,18 @@ func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo
return nil, errutil.Wrap("Failed to create request", err)
}
pluginproxy.ApplyRoute(ctx, req, proxyPass, appInsightsRoute, dsInfo, e.cfg)
// TODO: Use backend authentication instead
proxyPass := fmt.Sprintf("%s/v1/apps/%s", routeName, appInsightsAppID)
pluginproxy.ApplyRoute(ctx, req, proxyPass, appInsightsRoute, &models.DataSource{
JsonData: simplejson.NewFromAny(dsInfo.JSONData),
SecureJsonData: securejsondata.GetEncryptedJsonData(dsInfo.DecryptedSecureJSONData),
}, e.cfg)
return req, nil
}
func (e *InsightsAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, e.dsInfo.JsonData)
func (e *InsightsAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, dsInfo datasourceInfo) (*plugins.AppPluginRoute, string, error) {
cloud, err := getAzureCloud(e.cfg, dsInfo)
if err != nil {
return nil, "", err
}

View File

@ -6,7 +6,9 @@ import (
"strings"
"time"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb/interval"
)
@ -15,8 +17,8 @@ const sExpr = `\$` + rsIdentifier + `(?:\(([^\)]*)\))?`
const escapeMultiExpr = `\$__escapeMulti\(('.*')\)`
type kqlMacroEngine struct {
timeRange plugins.DataTimeRange
query plugins.DataSubQuery
timeRange backend.TimeRange
query backend.DataQuery
}
// Macros:
@ -29,18 +31,18 @@ type kqlMacroEngine struct {
// - $__escapeMulti('\\vm\eth0\Total','\\vm\eth2\Total') -> @'\\vm\eth0\Total',@'\\vm\eth2\Total'
// KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries
func KqlInterpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, kql string, defaultTimeField ...string) (string, error) {
func KqlInterpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string, defaultTimeField ...string) (string, error) {
engine := kqlMacroEngine{}
defaultTimeFieldForAllDatasources := "timestamp"
if len(defaultTimeField) > 0 {
defaultTimeFieldForAllDatasources = defaultTimeField[0]
}
return engine.Interpolate(query, timeRange, kql, defaultTimeFieldForAllDatasources)
return engine.Interpolate(query, dsInfo, kql, defaultTimeFieldForAllDatasources)
}
func (m *kqlMacroEngine) Interpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, kql string, defaultTimeField string) (string, error) {
m.timeRange = timeRange
func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string, defaultTimeField string) (string, error) {
m.timeRange = query.TimeRange
m.query = query
rExp, _ := regexp.Compile(sExpr)
escapeMultiRegex, _ := regexp.Compile(escapeMultiExpr)
@ -69,7 +71,7 @@ func (m *kqlMacroEngine) Interpolate(query plugins.DataSubQuery, timeRange plugi
for i, arg := range args {
args[i] = strings.Trim(arg, " ")
}
res, err := m.evaluateMacro(groups[1], defaultTimeField, args)
res, err := m.evaluateMacro(groups[1], defaultTimeField, args, dsInfo)
if err != nil && macroError == nil {
macroError = err
return "macro_error()"
@ -84,7 +86,7 @@ func (m *kqlMacroEngine) Interpolate(query plugins.DataSubQuery, timeRange plugi
return kql, nil
}
func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string) (string, error) {
func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string, dsInfo datasourceInfo) (string, error) {
switch name {
case "timeFilter":
timeColumn := defaultTimeField
@ -92,27 +94,34 @@ func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, arg
timeColumn = args[0]
}
return fmt.Sprintf("['%s'] >= datetime('%s') and ['%s'] <= datetime('%s')", timeColumn,
m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), timeColumn,
m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
m.timeRange.From.UTC().Format(time.RFC3339), timeColumn,
m.timeRange.To.UTC().Format(time.RFC3339)), nil
case "timeFrom", "__from":
return fmt.Sprintf("datetime('%s')", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
return fmt.Sprintf("datetime('%s')", m.timeRange.From.UTC().Format(time.RFC3339)), nil
case "timeTo", "__to":
return fmt.Sprintf("datetime('%s')", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
return fmt.Sprintf("datetime('%s')", m.timeRange.To.UTC().Format(time.RFC3339)), nil
case "interval":
var it time.Duration
if m.query.IntervalMS == 0 {
to := m.timeRange.MustGetTo().UnixNano()
from := m.timeRange.MustGetFrom().UnixNano()
if m.query.Interval.Milliseconds() == 0 {
to := m.timeRange.To.UnixNano()
from := m.timeRange.From.UnixNano()
// default to "100 datapoints" if nothing in the query is more specific
defaultInterval := time.Duration((to - from) / 60)
var err error
it, err = interval.GetIntervalFrom(m.query.DataSource, m.query.Model, defaultInterval)
model, err := simplejson.NewJson(m.query.JSON)
if err != nil {
azlog.Warn("Unable to get interval from query", "datasource", m.query.DataSource, "model", m.query.Model)
azlog.Warn("Unable to parse model from query", "JSON", m.query.JSON)
it = defaultInterval
} else {
it, err = interval.GetIntervalFrom(&models.DataSource{
JsonData: simplejson.NewFromAny(dsInfo.JSONData),
}, model, defaultInterval)
if err != nil {
azlog.Warn("Unable to get interval from query", "model", model)
it = defaultInterval
}
}
} else {
it = time.Millisecond * time.Duration(m.query.IntervalMS)
it = time.Millisecond * time.Duration(m.query.Interval.Milliseconds())
}
return fmt.Sprintf("%dms", int(it/time.Millisecond)), nil
case "contains":

View File

@ -1,96 +1,92 @@
package azuremonitor
import (
"fmt"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
)
func TestAzureLogAnalyticsMacros(t *testing.T) {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
timeRange := plugins.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
timeRange := backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
}
tests := []struct {
name string
query plugins.DataSubQuery
timeRange plugins.DataTimeRange
query backend.DataQuery
kql string
expected string
Err require.ErrorAssertionFunc
}{
{
name: "invalid macro should be ignored",
query: plugins.DataSubQuery{},
query: backend.DataQuery{},
kql: "$__invalid()",
expected: "$__invalid()",
Err: require.NoError,
},
{
name: "Kusto variables should be ignored",
query: plugins.DataSubQuery{},
query: backend.DataQuery{},
kql: ") on $left.b == $right.y",
expected: ") on $left.b == $right.y",
Err: require.NoError,
},
{
name: "$__contains macro with a multi template variable that has multiple selected values as a parameter should build in clause",
query: plugins.DataSubQuery{},
query: backend.DataQuery{},
kql: "$__contains(col, 'val1','val2')",
expected: "['col'] in ('val1','val2')",
Err: require.NoError,
},
{
name: "$__contains macro with a multi template variable that has a single selected value as a parameter should build in clause",
query: plugins.DataSubQuery{},
query: backend.DataQuery{},
kql: "$__contains(col, 'val1' )",
expected: "['col'] in ('val1')",
Err: require.NoError,
},
{
name: "$__contains macro with multi template variable has custom All value as a parameter should return a true expression",
query: plugins.DataSubQuery{},
query: backend.DataQuery{},
kql: "$__contains(col, all)",
expected: "1 == 1",
Err: require.NoError,
},
{
name: "$__timeFilter has no column parameter should use default time field",
query: plugins.DataSubQuery{},
query: backend.DataQuery{TimeRange: timeRange},
kql: "$__timeFilter()",
expected: "['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z')",
Err: require.NoError,
},
{
name: "$__timeFilter has time field parameter",
query: plugins.DataSubQuery{},
query: backend.DataQuery{TimeRange: timeRange},
kql: "$__timeFilter(myTimeField)",
expected: "['myTimeField'] >= datetime('2018-03-15T13:00:00Z') and ['myTimeField'] <= datetime('2018-03-15T13:34:00Z')",
Err: require.NoError,
},
{
name: "$__timeFrom and $__timeTo is in the query and range is a specific interval",
query: plugins.DataSubQuery{},
query: backend.DataQuery{TimeRange: timeRange},
kql: "myTimeField >= $__timeFrom() and myTimeField <= $__timeTo()",
expected: "myTimeField >= datetime('2018-03-15T13:00:00Z') and myTimeField <= datetime('2018-03-15T13:34:00Z')",
Err: require.NoError,
},
{
name: "$__interval should use the defined interval from the query",
timeRange: timeRange,
query: plugins.DataSubQuery{
Model: simplejson.NewFromAny(map[string]interface{}{
"interval": "5m",
}),
query: backend.DataQuery{
JSON: []byte(`{
"interval": "5m"
}`),
TimeRange: timeRange,
},
kql: "bin(TimeGenerated, $__interval)",
expected: "bin(TimeGenerated, 300000ms)",
@ -98,40 +94,28 @@ func TestAzureLogAnalyticsMacros(t *testing.T) {
},
{
name: "$__interval should use the default interval if none is specified",
query: plugins.DataSubQuery{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
query: backend.DataQuery{TimeRange: timeRange},
kql: "bin(TimeGenerated, $__interval)",
expected: "bin(TimeGenerated, 34000ms)",
Err: require.NoError,
},
{
name: "$__escapeMulti with multi template variable should replace values with KQL style escaped strings",
query: plugins.DataSubQuery{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
query: backend.DataQuery{},
kql: `CounterPath in ($__escapeMulti('\\grafana-vm\Network(eth0)\Total','\\grafana-vm\Network(eth1)\Total'))`,
expected: `CounterPath in (@'\\grafana-vm\Network(eth0)\Total', @'\\grafana-vm\Network(eth1)\Total')`,
Err: require.NoError,
},
{
name: "$__escapeMulti with multi template variable and has one selected value that contains comma",
query: plugins.DataSubQuery{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
query: backend.DataQuery{},
kql: `$__escapeMulti('\\grafana-vm,\Network(eth0)\Total Bytes Received')`,
expected: `@'\\grafana-vm,\Network(eth0)\Total Bytes Received'`,
Err: require.NoError,
},
{
name: "$__escapeMulti with multi template variable and is not wrapped in single quotes should fail",
query: plugins.DataSubQuery{
DataSource: &models.DataSource{},
Model: simplejson.NewFromAny(map[string]interface{}{}),
},
query: backend.DataQuery{},
kql: `$__escapeMulti(\\grafana-vm,\Network(eth0)\Total Bytes Received)`,
expected: "",
Err: require.Error,
@ -141,7 +125,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
defaultTimeField := "TimeGenerated"
rawQuery, err := KqlInterpolate(tt.query, timeRange, tt.kql, defaultTimeField)
rawQuery, err := KqlInterpolate(tt.query, datasourceInfo{}, tt.kql, defaultTimeField)
tt.Err(t, err)
if diff := cmp.Diff(tt.expected, rawQuery, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)

View File

@ -6,6 +6,8 @@ import (
"net/url"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
// AzureMonitorQuery is the query for all the services as they have similar queries
@ -17,6 +19,7 @@ type AzureMonitorQuery struct {
Params url.Values
RefID string
Alias string
TimeRange backend.TimeRange
}
// AzureMonitorResponse is the json response from the Azure Monitor API

View File

@ -65,7 +65,6 @@ func (s *Service) Init() error {
s.registry["mysql"] = mysql.New(s.HTTPClientProvider)
s.registry["elasticsearch"] = elasticsearch.New(s.HTTPClientProvider)
s.registry["stackdriver"] = s.CloudMonitoringService.NewExecutor
s.registry["grafana-azure-monitor-datasource"] = s.AzureMonitorService.NewExecutor
s.registry["loki"] = loki.New(s.HTTPClientProvider)
s.registry["tempo"] = tempo.New(s.HTTPClientProvider)
return nil