Azure Monitor: Implement CallResourceHandler in the backend (#35581)
This commit is contained in:
parent a6872deeb9
commit 96efbbaed1
@@ -20,7 +20,9 @@ import (
)

// ApplicationInsightsDatasource calls the application insights query API.
type ApplicationInsightsDatasource struct{}
type ApplicationInsightsDatasource struct {
    proxy serviceProxy
}

// ApplicationInsightsQuery is the model that holds the information
// needed to make a metrics query to Application Insights, and the information
@@ -41,8 +43,12 @@ type ApplicationInsightsQuery struct {
    aggregation string
}

func (e *ApplicationInsightsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
    e.proxy.Do(rw, req, cli)
}

func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context,
    originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
    originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string) (*backend.QueryDataResponse, error) {
    result := backend.NewQueryDataResponse()

    queries, err := e.buildQueries(originalQueries)
@@ -51,7 +57,7 @@ func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Conte
    }

    for _, query := range queries {
        queryRes, err := e.executeQuery(ctx, query, dsInfo)
        queryRes, err := e.executeQuery(ctx, query, dsInfo, client, url)
        if err != nil {
            return nil, err
        }
@@ -122,11 +128,11 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery
    return applicationInsightsQueries, nil
}

func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo datasourceInfo) (
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo datasourceInfo, client *http.Client, url string) (
    backend.DataResponse, error) {
    dataResponse := backend.DataResponse{}

    req, err := e.createRequest(ctx, dsInfo)
    req, err := e.createRequest(ctx, dsInfo, url)
    if err != nil {
        dataResponse.Error = err
        return dataResponse, nil
@@ -154,7 +160,7 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
    }

    azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
    res, err := ctxhttp.Do(ctx, dsInfo.Services[appInsights].HTTPClient, req)
    res, err := ctxhttp.Do(ctx, client, req)
    if err != nil {
        dataResponse.Error = err
        return dataResponse, nil
@@ -193,16 +199,14 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
    return dataResponse, nil
}

func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
    appInsightsAppID := dsInfo.Settings.AppInsightsAppId

    req, err := http.NewRequest(http.MethodGet, dsInfo.Services[appInsights].URL, nil)
    req, err := http.NewRequest(http.MethodGet, url, nil)
    if err != nil {
        azlog.Debug("Failed to create request", "error", err)
        return nil, errutil.Wrap("Failed to create request", err)
    }
    req.Header.Set("X-API-Key", dsInfo.DecryptedSecureJSONData["appInsightsApiKey"])

    req.URL.Path = fmt.Sprintf("/v1/apps/%s", appInsightsAppID)

    return req, nil
@@ -3,11 +3,9 @@ package azuremonitor
import (
    "context"
    "encoding/json"
    "net/http"
    "testing"
    "time"

    "github.com/google/go-cmp/cmp"
    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/stretchr/testify/require"
@@ -207,43 +205,34 @@ func TestInsightsDimensionsUnmarshalJSON(t *testing.T) {

func TestAppInsightsCreateRequest(t *testing.T) {
    ctx := context.Background()
    url := "http://ds"
    dsInfo := datasourceInfo{
        Settings: azureMonitorSettings{AppInsightsAppId: "foo"},
        Services: map[string]datasourceService{
            appInsights: {URL: "http://ds"},
        },
        DecryptedSecureJSONData: map[string]string{
            "appInsightsApiKey": "key",
        },
    }

    tests := []struct {
        name            string
        expectedURL     string
        expectedHeaders http.Header
        Err             require.ErrorAssertionFunc
        name        string
        expectedURL string
        Err         require.ErrorAssertionFunc
    }{
        {
            name:        "creates a request",
            expectedURL: "http://ds/v1/apps/foo",
            expectedHeaders: http.Header{
                "X-Api-Key": []string{"key"},
            },
            Err: require.NoError,
            Err: require.NoError,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ds := ApplicationInsightsDatasource{}
            req, err := ds.createRequest(ctx, dsInfo)
            req, err := ds.createRequest(ctx, dsInfo, url)
            tt.Err(t, err)
            if req.URL.String() != tt.expectedURL {
                t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
            }
            if !cmp.Equal(req.Header, tt.expectedHeaders) {
                t.Errorf("Unexpected HTTP headers: %v", cmp.Diff(req.Header, tt.expectedHeaders))
            }
        })
    }
}
@@ -22,7 +22,9 @@ import (
)

// AzureLogAnalyticsDatasource calls the Azure Log Analytics API's
type AzureLogAnalyticsDatasource struct{}
type AzureLogAnalyticsDatasource struct {
    proxy serviceProxy
}

// AzureLogAnalyticsQuery is the query request that is built from the saved values for
// from the UI
@@ -36,11 +38,15 @@ type AzureLogAnalyticsQuery struct {
    TimeRange backend.TimeRange
}

func (e *AzureLogAnalyticsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
    e.proxy.Do(rw, req, cli)
}

// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string) (*backend.QueryDataResponse, error) {
    result := backend.NewQueryDataResponse()

    queries, err := e.buildQueries(originalQueries, dsInfo)
@@ -49,7 +55,7 @@ func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
    }

    for _, query := range queries {
        result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo)
        result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo, client, url)
    }

    return result, nil
@@ -119,7 +125,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
    return azureLogAnalyticsQueries, nil
}

func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo datasourceInfo) backend.DataResponse {
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo datasourceInfo, client *http.Client, url string) backend.DataResponse {
    dataResponse := backend.DataResponse{}

    dataResponseErrorWithExecuted := func(err error) backend.DataResponse {
@@ -140,8 +146,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
        return dataResponseErrorWithExecuted(fmt.Errorf("Log Analytics credentials are no longer supported. Go to the data source configuration to update Azure Monitor credentials")) //nolint:golint,stylecheck
    }

    req, err := e.createRequest(ctx, dsInfo)

    req, err := e.createRequest(ctx, dsInfo, url)
    if err != nil {
        dataResponse.Error = err
        return dataResponse
@@ -167,7 +172,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
    }

    azlog.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
    res, err := ctxhttp.Do(ctx, dsInfo.Services[azureLogAnalytics].HTTPClient, req)
    res, err := ctxhttp.Do(ctx, client, req)
    if err != nil {
        return dataResponseErrorWithExecuted(err)
    }
@@ -217,8 +222,8 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
    return dataResponse
}

func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
    req, err := http.NewRequest(http.MethodGet, dsInfo.Services[azureLogAnalytics].URL, nil)
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
    req, err := http.NewRequest(http.MethodGet, url, nil)
    if err != nil {
        azlog.Debug("Failed to create request", "error", err)
        return nil, errutil.Wrap("failed to create request", err)
@@ -181,11 +181,8 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {

func TestLogAnalyticsCreateRequest(t *testing.T) {
    ctx := context.Background()
    dsInfo := datasourceInfo{
        Services: map[string]datasourceService{
            azureLogAnalytics: {URL: "http://ds"},
        },
    }
    url := "http://ds"
    dsInfo := datasourceInfo{}

    tests := []struct {
        name string
@@ -204,7 +201,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ds := AzureLogAnalyticsDatasource{}
            req, err := ds.createRequest(ctx, dsInfo)
            req, err := ds.createRequest(ctx, dsInfo, url)
            tt.Err(t, err)
            if req.URL.String() != tt.expectedURL {
                t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
@@ -231,7 +228,7 @@ func Test_executeQueryErrorWithDifferentLogAnalyticsCreds(t *testing.T) {
        Params:    url.Values{},
        TimeRange: backend.TimeRange{},
    }
    res := ds.executeQuery(ctx, query, dsInfo)
    res := ds.executeQuery(ctx, query, dsInfo, &http.Client{}, dsInfo.Services[azureLogAnalytics].URL)
    if res.Error == nil {
        t.Fatal("expecting an error")
    }
@@ -22,7 +22,9 @@ import (
)

// AzureResourceGraphDatasource calls the Azure Resource Graph API's
type AzureResourceGraphDatasource struct{}
type AzureResourceGraphDatasource struct {
    proxy serviceProxy
}

// AzureResourceGraphQuery is the query request that is built from the saved values for
// from the UI
@@ -38,11 +40,15 @@ type AzureResourceGraphQuery struct {
const argAPIVersion = "2021-03-01"
const argQueryProviderName = "/providers/Microsoft.ResourceGraph/resources"

func (e *AzureResourceGraphDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
    e.proxy.Do(rw, req, cli)
}

// executeTimeSeriesQuery does the following:
// 1. builds the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string) (*backend.QueryDataResponse, error) {
    result := &backend.QueryDataResponse{
        Responses: map[string]backend.DataResponse{},
    }
@@ -53,7 +59,7 @@ func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Contex
    }

    for _, query := range queries {
        result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo)
        result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo, client, url)
    }

    return result, nil
@@ -95,7 +101,7 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
    return azureResourceGraphQueries, nil
}

func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo datasourceInfo) backend.DataResponse {
func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo datasourceInfo, client *http.Client, dsURL string) backend.DataResponse {
    dataResponse := backend.DataResponse{}

    params := url.Values{}
@@ -132,7 +138,7 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
        return dataResponse
    }

    req, err := e.createRequest(ctx, dsInfo, reqBody)
    req, err := e.createRequest(ctx, dsInfo, reqBody, dsURL)

    if err != nil {
        dataResponse.Error = err
@@ -159,7 +165,7 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
    }

    azlog.Debug("AzureResourceGraph", "Request ApiURL", req.URL.String())
    res, err := ctxhttp.Do(ctx, dsInfo.Services[azureResourceGraph].HTTPClient, req)
    res, err := ctxhttp.Do(ctx, client, req)
    if err != nil {
        return dataResponseErrorWithExecuted(err)
    }
@@ -182,8 +188,8 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
    return dataResponse
}

func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, reqBody []byte) (*http.Request, error) {
    req, err := http.NewRequest(http.MethodPost, dsInfo.Services[azureResourceGraph].URL, bytes.NewBuffer(reqBody))
func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, reqBody []byte, url string) (*http.Request, error) {
    req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(reqBody))
    if err != nil {
        azlog.Debug("Failed to create request", "error", err)
        return nil, errutil.Wrap("failed to create request", err)
@@ -76,11 +76,8 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {

func TestAzureResourceGraphCreateRequest(t *testing.T) {
    ctx := context.Background()
    dsInfo := datasourceInfo{
        Services: map[string]datasourceService{
            azureResourceGraph: {URL: "http://ds"},
        },
    }
    url := "http://ds"
    dsInfo := datasourceInfo{}

    tests := []struct {
        name string
@@ -102,7 +99,7 @@ func TestAzureResourceGraphCreateRequest(t *testing.T) {
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ds := AzureResourceGraphDatasource{}
            req, err := ds.createRequest(ctx, dsInfo, []byte{})
            req, err := ds.createRequest(ctx, dsInfo, []byte{}, url)
            tt.Err(t, err)
            if req.URL.String() != tt.expectedURL {
                t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
@@ -21,7 +21,9 @@ import (
)

// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct{}
type AzureMonitorDatasource struct {
    proxy serviceProxy
}

var (
    // 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
@@ -30,11 +32,15 @@ var (

const azureMonitorAPIVersion = "2018-01-01"

func (e *AzureMonitorDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
    e.proxy.Do(rw, req, cli)
}

// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string) (*backend.QueryDataResponse, error) {
    result := backend.NewQueryDataResponse()

    queries, err := e.buildQueries(originalQueries, dsInfo)
@@ -43,7 +49,7 @@ func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, ori
    }

    for _, query := range queries {
        queryRes, resp, err := e.executeQuery(ctx, query, dsInfo)
        queryRes, resp, err := e.executeQuery(ctx, query, dsInfo, client, url)
        if err != nil {
            return nil, err
        }
@@ -149,10 +155,10 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
    return azureMonitorQueries, nil
}

func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, dsInfo datasourceInfo) (backend.DataResponse, AzureMonitorResponse, error) {
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, dsInfo datasourceInfo, cli *http.Client, url string) (backend.DataResponse, AzureMonitorResponse, error) {
    dataResponse := backend.DataResponse{}

    req, err := e.createRequest(ctx, dsInfo)
    req, err := e.createRequest(ctx, dsInfo, url)
    if err != nil {
        dataResponse.Error = err
        return dataResponse, AzureMonitorResponse{}, nil
@@ -180,7 +186,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM

    azlog.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
    azlog.Debug("AzureMonitor", "Target", query.Target)
    res, err := ctxhttp.Do(ctx, dsInfo.Services[azureMonitor].HTTPClient, req)
    res, err := ctxhttp.Do(ctx, cli, req)
    if err != nil {
        dataResponse.Error = err
        return dataResponse, AzureMonitorResponse{}, nil
@@ -200,8 +206,8 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
    return dataResponse, data, nil
}

func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
    req, err := http.NewRequest(http.MethodGet, dsInfo.Services[azureMonitor].URL, nil)
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
    req, err := http.NewRequest(http.MethodGet, url, nil)
    if err != nil {
        azlog.Debug("Failed to create request", "error", err)
        return nil, errutil.Wrap("Failed to create request", err)
@@ -514,11 +514,8 @@ func loadTestFile(t *testing.T, name string) AzureMonitorResponse {

func TestAzureMonitorCreateRequest(t *testing.T) {
    ctx := context.Background()
    dsInfo := datasourceInfo{
        Services: map[string]datasourceService{
            azureMonitor: {URL: "http://ds"},
        },
    }
    dsInfo := datasourceInfo{}
    url := "http://ds/"

    tests := []struct {
        name string
@@ -539,7 +536,7 @@ func TestAzureMonitorCreateRequest(t *testing.T) {
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ds := AzureMonitorDatasource{}
            req, err := ds.createRequest(ctx, dsInfo)
            req, err := ds.createRequest(ctx, dsInfo, url)
            tt.Err(t, err)
            if req.URL.String() != tt.expectedURL {
                t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
pkg/tsdb/azuremonitor/azuremonitor-resource-handler.go (new file, 125 lines)
@@ -0,0 +1,125 @@
package azuremonitor

import (
    "fmt"
    "io/ioutil"
    "net/http"
    "net/url"
    "strings"

    "github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
)

func getTarget(original string) (target string, err error) {
    splittedPath := strings.Split(original, "/")
    if len(splittedPath) < 3 {
        err = fmt.Errorf("the request should contain the service on its path")
        return
    }
    target = fmt.Sprintf("/%s", strings.Join(splittedPath[2:], "/"))
    return
}

type httpServiceProxy struct{}

func (s *httpServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter {
    res, err := cli.Do(req)
    if err != nil {
        rw.WriteHeader(http.StatusInternalServerError)
        _, err = rw.Write([]byte(fmt.Sprintf("unexpected error %v", err)))
        if err != nil {
            azlog.Error("Unable to write HTTP response", "error", err)
        }
        return nil
    }
    defer func() {
        if err := res.Body.Close(); err != nil {
            azlog.Warn("Failed to close response body", "err", err)
        }
    }()

    body, err := ioutil.ReadAll(res.Body)
    if err != nil {
        rw.WriteHeader(http.StatusInternalServerError)
        _, err = rw.Write([]byte(fmt.Sprintf("unexpected error %v", err)))
        if err != nil {
            azlog.Error("Unable to write HTTP response", "error", err)
        }
        return nil
    }
    rw.WriteHeader(res.StatusCode)
    _, err = rw.Write(body)
    if err != nil {
        azlog.Error("Unable to write HTTP response", "error", err)
    }

    for k, v := range res.Header {
        rw.Header().Set(k, v[0])
        for _, v := range v[1:] {
            rw.Header().Add(k, v)
        }
    }
    // Returning the response write for testing purposes
    return rw
}

func (s *Service) getDataSourceFromHTTPReq(req *http.Request) (datasourceInfo, error) {
    ctx := req.Context()
    pluginContext := httpadapter.PluginConfigFromContext(ctx)
    i, err := s.im.Get(pluginContext)
    if err != nil {
        return datasourceInfo{}, nil
    }
    ds, ok := i.(datasourceInfo)
    if !ok {
        return datasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
    }
    return ds, nil
}

func writeResponse(rw http.ResponseWriter, code int, msg string) {
    rw.WriteHeader(http.StatusBadRequest)
    _, err := rw.Write([]byte(msg))
    if err != nil {
        azlog.Error("Unable to write HTTP response", "error", err)
    }
}

func (s *Service) resourceHandler(subDataSource string) func(rw http.ResponseWriter, req *http.Request) {
    return func(rw http.ResponseWriter, req *http.Request) {
        azlog.Debug("Received resource call", "url", req.URL.String(), "method", req.Method)

        newPath, err := getTarget(req.URL.Path)
        if err != nil {
            writeResponse(rw, http.StatusBadRequest, err.Error())
            return
        }

        dsInfo, err := s.getDataSourceFromHTTPReq(req)
        if err != nil {
            writeResponse(rw, http.StatusInternalServerError, fmt.Sprintf("unexpected error %v", err))
            return
        }

        service := dsInfo.Services[subDataSource]
        serviceURL, err := url.Parse(service.URL)
        if err != nil {
            writeResponse(rw, http.StatusInternalServerError, fmt.Sprintf("unexpected error %v", err))
            return
        }
        req.URL.Path = newPath
        req.URL.Host = serviceURL.Host
        req.URL.Scheme = serviceURL.Scheme

        s.executors[subDataSource].resourceRequest(rw, req, service.HTTPClient)
    }
}

// Route definitions shared with the frontend.
// Check: /public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/common.ts <routeNames>
func (s *Service) registerRoutes(mux *http.ServeMux) {
    mux.HandleFunc("/azuremonitor/", s.resourceHandler(azureMonitor))
    mux.HandleFunc("/appinsights/", s.resourceHandler(appInsights))
    mux.HandleFunc("/loganalytics/", s.resourceHandler(azureLogAnalytics))
    mux.HandleFunc("/resourcegraph/", s.resourceHandler(azureResourceGraph))
}
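The path rewriting that the new handler performs is easiest to see in isolation. The sketch below re-implements the same splitting logic as the `getTarget` helper added above, purely for illustration (the real helper is unexported inside the `azuremonitor` package); the `main` wrapper and the sample subscription ID are only illustrative.

```go
package main

import (
	"fmt"
	"strings"
)

// getTarget mirrors the helper above: drop the first path segment (the
// sub-service prefix such as /azuremonitor) and keep the rest as the
// path that is forwarded to the Azure API host.
func getTarget(original string) (string, error) {
	parts := strings.Split(original, "/")
	if len(parts) < 3 {
		return "", fmt.Errorf("the request should contain the service on its path")
	}
	return fmt.Sprintf("/%s", strings.Join(parts[2:], "/")), nil
}

func main() {
	target, _ := getTarget("/azuremonitor/subscriptions/44693801")
	fmt.Println(target) // prints /subscriptions/44693801
}
```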
pkg/tsdb/azuremonitor/azuremonitor-resource-handler_test.go (new file, 122 lines)
@@ -0,0 +1,122 @@
package azuremonitor

import (
    "io/ioutil"
    "net/http"
    "net/http/httptest"
    "testing"

    "github.com/grafana/grafana/pkg/setting"
    "github.com/stretchr/testify/require"
)

func Test_parseResourcePath(t *testing.T) {
    tests := []struct {
        name           string
        original       string
        expectedTarget string
        Err            require.ErrorAssertionFunc
    }{
        {
            "Path with a subscription",
            "/azuremonitor/subscriptions/44693801",
            "/subscriptions/44693801",
            require.NoError,
        },
        {
            "Malformed path",
            "/subscriptions?44693801",
            "",
            require.Error,
        },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            target, err := getTarget(tt.original)
            if target != tt.expectedTarget {
                t.Errorf("Unexpected target %s expecting %s", target, tt.expectedTarget)
            }
            tt.Err(t, err)
        })
    }
}

func Test_proxyRequest(t *testing.T) {
    tests := []struct {
        name string
    }{
        {"forwards headers and body"},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
                w.Header().Add("foo", "bar")
                _, err := w.Write([]byte("result"))
                if err != nil {
                    t.Fatal(err)
                }
            }))
            req, err := http.NewRequest(http.MethodGet, srv.URL, nil)
            if err != nil {
                t.Error(err)
            }
            rw := httptest.NewRecorder()
            proxy := httpServiceProxy{}
            res := proxy.Do(rw, req, srv.Client())
            if res.Header().Get("foo") != "bar" {
                t.Errorf("Unexpected headers: %v", res.Header())
            }
            result := rw.Result()
            body, err := ioutil.ReadAll(result.Body)
            if err != nil {
                t.Error(err)
            }
            err = result.Body.Close()
            if err != nil {
                t.Error(err)
            }
            if string(body) != "result" {
                t.Errorf("Unexpected body: %v", string(body))
            }
        })
    }
}

type fakeProxy struct {
    requestedURL string
}

func (s *fakeProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter {
    s.requestedURL = req.URL.String()
    return nil
}

func Test_resourceHandler(t *testing.T) {
    proxy := &fakeProxy{}
    s := Service{
        im: &fakeInstance{
            services: map[string]datasourceService{
                azureMonitor: {
                    URL:        routes[setting.AzurePublic][azureMonitor].URL,
                    HTTPClient: &http.Client{},
                },
            },
        },
        Cfg: &setting.Cfg{},
        executors: map[string]azDatasourceExecutor{
            azureMonitor: &AzureMonitorDatasource{
                proxy: proxy,
            },
        },
    }
    rw := httptest.NewRecorder()
    req, err := http.NewRequest(http.MethodGet, "http://foo/azuremonitor/subscriptions/44693801", nil)
    if err != nil {
        t.Fatalf("Unexpected error %v", err)
    }
    s.resourceHandler(azureMonitor)(rw, req)
    expectedURL := "https://management.azure.com/subscriptions/44693801"
    if proxy.requestedURL != expectedURL {
        t.Errorf("Unexpected result URL. Got %s, expecting %s", proxy.requestedURL, expectedURL)
    }
}
@@ -11,6 +11,7 @@ import (
    "github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
    "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
    "github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
    "github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/infra/log"
    "github.com/grafana/grafana/pkg/plugins"
@@ -39,10 +40,17 @@ func init() {
    })
}

type serviceProxy interface {
    Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter
}

type Service struct {
    PluginManager        plugins.Manager       `inject:""`
    Cfg                  *setting.Cfg          `inject:""`
    BackendPluginManager backendplugin.Manager `inject:""`
    HTTPClientProvider   *httpclient.Provider  `inject:""`
    im        instancemgmt.InstanceManager
    executors map[string]azDatasourceExecutor
}

type azureMonitorSettings struct {
@@ -55,9 +63,8 @@ type datasourceInfo struct {
    Cloud       string
    Credentials azcredentials.AzureCredentials
    Settings    azureMonitorSettings
    Services    map[string]datasourceService
    Routes      map[string]azRoute
    HTTPCliOpts httpclient.Options
    Services    map[string]datasourceService

    JSONData                map[string]interface{}
    DecryptedSecureJSONData map[string]string
@@ -70,7 +77,19 @@ type datasourceService struct {
    HTTPClient *http.Client
}

func NewInstanceSettings(cfg *setting.Cfg) datasource.InstanceFactoryFunc {
func getDatasourceService(cfg *setting.Cfg, clientProvider httpclient.Provider, dsInfo datasourceInfo, routeName string) (datasourceService, error) {
    route := dsInfo.Routes[routeName]
    client, err := newHTTPClient(route, dsInfo, cfg, clientProvider)
    if err != nil {
        return datasourceService{}, err
    }
    return datasourceService{
        URL:        dsInfo.Routes[routeName].URL,
        HTTPClient: client,
    }, nil
}

func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, executors map[string]azDatasourceExecutor) datasource.InstanceFactoryFunc {
    return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
        jsonData, err := simplejson.NewJson(settings.JSONData)
        if err != nil {
@@ -99,11 +118,6 @@ func NewInstanceSettings(cfg *setting.Cfg) datasource.InstanceFactoryFunc {
            return nil, fmt.Errorf("error getting credentials: %w", err)
        }

        httpCliOpts, err := settings.HTTPClientOptions()
        if err != nil {
            return nil, fmt.Errorf("error getting http options: %w", err)
        }

        model := datasourceInfo{
            Cloud:       cloud,
            Credentials: credentials,
@@ -111,9 +125,16 @@ func NewInstanceSettings(cfg *setting.Cfg) datasource.InstanceFactoryFunc {
            JSONData:                jsonDataObj,
            DecryptedSecureJSONData: settings.DecryptedSecureJSONData,
            DatasourceID:            settings.ID,
            Services:                map[string]datasourceService{},
            Routes:                  routes[cloud],
            HTTPCliOpts:             httpCliOpts,
            Services:                map[string]datasourceService{},
        }

        for routeName := range executors {
            service, err := getDatasourceService(cfg, clientProvider, model, routeName)
            if err != nil {
                return nil, err
            }
            model.Services[routeName] = service
        }

        return model, nil
@@ -121,51 +142,60 @@ func NewInstanceSettings(cfg *setting.Cfg) datasource.InstanceFactoryFunc {
}

type azDatasourceExecutor interface {
    executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error)
    executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string) (*backend.QueryDataResponse, error)
    resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client)
}

func newExecutor(im instancemgmt.InstanceManager, cfg *setting.Cfg, executors map[string]azDatasourceExecutor) *datasource.QueryTypeMux {
func (s *Service) getDataSourceFromPluginReq(req *backend.QueryDataRequest) (datasourceInfo, error) {
    i, err := s.im.Get(req.PluginContext)
    if err != nil {
        return datasourceInfo{}, err
    }
    dsInfo, ok := i.(datasourceInfo)
    if !ok {
        return datasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
    }
    dsInfo.OrgID = req.PluginContext.OrgID
    return dsInfo, nil
}

func (s *Service) newMux() *datasource.QueryTypeMux {
    mux := datasource.NewQueryTypeMux()
    for dsType := range executors {
    for dsType := range s.executors {
        // Make a copy of the string to keep the reference after the iterator
        dst := dsType
        mux.HandleFunc(dsType, func(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
            i, err := im.Get(req.PluginContext)
            executor := s.executors[dst]
            dsInfo, err := s.getDataSourceFromPluginReq(req)
            if err != nil {
                return nil, err
            }
            dsInfo := i.(datasourceInfo)
            dsInfo.OrgID = req.PluginContext.OrgID
            ds := executors[dst]
            if _, ok := dsInfo.Services[dst]; !ok {
                // Create an HTTP Client if it has not been created before
                route := dsInfo.Routes[dst]
                client, err := newHTTPClient(route, dsInfo, cfg)
                if err != nil {
                    return nil, err
                }
                dsInfo.Services[dst] = datasourceService{
                    URL:        dsInfo.Routes[dst].URL,
                    HTTPClient: client,
                }
            service, ok := dsInfo.Services[dst]
            if !ok {
                return nil, fmt.Errorf("missing service for %s", dst)
            }
            return ds.executeTimeSeriesQuery(ctx, req.Queries, dsInfo)
            return executor.executeTimeSeriesQuery(ctx, req.Queries, dsInfo, service.HTTPClient, service.URL)
        })
    }
    return mux
}

func (s *Service) Init() error {
    im := datasource.NewInstanceManager(NewInstanceSettings(s.Cfg))
    executors := map[string]azDatasourceExecutor{
        azureMonitor:       &AzureMonitorDatasource{},
        appInsights:        &ApplicationInsightsDatasource{},
        azureLogAnalytics:  &AzureLogAnalyticsDatasource{},
        insightsAnalytics:  &InsightsAnalyticsDatasource{},
        azureResourceGraph: &AzureResourceGraphDatasource{},
    proxy := &httpServiceProxy{}
    s.executors = map[string]azDatasourceExecutor{
        azureMonitor:       &AzureMonitorDatasource{proxy: proxy},
        appInsights:        &ApplicationInsightsDatasource{proxy: proxy},
        azureLogAnalytics:  &AzureLogAnalyticsDatasource{proxy: proxy},
        insightsAnalytics:  &InsightsAnalyticsDatasource{proxy: proxy},
        azureResourceGraph: &AzureResourceGraphDatasource{proxy: proxy},
    }
    s.im = datasource.NewInstanceManager(NewInstanceSettings(s.Cfg, *s.HTTPClientProvider, s.executors))
    mux := s.newMux()
    resourceMux := http.NewServeMux()
    s.registerRoutes(resourceMux)
    factory := coreplugin.New(backend.ServeOpts{
        QueryDataHandler: newExecutor(im, s.Cfg, executors),
        QueryDataHandler:    mux,
        CallResourceHandler: httpadapter.New(resourceMux),
    })

    if err := s.BackendPluginManager.Register(dsName, factory); err != nil {
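The `Init` change above now registers two handlers instead of one: a `QueryTypeMux` for data queries and a standard `http.ServeMux`, wrapped by `httpadapter.New`, for resource calls. Below is a minimal, hypothetical sketch of that wiring pattern using only the plugin SDK types that appear in the diff; the handler body is a placeholder rather than the actual Azure proxy, and the function name is invented for illustration.

```go
package main

import (
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
)

// buildServeOpts sketches the pattern used in Init above: resource calls are
// served by a plain http.ServeMux that httpadapter exposes as a
// backend.CallResourceHandler, alongside the existing query handler.
func buildServeOpts(queryHandler backend.QueryDataHandler) backend.ServeOpts {
	mux := http.NewServeMux()
	mux.HandleFunc("/azuremonitor/", func(rw http.ResponseWriter, req *http.Request) {
		// Placeholder: the real handler rewrites the URL and proxies the
		// request to the Azure Monitor API with the per-route HTTP client.
		rw.WriteHeader(http.StatusOK)
	})

	return backend.ServeOpts{
		QueryDataHandler:    queryHandler,
		CallResourceHandler: httpadapter.New(mux),
	}
}

func main() {}
```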
@@ -2,11 +2,12 @@ package azuremonitor

import (
    "context"
    "net/http"
    "testing"

    "github.com/google/go-cmp/cmp"
    "github.com/google/go-cmp/cmp/cmpopts"
    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
    "github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
    "github.com/grafana/grafana/pkg/setting"
    "github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
@@ -35,6 +36,7 @@ func TestNewInstanceSettings(t *testing.T) {
                JSONData:                map[string]interface{}{"azureAuthType": "msi"},
                DatasourceID:            40,
                DecryptedSecureJSONData: map[string]string{"key": "value"},
                Services:                map[string]datasourceService{},
            },
            Err: require.NoError,
        },
@@ -48,22 +50,25 @@ func TestNewInstanceSettings(t *testing.T) {

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            factory := NewInstanceSettings(cfg)
            factory := NewInstanceSettings(cfg, httpclient.Provider{}, map[string]azDatasourceExecutor{})
            instance, err := factory(tt.settings)
            tt.Err(t, err)
            if !cmp.Equal(instance, tt.expectedModel, cmpopts.IgnoreFields(datasourceInfo{}, "Services", "HTTPCliOpts")) {
            if !cmp.Equal(instance, tt.expectedModel) {
                t.Errorf("Unexpected instance: %v", cmp.Diff(instance, tt.expectedModel))
            }
        })
    }
}

type fakeInstance struct{}
type fakeInstance struct {
    routes   map[string]azRoute
    services map[string]datasourceService
}

func (f *fakeInstance) Get(pluginContext backend.PluginContext) (instancemgmt.Instance, error) {
    return datasourceInfo{
        Services: map[string]datasourceService{},
        Routes:   routes[azureMonitorPublic],
        Routes:   f.routes,
        Services: f.services,
    }, nil
}
@@ -77,19 +82,24 @@ type fakeExecutor struct {
    expectedURL string
}

func (f *fakeExecutor) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
    if s, ok := dsInfo.Services[f.queryType]; !ok {
func (f *fakeExecutor) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
}

func (f *fakeExecutor) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string) (*backend.QueryDataResponse, error) {
    if client == nil {
        f.t.Errorf("The HTTP client for %s is missing", f.queryType)
    } else {
        if s.URL != f.expectedURL {
            f.t.Errorf("Unexpected URL %s wanted %s", s.URL, f.expectedURL)
        if url != f.expectedURL {
            f.t.Errorf("Unexpected URL %s wanted %s", url, f.expectedURL)
        }
    }
    return &backend.QueryDataResponse{}, nil
}

func Test_newExecutor(t *testing.T) {
    cfg := &setting.Cfg{}
func Test_newMux(t *testing.T) {
    cfg := &setting.Cfg{
        Azure: setting.AzureSettings{},
    }

    tests := []struct {
        name string
@@ -113,13 +123,26 @@ func Test_newExecutor(t *testing.T) {

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            mux := newExecutor(&fakeInstance{}, cfg, map[string]azDatasourceExecutor{
                tt.queryType: &fakeExecutor{
                    t:           t,
                    queryType:   tt.queryType,
                    expectedURL: tt.expectedURL,
            s := &Service{
                Cfg: cfg,
                im: &fakeInstance{
                    routes: routes[azureMonitorPublic],
                    services: map[string]datasourceService{
                        tt.queryType: {
                            URL:        routes[azureMonitorPublic][tt.queryType].URL,
                            HTTPClient: &http.Client{},
                        },
                    },
                },
            })
                executors: map[string]azDatasourceExecutor{
                    tt.queryType: &fakeExecutor{
                        t:           t,
                        queryType:   tt.queryType,
                        expectedURL: tt.expectedURL,
                    },
                },
            }
            mux := s.newMux()
            res, err := mux.QueryData(context.TODO(), &backend.QueryDataRequest{
                PluginContext: backend.PluginContext{},
                Queries: []backend.DataQuery{
@@ -8,34 +8,39 @@ import (
    "github.com/grafana/grafana/pkg/tsdb/azuremonitor/aztokenprovider"
)

func httpClientProvider(route azRoute, model datasourceInfo, cfg *setting.Cfg) (*httpclient.Provider, error) {
    var clientProvider *httpclient.Provider
func getMiddlewares(route azRoute, model datasourceInfo, cfg *setting.Cfg) ([]httpclient.Middleware, error) {
    middlewares := []httpclient.Middleware{}

    if len(route.Scopes) > 0 {
        tokenProvider, err := aztokenprovider.NewAzureAccessTokenProvider(cfg, model.Credentials)
        if err != nil {
            return nil, err
        }

        clientProvider = httpclient.NewProvider(httpclient.ProviderOptions{
            Middlewares: []httpclient.Middleware{
                aztokenprovider.AuthMiddleware(tokenProvider, route.Scopes),
            },
        })
    } else {
        clientProvider = httpclient.NewProvider()
        middlewares = append(middlewares, aztokenprovider.AuthMiddleware(tokenProvider, route.Scopes))
    }

    return clientProvider, nil
    if _, ok := model.DecryptedSecureJSONData["appInsightsApiKey"]; ok && (route.URL == azAppInsights.URL || route.URL == azChinaAppInsights.URL) {
        // Inject API-Key for AppInsights
        apiKeyMiddleware := httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
            return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
                req.Header.Set("X-API-Key", model.DecryptedSecureJSONData["appInsightsApiKey"])
                return next.RoundTrip(req)
            })
        })
        middlewares = append(middlewares, apiKeyMiddleware)
    }

    return middlewares, nil
}

func newHTTPClient(route azRoute, model datasourceInfo, cfg *setting.Cfg) (*http.Client, error) {
    model.HTTPCliOpts.Headers = route.Headers

    clientProvider, err := httpClientProvider(route, model, cfg)
func newHTTPClient(route azRoute, model datasourceInfo, cfg *setting.Cfg, clientProvider httpclient.Provider) (*http.Client, error) {
    m, err := getMiddlewares(route, model, cfg)
    if err != nil {
        return nil, err
    }

    return clientProvider.New(model.HTTPCliOpts)
    return clientProvider.New(httpclient.Options{
        Headers:     route.Headers,
        Middlewares: m,
    })
}
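With this change the Application Insights API key is injected by an HTTP client middleware instead of being set on each request in `createRequest`. The sketch below shows that same middleware pattern in standalone form, assuming the grafana-plugin-sdk-go `httpclient` package already used in the diff; the key value and the `main` usage are illustrative only.

```go
package main

import (
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
)

// apiKeyMiddleware mirrors the App Insights middleware above: every request
// made by the resulting client gets an X-API-Key header before it is sent.
func apiKeyMiddleware(key string) httpclient.Middleware {
	return httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
		return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
			req.Header.Set("X-API-Key", key)
			return next.RoundTrip(req)
		})
	})
}

func main() {
	provider := httpclient.NewProvider()
	// Hypothetical usage: build a client that always sends the static key.
	cli, err := provider.New(httpclient.Options{
		Middlewares: []httpclient.Middleware{apiKeyMiddleware("key")},
	})
	_ = cli
	_ = err
}
```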
@@ -10,21 +10,37 @@ import (

func Test_httpCliProvider(t *testing.T) {
    cfg := &setting.Cfg{}
    model := datasourceInfo{
        Credentials: &azcredentials.AzureClientSecretCredentials{},
    }
    tests := []struct {
        name                string
        route               azRoute
        model               datasourceInfo
        expectedMiddlewares int
        Err                 require.ErrorAssertionFunc
    }{
        {
            name: "creates an HTTP client with a middleware",
            name: "creates an HTTP client with a middleware due to the scope",
            route: azRoute{
                URL:    "http://route",
                Scopes: []string{"http://route/.default"},
            },
            model: datasourceInfo{
                Credentials: &azcredentials.AzureClientSecretCredentials{},
            },
            expectedMiddlewares: 1,
            Err:                 require.NoError,
        },
        {
            name: "creates an HTTP client with a middleware due to an app key",
            route: azRoute{
                URL:    azAppInsights.URL,
                Scopes: []string{},
            },
            model: datasourceInfo{
                Credentials: &azcredentials.AzureClientSecretCredentials{},
                DecryptedSecureJSONData: map[string]string{
                    "appInsightsApiKey": "foo",
                },
            },
            expectedMiddlewares: 1,
            Err:                 require.NoError,
        },
@@ -34,20 +50,22 @@ func Test_httpCliProvider(t *testing.T) {
                URL:    "http://route",
                Scopes: []string{},
            },
            // httpclient.NewProvider returns a client with 2 middlewares by default
            expectedMiddlewares: 2,
            model: datasourceInfo{
                Credentials: &azcredentials.AzureClientSecretCredentials{},
            },
            expectedMiddlewares: 0,
            Err:                 require.NoError,
        },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            cli, err := httpClientProvider(tt.route, model, cfg)
            m, err := getMiddlewares(tt.route, tt.model, cfg)
            require.NoError(t, err)

            // Cannot test that the cli middleware works properly since the azcore sdk
            // rejects the TLS certs (if provided)
            if len(cli.Opts.Middlewares) != tt.expectedMiddlewares {
                t.Errorf("Unexpected middlewares: %v", cli.Opts.Middlewares)
            if len(m) != tt.expectedMiddlewares {
                t.Errorf("Unexpected middlewares: %v", m)
            }
        })
    }
@@ -17,7 +17,9 @@ import (
    "golang.org/x/net/context/ctxhttp"
)

type InsightsAnalyticsDatasource struct{}
type InsightsAnalyticsDatasource struct {
    proxy serviceProxy
}

type InsightsAnalyticsQuery struct {
    RefID string
@@ -31,8 +33,12 @@ type InsightsAnalyticsQuery struct {
    Target string
}

func (e *InsightsAnalyticsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
    e.proxy.Do(rw, req, cli)
}

func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context,
    originalQueries []backend.DataQuery, dsInfo datasourceInfo) (*backend.QueryDataResponse, error) {
    originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string) (*backend.QueryDataResponse, error) {
    result := backend.NewQueryDataResponse()

    queries, err := e.buildQueries(originalQueries, dsInfo)
@@ -41,7 +47,7 @@ func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
    }

    for _, query := range queries {
        result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo)
        result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo, client, url)
    }

    return result, nil
@@ -80,7 +86,7 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
    return iaQueries, nil
}

func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo datasourceInfo) backend.DataResponse {
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo datasourceInfo, client *http.Client, url string) backend.DataResponse {
    dataResponse := backend.DataResponse{}

    dataResponseError := func(err error) backend.DataResponse {
@@ -88,7 +94,7 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
        return dataResponse
    }

    req, err := e.createRequest(ctx, dsInfo)
    req, err := e.createRequest(ctx, dsInfo, url)
    if err != nil {
        return dataResponseError(err)
    }
@@ -112,7 +118,7 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
    }

    azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
    res, err := ctxhttp.Do(ctx, dsInfo.Services[appInsights].HTTPClient, req)
    res, err := ctxhttp.Do(ctx, client, req)
    if err != nil {
        return dataResponseError(err)
    }
@@ -168,15 +174,14 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
    return dataResponse
}

func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo) (*http.Request, error) {
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
    appInsightsAppID := dsInfo.Settings.AppInsightsAppId

    req, err := http.NewRequest(http.MethodGet, dsInfo.Services[insightsAnalytics].URL, nil)
    req, err := http.NewRequest(http.MethodGet, url, nil)
    if err != nil {
        azlog.Debug("Failed to create request", "error", err)
        return nil, errutil.Wrap("Failed to create request", err)
    }
    req.Header.Set("X-API-Key", dsInfo.DecryptedSecureJSONData["appInsightsApiKey"])
    req.URL.Path = fmt.Sprintf("/v1/apps/%s", appInsightsAppID)
    return req, nil
}
@@ -5,17 +5,14 @@ import (
    "net/http"
    "testing"

    "github.com/google/go-cmp/cmp"
    "github.com/stretchr/testify/require"
)

func TestInsightsAnalyticsCreateRequest(t *testing.T) {
    ctx := context.Background()
    url := "http://ds"
    dsInfo := datasourceInfo{
        Settings: azureMonitorSettings{AppInsightsAppId: "foo"},
        Services: map[string]datasourceService{
            insightsAnalytics: {URL: "http://ds"},
        },
        DecryptedSecureJSONData: map[string]string{
            "appInsightsApiKey": "key",
        },
@@ -30,24 +27,18 @@ func TestInsightsAnalyticsCreateRequest(t *testing.T) {
        {
            name:        "creates a request",
            expectedURL: "http://ds/v1/apps/foo",
            expectedHeaders: http.Header{
                "X-Api-Key": []string{"key"},
            },
            Err: require.NoError,
            Err: require.NoError,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            ds := InsightsAnalyticsDatasource{}
            req, err := ds.createRequest(ctx, dsInfo)
            req, err := ds.createRequest(ctx, dsInfo, url)
            tt.Err(t, err)
            if req.URL.String() != tt.expectedURL {
                t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
            }
            if !cmp.Equal(req.Header, tt.expectedHeaders) {
                t.Errorf("Unexpected HTTP headers: %v", cmp.Diff(req.Header, tt.expectedHeaders))
            }
        })
    }
}
@@ -1,38 +0,0 @@
export function getManagementApiRoute(azureCloud: string): string {
  switch (azureCloud) {
    case 'azuremonitor':
      return 'azuremonitor';
    case 'chinaazuremonitor':
      return 'chinaazuremonitor';
    case 'govazuremonitor':
      return 'govazuremonitor';
    case 'germanyazuremonitor':
      return 'germanyazuremonitor';
    default:
      throw new Error('The cloud not supported.');
  }
}

export function getLogAnalyticsApiRoute(azureCloud: string): string {
  switch (azureCloud) {
    case 'azuremonitor':
      return 'loganalyticsazure';
    case 'chinaazuremonitor':
      return 'chinaloganalyticsazure';
    case 'govazuremonitor':
      return 'govloganalyticsazure';
    default:
      throw new Error('The cloud not supported.');
  }
}

export function getAppInsightsApiRoute(azureCloud: string): string {
  switch (azureCloud) {
    case 'azuremonitor':
      return 'appinsights';
    case 'chinaazuremonitor':
      return 'chinaappinsights';
    default:
      throw new Error('The cloud not supported.');
  }
}
@@ -15,7 +15,6 @@ jest.mock('@grafana/runtime', () => ({
}));

describe('AppInsightsDatasource', () => {
  const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest');
  const fetchMock = jest.spyOn(backendSrv, 'fetch');

  const ctx: any = {};
@@ -53,8 +52,8 @@ describe('AppInsightsDatasource', () => {
  };

  beforeEach(() => {
    datasourceRequestMock.mockImplementation(() => {
      return Promise.resolve({ data: response, status: 200 });
    ctx.ds.getResource = jest.fn().mockImplementation(() => {
      return Promise.resolve(response);
    });
  });
@@ -78,7 +77,7 @@ describe('AppInsightsDatasource', () => {
  };

  beforeEach(() => {
    datasourceRequestMock.mockImplementation(() => {
    ctx.ds.getResource = jest.fn().mockImplementation(() => {
      return Promise.reject(error);
    });
  });
@@ -106,7 +105,7 @@ describe('AppInsightsDatasource', () => {
  };

  beforeEach(() => {
    datasourceRequestMock.mockImplementation(() => {
    ctx.ds.getResource = jest.fn().mockImplementation(() => {
      return Promise.reject(error);
    });
  });
@@ -419,9 +418,9 @@ describe('AppInsightsDatasource', () => {
  };

  beforeEach(() => {
    datasourceRequestMock.mockImplementation((options: { url: string }) => {
      expect(options.url).toContain('/metrics/metadata');
      return Promise.resolve({ data: response, status: 200 });
    ctx.ds.getResource = jest.fn().mockImplementation((path) => {
      expect(path).toContain('/metrics/metadata');
      return Promise.resolve(response);
    });
  });
@@ -457,9 +456,9 @@ describe('AppInsightsDatasource', () => {
  };

  beforeEach(() => {
    datasourceRequestMock.mockImplementation((options: { url: string }) => {
      expect(options.url).toContain('/metrics/metadata');
      return Promise.resolve({ data: response, status: 200 });
    ctx.ds.getResource = jest.fn().mockImplementation((path) => {
      expect(path).toContain('/metrics/metadata');
      return Promise.resolve(response);
    });
  });
@@ -485,8 +484,8 @@ describe('AppInsightsDatasource', () => {
  };

  beforeEach(() => {
    datasourceRequestMock.mockImplementation((options: { url: string }) => {
      expect(options.url).toContain('/metrics/metadata');
    ctx.ds.getResource = jest.fn().mockImplementation((path) => {
      expect(path).toContain('/metrics/metadata');
      return Promise.resolve({ data: response, status: 200 });
    });
  });
@@ -523,8 +522,8 @@ describe('AppInsightsDatasource', () => {
  };

  beforeEach(() => {
    datasourceRequestMock.mockImplementation((options: { url: string }) => {
      expect(options.url).toContain('/metrics/metadata');
    ctx.ds.getResource = jest.fn().mockImplementation((path) => {
      expect(path).toContain('/metrics/metadata');
      return Promise.resolve({ data: response, status: 200 });
    });
  });
@@ -1,12 +1,11 @@
import { DataQueryRequest, DataSourceInstanceSettings, ScopedVars, MetricFindValue } from '@grafana/data';
import { getBackendSrv, getTemplateSrv, DataSourceWithBackend } from '@grafana/runtime';
import { getTemplateSrv, DataSourceWithBackend } from '@grafana/runtime';
import { isString } from 'lodash';

import TimegrainConverter from '../time_grain_converter';
import { AzureDataSourceJsonData, AzureMonitorQuery, AzureQueryType, DatasourceValidationResult } from '../types';
import { routeNames } from '../utils/common';
import ResponseParser from './response_parser';
import { getAzureCloud } from '../credentials';
import { getAppInsightsApiRoute } from '../api/routes';

export interface LogAnalyticsColumn {
  text: string;
@@ -14,8 +13,7 @@ export interface LogAnalyticsColumn {
}

export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMonitorQuery, AzureDataSourceJsonData> {
  url: string;
  baseUrl: string;
  resourcePath: string;
  version = 'beta';
  applicationId: string;
  logAnalyticsColumns: { [key: string]: LogAnalyticsColumn[] } = {};
@@ -24,11 +22,7 @@ export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMo
    super(instanceSettings);
    this.applicationId = instanceSettings.jsonData.appInsightsAppId || '';

    const cloud = getAzureCloud(instanceSettings);
    const route = getAppInsightsApiRoute(cloud);
    this.baseUrl = `/${route}/${this.version}/apps/${this.applicationId}`;

    this.url = instanceSettings.url || '';
    this.resourcePath = `${routeNames.appInsights}/${this.version}/apps/${this.applicationId}`;
  }

  isConfigured(): boolean {
@@ -134,20 +128,13 @@ export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMo
  }

  testDatasource(): Promise<DatasourceValidationResult> {
    const url = `${this.baseUrl}/metrics/metadata`;
    return this.doRequest(url)
    const path = `${this.resourcePath}/metrics/metadata`;
    return this.getResource(path)
      .then<DatasourceValidationResult>((response: any) => {
        if (response.status === 200) {
          return {
            status: 'success',
            message: 'Successfully queried the Application Insights service.',
            title: 'Success',
          };
        }

        return {
          status: 'error',
          message: 'Application Insights: Returned http status code ' + response.status,
          status: 'success',
          message: 'Successfully queried the Application Insights service.',
          title: 'Success',
        };
      })
      .catch((error: any) => {
@@ -169,29 +156,14 @@ export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMo
    });
  }

  doRequest(url: any, maxRetries = 1): Promise<any> {
    return getBackendSrv()
      .datasourceRequest({
        url: this.url + url,
        method: 'GET',
      })
      .catch((error: any) => {
        if (maxRetries > 0) {
          return this.doRequest(url, maxRetries - 1);
        }

        throw error;
      });
  }

  getMetricNames() {
    const url = `${this.baseUrl}/metrics/metadata`;
    return this.doRequest(url).then(ResponseParser.parseMetricNames);
    const path = `${this.resourcePath}/metrics/metadata`;
    return this.getResource(path).then(ResponseParser.parseMetricNames);
  }

  getMetricMetadata(metricName: string) {
    const url = `${this.baseUrl}/metrics/metadata`;
    return this.doRequest(url).then((result: any) => {
    const path = `${this.resourcePath}/metrics/metadata`;
    return this.getResource(path).then((result: any) => {
      return new ResponseParser(result).parseMetadata(metricName);
    });
  }
@@ -203,8 +175,8 @@ export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMo
  }

  getQuerySchema() {
    const url = `${this.baseUrl}/query/schema`;
    return this.doRequest(url).then((result: any) => {
    const path = `${this.resourcePath}/query/schema`;
    return this.getResource(path).then((result: any) => {
      const schema = new ResponseParser(result).parseQuerySchema();
      // console.log(schema);
      return schema;
@ -12,11 +12,11 @@ export default class ResponseParser {
|
||||
const xaxis = this.results[i].query.xaxis;
|
||||
const yaxises = this.results[i].query.yaxis;
|
||||
const spliton = this.results[i].query.spliton;
|
||||
columns = this.results[i].result.data.Tables[0].Columns;
|
||||
const rows = this.results[i].result.data.Tables[0].Rows;
|
||||
columns = this.results[i].result.Tables[0].Columns;
|
||||
const rows = this.results[i].result.Tables[0].Rows;
|
||||
data = concat(data, this.parseRawQueryResultRow(this.results[i].query, columns, rows, xaxis, yaxises, spliton));
|
||||
} else {
|
||||
const value = this.results[i].result.data.value;
|
||||
const value = this.results[i].result.value;
|
||||
const alias = this.results[i].query.alias;
|
||||
data = concat(data, this.parseQueryResultRow(this.results[i].query, value, alias));
|
||||
}
|
||||
@ -174,14 +174,14 @@ export default class ResponseParser {
|
||||
return dateTime(dateTimeValue).valueOf();
|
||||
}
|
||||
|
||||
static parseMetricNames(result: { data: { metrics: any } }) {
|
||||
const keys = _keys(result.data.metrics);
|
||||
static parseMetricNames(result: { metrics: any }) {
|
||||
const keys = _keys(result.metrics);
|
||||
|
||||
return ResponseParser.toTextValueList(keys);
|
||||
}
|
||||
|
||||
parseMetadata(metricName: string) {
|
||||
const metric = this.results.data.metrics[metricName];
|
||||
const metric = this.results.metrics[metricName];
|
||||
|
||||
if (!metric) {
|
||||
throw Error('No data found for metric: ' + metricName);
|
||||
@ -203,9 +203,9 @@ export default class ResponseParser {
|
||||
Type: 'AppInsights',
|
||||
Tables: {},
|
||||
};
|
||||
if (this.results && this.results.data && this.results.data.Tables) {
|
||||
for (let i = 0; i < this.results.data.Tables[0].Rows.length; i++) {
|
||||
const column = this.results.data.Tables[0].Rows[i];
|
||||
if (this.results && this.results && this.results.Tables) {
|
||||
for (let i = 0; i < this.results.Tables[0].Rows.length; i++) {
|
||||
const column = this.results.Tables[0].Rows[i];
|
||||
const columnTable = column[0];
|
||||
const columnName = column[1];
|
||||
const columnType = column[2];
|
||||
|
@ -3,14 +3,12 @@ import FakeSchemaData from './__mocks__/schema';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { AzureLogsVariable, DatasourceValidationResult } from '../types';
|
||||
import { toUtc } from '@grafana/data';
|
||||
import { backendSrv } from 'app/core/services/backend_srv';
|
||||
|
||||
const templateSrv = new TemplateSrv();
|
||||
|
||||
jest.mock('app/core/services/backend_srv');
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...((jest.requireActual('@grafana/runtime') as unknown) as object),
|
||||
getBackendSrv: () => backendSrv,
|
||||
getTemplateSrv: () => templateSrv,
|
||||
}));
|
||||
|
||||
@ -22,13 +20,6 @@ const makeResourceURI = (
|
||||
`/subscriptions/${subscriptionID}/resourceGroups/${resourceGroup}/providers/Microsoft.OperationalInsights/workspaces/${resourceName}`;
|
||||
|
||||
describe('AzureLogAnalyticsDatasource', () => {
|
||||
const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
datasourceRequestMock.mockImplementation(jest.fn());
|
||||
});
|
||||
|
||||
const ctx: any = {};
|
||||
|
||||
beforeEach(() => {
|
||||
@ -40,64 +31,6 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
|
||||
});
|
||||
|
||||
describe('When the config option "Same as Azure Monitor" has been chosen', () => {
|
||||
const tableResponseWithOneColumn = {
|
||||
tables: [
|
||||
{
|
||||
name: 'PrimaryResult',
|
||||
columns: [
|
||||
{
|
||||
name: 'Category',
|
||||
type: 'string',
|
||||
},
|
||||
],
|
||||
rows: [['Administrative'], ['Policy']],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const workspaceResponse = {
|
||||
value: [
|
||||
{
|
||||
name: 'aworkspace',
|
||||
id: makeResourceURI('a-workspace'),
|
||||
properties: {
|
||||
source: 'Azure',
|
||||
customerId: 'abc1b44e-3e57-4410-b027-6cc0ae6dee67',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let workspacesUrl: string;
|
||||
let azureLogAnalyticsUrl: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
ctx.instanceSettings.jsonData.subscriptionId = 'xxx';
|
||||
ctx.instanceSettings.jsonData.tenantId = 'xxx';
|
||||
ctx.instanceSettings.jsonData.clientId = 'xxx';
|
||||
ctx.instanceSettings.jsonData.azureLogAnalyticsSameAs = true;
|
||||
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
|
||||
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces?api-version') > -1) {
|
||||
workspacesUrl = options.url;
|
||||
return Promise.resolve({ data: workspaceResponse, status: 200 });
|
||||
} else {
|
||||
azureLogAnalyticsUrl = options.url;
|
||||
return Promise.resolve({ data: tableResponseWithOneColumn, status: 200 });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should use the loganalyticsazure plugin route', async () => {
|
||||
await ctx.ds.metricFindQuery('workspace("aworkspace").AzureActivity | distinct Category');
|
||||
|
||||
expect(workspacesUrl).toContain('azuremonitor');
|
||||
expect(azureLogAnalyticsUrl).toContain('loganalyticsazure');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When performing testDatasource', () => {
|
||||
describe('and an error is returned', () => {
|
||||
const error = {
|
||||
@ -113,7 +46,7 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.instanceSettings.jsonData.azureAuthType = 'msi';
|
||||
datasourceRequestMock.mockImplementation(() => Promise.reject(error));
|
||||
ctx.ds.azureLogAnalyticsDatasource.getResource = jest.fn().mockRejectedValue(error);
|
||||
});
|
||||
|
||||
it('should return error status and a detailed error message', () => {
|
||||
@ -129,9 +62,9 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
|
||||
describe('When performing getSchema', () => {
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('metadata');
|
||||
return Promise.resolve({ data: FakeSchemaData.getlogAnalyticsFakeMetadata(), status: 200, ok: true });
|
||||
ctx.ds.azureLogAnalyticsDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
expect(path).toContain('metadata');
|
||||
return Promise.resolve(FakeSchemaData.getlogAnalyticsFakeMetadata());
|
||||
});
|
||||
});
|
||||
|
||||
@ -191,9 +124,9 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
|
||||
describe('and is the workspaces() macro', () => {
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('xxx');
|
||||
return Promise.resolve({ data: workspacesResponse, status: 200 });
|
||||
ctx.ds.azureLogAnalyticsDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
expect(path).toContain('xxx');
|
||||
return Promise.resolve(workspacesResponse);
|
||||
});
|
||||
|
||||
queryResults = await ctx.ds.metricFindQuery('workspaces()');
|
||||
@ -209,9 +142,9 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
|
||||
describe('and is the workspaces() macro with the subscription parameter', () => {
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
return Promise.resolve({ data: workspacesResponse, status: 200 });
|
||||
ctx.ds.azureLogAnalyticsDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
expect(path).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
return Promise.resolve(workspacesResponse);
|
||||
});
|
||||
|
||||
queryResults = await ctx.ds.metricFindQuery('workspaces(11112222-eeee-4949-9b2d-9106972f9123)');
|
||||
@ -227,9 +160,9 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
|
||||
describe('and is the workspaces() macro with the subscription parameter quoted', () => {
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
return Promise.resolve({ data: workspacesResponse, status: 200 });
|
||||
ctx.ds.azureLogAnalyticsDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
expect(path).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
return Promise.resolve(workspacesResponse);
|
||||
});
|
||||
|
||||
queryResults = await ctx.ds.metricFindQuery('workspaces("11112222-eeee-4949-9b2d-9106972f9123")');
|
||||
@ -273,11 +206,11 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
if (options.url.indexOf('OperationalInsights/workspaces?api-version=') > -1) {
|
||||
return Promise.resolve({ data: workspaceResponse, status: 200 });
|
||||
ctx.ds.azureLogAnalyticsDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
if (path.indexOf('OperationalInsights/workspaces?api-version=') > -1) {
|
||||
return Promise.resolve(workspaceResponse);
|
||||
} else {
|
||||
return Promise.resolve({ data: tableResponseWithOneColumn, status: 200 });
|
||||
return Promise.resolve(tableResponseWithOneColumn);
|
||||
}
|
||||
});
|
||||
});
|
||||
@ -337,11 +270,11 @@ describe('AzureLogAnalyticsDatasource', () => {
|
||||
let annotationResults: any[];
|
||||
|
||||
beforeEach(async () => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
|
||||
return Promise.resolve({ data: workspaceResponse, status: 200 });
|
||||
ctx.ds.azureLogAnalyticsDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
if (path.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
|
||||
return Promise.resolve(workspaceResponse);
|
||||
} else {
|
||||
return Promise.resolve({ data: tableResponse, status: 200 });
|
||||
return Promise.resolve(tableResponse);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -15,17 +15,16 @@ import {
|
||||
DataSourceInstanceSettings,
|
||||
MetricFindValue,
|
||||
} from '@grafana/data';
|
||||
import { getBackendSrv, getTemplateSrv, DataSourceWithBackend, FetchResponse } from '@grafana/runtime';
|
||||
import { getTemplateSrv, DataSourceWithBackend } from '@grafana/runtime';
|
||||
import { Observable, from } from 'rxjs';
|
||||
import { mergeMap } from 'rxjs/operators';
|
||||
import { getAuthType, getAzureCloud, getAzurePortalUrl } from '../credentials';
|
||||
import { getLogAnalyticsApiRoute, getManagementApiRoute } from '../api/routes';
|
||||
import { AzureLogAnalyticsMetadata } from '../types/logAnalyticsMetadata';
|
||||
import { isGUIDish } from '../components/ResourcePicker/utils';
|
||||
import { routeNames } from '../utils/common';
|
||||
|
||||
interface AdhocQuery {
|
||||
datasourceId: number;
|
||||
url: string;
|
||||
path: string;
|
||||
resultFormat: string;
|
||||
}
|
||||
|
||||
@ -33,14 +32,13 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
AzureMonitorQuery,
|
||||
AzureDataSourceJsonData
|
||||
> {
|
||||
url: string;
|
||||
baseUrl: string;
|
||||
resourcePath: string;
|
||||
azurePortalUrl: string;
|
||||
applicationId: string;
|
||||
|
||||
defaultSubscriptionId?: string;
|
||||
|
||||
azureMonitorUrl: string;
|
||||
azureMonitorPath: string;
|
||||
defaultOrFirstWorkspace: string;
|
||||
cache: Map<string, any>;
|
||||
|
||||
@ -48,15 +46,11 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
super(instanceSettings);
|
||||
this.cache = new Map();
|
||||
|
||||
this.resourcePath = `${routeNames.logAnalytics}`;
|
||||
this.azureMonitorPath = `${routeNames.azureMonitor}/subscriptions`;
|
||||
const cloud = getAzureCloud(instanceSettings);
|
||||
const logAnalyticsRoute = getLogAnalyticsApiRoute(cloud);
|
||||
this.baseUrl = `/${logAnalyticsRoute}`;
|
||||
this.azurePortalUrl = getAzurePortalUrl(cloud);
|
||||
|
||||
const managementRoute = getManagementApiRoute(cloud);
|
||||
this.azureMonitorUrl = `/${managementRoute}/subscriptions`;
|
||||
|
||||
this.url = instanceSettings.url || '';
|
||||
this.defaultSubscriptionId = this.instanceSettings.jsonData.subscriptionId || '';
|
||||
this.defaultOrFirstWorkspace = this.instanceSettings.jsonData.logAnalyticsDefaultWorkspace || '';
|
||||
}
|
||||
@ -71,8 +65,8 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
return [];
|
||||
}
|
||||
|
||||
const url = `${this.azureMonitorUrl}?api-version=2019-03-01`;
|
||||
return await this.doRequest(url).then((result: any) => {
|
||||
const path = `${this.azureMonitorPath}?api-version=2019-03-01`;
|
||||
return await this.getResource(path).then((result: any) => {
|
||||
return ResponseParser.parseSubscriptions(result);
|
||||
});
|
||||
}
|
||||
@ -81,7 +75,7 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
const response = await this.getWorkspaceList(subscription);
|
||||
|
||||
return (
|
||||
map(response.data.value, (val: any) => {
|
||||
map(response.value, (val: any) => {
|
||||
return { text: val.name, value: val.id };
|
||||
}) || []
|
||||
);
|
||||
@ -91,20 +85,16 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
const subscriptionId = getTemplateSrv().replace(subscription || this.defaultSubscriptionId);
|
||||
|
||||
const workspaceListUrl =
|
||||
this.azureMonitorUrl +
|
||||
this.azureMonitorPath +
|
||||
`/${subscriptionId}/providers/Microsoft.OperationalInsights/workspaces?api-version=2017-04-26-preview`;
|
||||
return this.doRequest(workspaceListUrl, true);
|
||||
return this.getResource(workspaceListUrl);
|
||||
}
|
||||
|
||||
async getMetadata(resourceUri: string) {
|
||||
const url = `${this.baseUrl}/v1${resourceUri}/metadata`;
|
||||
const path = `${this.resourcePath}/v1${resourceUri}/metadata`;
|
||||
|
||||
const resp = await this.doRequest<AzureLogAnalyticsMetadata>(url);
|
||||
if (!resp.ok) {
|
||||
throw new Error('Unable to get metadata for workspace');
|
||||
}
|
||||
|
||||
return resp.data;
|
||||
const resp = await this.getResource(path);
|
||||
return resp;
|
||||
}
|
||||
|
||||
async getKustoSchema(resourceUri: string) {
|
||||
@ -202,7 +192,7 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
}
|
||||
const response = await this.getWorkspaceList(this.defaultSubscriptionId);
|
||||
|
||||
const details = response.data.value.find((o: any) => {
|
||||
const details = response.value.find((o: any) => {
|
||||
return o.properties.customerId === workspaceId;
|
||||
});
|
||||
|
||||
@ -286,14 +276,14 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
);
|
||||
|
||||
const querystring = querystringBuilder.generate().uriString;
|
||||
const url = isGUIDish(workspace)
|
||||
? `${this.baseUrl}/v1/workspaces/${workspace}/query?${querystring}`
|
||||
: `${this.baseUrl}/v1${workspace}/query?${querystring}`;
|
||||
const path = isGUIDish(workspace)
|
||||
? `${this.resourcePath}/v1/workspaces/${workspace}/query?${querystring}`
|
||||
: `${this.resourcePath}/v1${workspace}/query?${querystring}`;
|
||||
|
||||
const queries = [
|
||||
{
|
||||
datasourceId: this.id,
|
||||
url: url,
|
||||
path: path,
|
||||
resultFormat: 'table',
|
||||
},
|
||||
];
|
||||
@ -370,7 +360,7 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
|
||||
doQueries(queries: AdhocQuery[]) {
|
||||
return map(queries, (query) => {
|
||||
return this.doRequest(query.url)
|
||||
return this.getResource(query.path)
|
||||
.then((result: any) => {
|
||||
return {
|
||||
result: result,
|
||||
@ -386,32 +376,6 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
});
|
||||
}
|
||||
|
||||
async doRequest<T = any>(url: string, useCache = false, maxRetries = 1): Promise<FetchResponse<T>> {
|
||||
try {
|
||||
if (useCache && this.cache.has(url)) {
|
||||
return this.cache.get(url);
|
||||
}
|
||||
|
||||
const res = await getBackendSrv().datasourceRequest({
|
||||
url: this.url + url,
|
||||
method: 'GET',
|
||||
});
|
||||
|
||||
if (useCache) {
|
||||
this.cache.set(url, res);
|
||||
}
|
||||
|
||||
return res;
|
||||
} catch (error) {
|
||||
if (maxRetries > 0) {
|
||||
return this.doRequest(url, useCache, maxRetries - 1);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: update to be completely resource-centric
|
||||
async testDatasource(): Promise<DatasourceValidationResult> {
|
||||
const validationError = this.validateDatasource();
|
||||
if (validationError) {
|
||||
@ -437,22 +401,15 @@ export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
|
||||
}
|
||||
|
||||
try {
|
||||
const url = isGUIDish(resourceOrWorkspace)
|
||||
? `${this.baseUrl}/v1/workspaces/${resourceOrWorkspace}/metadata`
|
||||
: `${this.baseUrl}/v1${resourceOrWorkspace}/metadata`;
|
||||
|
||||
return await this.doRequest(url).then<DatasourceValidationResult>((response: any) => {
|
||||
if (response.status === 200) {
|
||||
return {
|
||||
status: 'success',
|
||||
message: 'Successfully queried the Azure Log Analytics service.',
|
||||
title: 'Success',
|
||||
};
|
||||
}
|
||||
const path = isGUIDish(resourceOrWorkspace)
|
||||
? `${this.resourcePath}/v1/workspaces/${resourceOrWorkspace}/metadata`
|
||||
: `${this.resourcePath}/v1/${resourceOrWorkspace}/metadata`;
|
||||
|
||||
return await this.getResource(path).then<DatasourceValidationResult>((response: any) => {
|
||||
return {
|
||||
status: 'error',
|
||||
message: 'Returned http status code ' + response.status,
|
||||
status: 'success',
|
||||
message: 'Successfully queried the Azure Log Analytics service.',
|
||||
title: 'Success',
|
||||
};
|
||||
});
|
||||
} catch (e) {
|
||||
|
@ -11,11 +11,11 @@ export default class ResponseParser {
|
||||
let data: any[] = [];
|
||||
let columns: any[] = [];
|
||||
for (let i = 0; i < this.results.length; i++) {
|
||||
if (this.results[i].result.data.tables.length === 0) {
|
||||
if (this.results[i].result.tables.length === 0) {
|
||||
continue;
|
||||
}
|
||||
columns = this.results[i].result.data.tables[0].columns;
|
||||
const rows = this.results[i].result.data.tables[0].rows;
|
||||
columns = this.results[i].result.tables[0].columns;
|
||||
const rows = this.results[i].result.tables[0].rows;
|
||||
|
||||
if (this.results[i].query.resultFormat === 'time_series') {
|
||||
data = concat(data, this.parseTimeSeriesResult(this.results[i].query, columns, rows));
|
||||
@ -157,11 +157,11 @@ export default class ResponseParser {
|
||||
|
||||
const valueFieldName = 'subscriptionId';
|
||||
const textFieldName = 'displayName';
|
||||
for (let i = 0; i < result.data.value.length; i++) {
|
||||
if (!find(list, ['value', get(result.data.value[i], valueFieldName)])) {
|
||||
for (let i = 0; i < result.value.length; i++) {
|
||||
if (!find(list, ['value', get(result.value[i], valueFieldName)])) {
|
||||
list.push({
|
||||
text: `${get(result.data.value[i], textFieldName)}`,
|
||||
value: get(result.data.value[i], valueFieldName),
|
||||
text: `${get(result.value[i], textFieldName)}`,
|
||||
value: get(result.value[i], valueFieldName),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -2,14 +2,12 @@ import AzureMonitorDatasource from '../datasource';
|
||||
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { DataSourceInstanceSettings } from '@grafana/data';
|
||||
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
|
||||
import { AzureDataSourceJsonData, DatasourceValidationResult } from '../types';
|
||||
|
||||
const templateSrv = new TemplateSrv();
|
||||
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...((jest.requireActual('@grafana/runtime') as unknown) as object),
|
||||
getBackendSrv: () => backendSrv,
|
||||
getTemplateSrv: () => templateSrv,
|
||||
}));
|
||||
|
||||
@ -20,15 +18,13 @@ interface TestContext {
|
||||
|
||||
describe('AzureMonitorDatasource', () => {
|
||||
const ctx: TestContext = {} as TestContext;
|
||||
const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
ctx.instanceSettings = ({
|
||||
name: 'test',
|
||||
url: 'http://azuremonitor.com',
|
||||
jsonData: { subscriptionId: '9935389e-9122-4ef9-95f9-1513dd24753f' },
|
||||
cloudName: 'azuremonitor',
|
||||
jsonData: { subscriptionId: '9935389e-9122-4ef9-95f9-1513dd24753f', cloudName: 'azuremonitor' },
|
||||
} as unknown) as DataSourceInstanceSettings<AzureDataSourceJsonData>;
|
||||
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
|
||||
});
|
||||
@ -48,7 +44,7 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.instanceSettings.jsonData.azureAuthType = 'msi';
|
||||
datasourceRequestMock.mockImplementation(() => Promise.reject(error));
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockRejectedValue(error);
|
||||
});
|
||||
|
||||
it('should return error status and a detailed error message', () => {
|
||||
@ -61,17 +57,13 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('and a list of resource groups is returned', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.instanceSettings.jsonData.tenantId = 'xxx';
|
||||
ctx.instanceSettings.jsonData.clientId = 'xxx';
|
||||
datasourceRequestMock.mockImplementation(() => Promise.resolve({ data: response, status: 200 }));
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockResolvedValue({ data: response, status: 200 });
|
||||
});
|
||||
|
||||
it('should return success status', () => {
|
||||
@ -85,19 +77,15 @@ describe('AzureMonitorDatasource', () => {
|
||||
describe('When performing metricFindQuery', () => {
|
||||
describe('with a subscriptions query', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{ displayName: 'Primary', subscriptionId: 'sub1' },
|
||||
{ displayName: 'Secondary', subscriptionId: 'sub2' },
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{ displayName: 'Primary', subscriptionId: 'sub1' },
|
||||
{ displayName: 'Secondary', subscriptionId: 'sub2' },
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.instanceSettings.jsonData.azureAuthType = 'msi';
|
||||
datasourceRequestMock.mockImplementation(() => Promise.resolve(response));
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockResolvedValue(response);
|
||||
});
|
||||
|
||||
it('should return a list of subscriptions', async () => {
|
||||
@ -112,15 +100,11 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with a resource groups query', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation(() => Promise.resolve(response));
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockResolvedValue(response);
|
||||
});
|
||||
|
||||
it('should return a list of resource groups', async () => {
|
||||
@ -135,16 +119,12 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with a resource groups query that specifies a subscription id', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
expect(options.url).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
expect(path).toContain('11112222-eeee-4949-9b2d-9106972f9123');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -161,23 +141,18 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with namespaces query', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'test',
|
||||
type: 'Microsoft.Network/networkInterfaces',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{
|
||||
name: 'test',
|
||||
type: 'Microsoft.Network/networkInterfaces',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(options.url).toBe(baseUrl + '/nodesapp/resources?api-version=2018-01-01');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(path).toBe(basePath + '/nodesapp/resources?api-version=2018-01-01');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -192,23 +167,18 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with namespaces query that specifies a subscription id', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'test',
|
||||
type: 'Microsoft.Network/networkInterfaces',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{
|
||||
name: 'test',
|
||||
type: 'Microsoft.Network/networkInterfaces',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(options.url).toBe(baseUrl + '/nodesapp/resources?api-version=2018-01-01');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(path).toBe(basePath + '/nodesapp/resources?api-version=2018-01-01');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -223,27 +193,22 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with resource names query', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'nodeapp',
|
||||
type: 'microsoft.insights/components',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'nodeapp',
|
||||
type: 'microsoft.insights/components',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(options.url).toBe(baseUrl + '/nodeapp/resources?api-version=2018-01-01');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(path).toBe(basePath + '/nodeapp/resources?api-version=2018-01-01');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -258,27 +223,22 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with resource names query and that specifies a subscription id', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'nodeapp',
|
||||
type: 'microsoft.insights/components',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'nodeapp',
|
||||
type: 'microsoft.insights/components',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(options.url).toBe(baseUrl + '/nodeapp/resources?api-version=2018-01-01');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(path).toBe(basePath + '/nodeapp/resources?api-version=2018-01-01');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -298,32 +258,27 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with metric names query', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'Percentage CPU',
|
||||
localizedValue: 'Percentage CPU',
|
||||
},
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'Percentage CPU',
|
||||
localizedValue: 'Percentage CPU',
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(options.url).toBe(
|
||||
baseUrl +
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(path).toBe(
|
||||
basePath +
|
||||
'/nodeapp/providers/microsoft.insights/components/rn/providers/microsoft.insights/' +
|
||||
'metricdefinitions?api-version=2018-01-01&metricnamespace=default'
|
||||
);
|
||||
@ -346,32 +301,27 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with metric names query and specifies a subscription id', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'Percentage CPU',
|
||||
localizedValue: 'Percentage CPU',
|
||||
},
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'Percentage CPU',
|
||||
localizedValue: 'Percentage CPU',
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(options.url).toBe(
|
||||
baseUrl +
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(path).toBe(
|
||||
basePath +
|
||||
'/nodeapp/providers/microsoft.insights/components/rn/providers/microsoft.insights/' +
|
||||
'metricdefinitions?api-version=2018-01-01&metricnamespace=default'
|
||||
);
|
||||
@ -394,32 +344,27 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with metric namespace query', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'Microsoft.Compute-virtualMachines',
|
||||
properties: {
|
||||
metricNamespaceName: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
value: [
|
||||
{
|
||||
name: 'Microsoft.Compute-virtualMachines',
|
||||
properties: {
|
||||
metricNamespaceName: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
{
|
||||
name: 'Telegraf-mem',
|
||||
properties: {
|
||||
metricNamespaceName: 'Telegraf/mem',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Telegraf-mem',
|
||||
properties: {
|
||||
metricNamespaceName: 'Telegraf/mem',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(options.url).toBe(
|
||||
baseUrl +
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(path).toBe(
|
||||
basePath +
|
||||
'/nodeapp/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metricNamespaces?api-version=2017-12-01-preview'
|
||||
);
|
||||
return Promise.resolve(response);
|
||||
@ -439,32 +384,27 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('with metric namespace query and specifies a subscription id', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'Microsoft.Compute-virtualMachines',
|
||||
properties: {
|
||||
metricNamespaceName: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
value: [
|
||||
{
|
||||
name: 'Microsoft.Compute-virtualMachines',
|
||||
properties: {
|
||||
metricNamespaceName: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
{
|
||||
name: 'Telegraf-mem',
|
||||
properties: {
|
||||
metricNamespaceName: 'Telegraf/mem',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Telegraf-mem',
|
||||
properties: {
|
||||
metricNamespaceName: 'Telegraf/mem',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(options.url).toBe(
|
||||
baseUrl +
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/11112222-eeee-4949-9b2d-9106972f9123/resourceGroups';
|
||||
expect(path).toBe(
|
||||
basePath +
|
||||
'/nodeapp/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metricNamespaces?api-version=2017-12-01-preview'
|
||||
);
|
||||
return Promise.resolve(response);
|
||||
@ -487,34 +427,30 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('When performing getSubscriptions', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
id: '/subscriptions/99999999-cccc-bbbb-aaaa-9106972f9572',
|
||||
subscriptionId: '99999999-cccc-bbbb-aaaa-9106972f9572',
|
||||
tenantId: '99999999-aaaa-bbbb-cccc-51c4f982ec48',
|
||||
displayName: 'Primary Subscription',
|
||||
state: 'Enabled',
|
||||
subscriptionPolicies: {
|
||||
locationPlacementId: 'Public_2014-09-01',
|
||||
quotaId: 'PayAsYouGo_2014-09-01',
|
||||
spendingLimit: 'Off',
|
||||
},
|
||||
authorizationSource: 'RoleBased',
|
||||
value: [
|
||||
{
|
||||
id: '/subscriptions/99999999-cccc-bbbb-aaaa-9106972f9572',
|
||||
subscriptionId: '99999999-cccc-bbbb-aaaa-9106972f9572',
|
||||
tenantId: '99999999-aaaa-bbbb-cccc-51c4f982ec48',
|
||||
displayName: 'Primary Subscription',
|
||||
state: 'Enabled',
|
||||
subscriptionPolicies: {
|
||||
locationPlacementId: 'Public_2014-09-01',
|
||||
quotaId: 'PayAsYouGo_2014-09-01',
|
||||
spendingLimit: 'Off',
|
||||
},
|
||||
],
|
||||
count: {
|
||||
type: 'Total',
|
||||
value: 1,
|
||||
authorizationSource: 'RoleBased',
|
||||
},
|
||||
],
|
||||
count: {
|
||||
type: 'Total',
|
||||
value: 1,
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.instanceSettings.jsonData.azureAuthType = 'msi';
|
||||
datasourceRequestMock.mockImplementation(() => Promise.resolve(response));
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockResolvedValue(response);
|
||||
});
|
||||
|
||||
it('should return list of subscriptions', () => {
|
||||
@ -528,15 +464,11 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('When performing getResourceGroups', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [{ name: 'grp1' }, { name: 'grp2' }],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation(() => Promise.resolve(response));
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockResolvedValue(response);
|
||||
});
|
||||
|
||||
it('should return list of Resource Groups', () => {
|
||||
@ -552,41 +484,36 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('When performing getMetricDefinitions', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'test',
|
||||
type: 'Microsoft.Network/networkInterfaces',
|
||||
},
|
||||
{
|
||||
location: 'northeurope',
|
||||
name: 'northeur',
|
||||
type: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
{
|
||||
location: 'westcentralus',
|
||||
name: 'us',
|
||||
type: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
{
|
||||
name: 'IHaveNoMetrics',
|
||||
type: 'IShouldBeFilteredOut',
|
||||
},
|
||||
{
|
||||
name: 'storageTest',
|
||||
type: 'Microsoft.Storage/storageAccounts',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{
|
||||
name: 'test',
|
||||
type: 'Microsoft.Network/networkInterfaces',
|
||||
},
|
||||
{
|
||||
location: 'northeurope',
|
||||
name: 'northeur',
|
||||
type: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
{
|
||||
location: 'westcentralus',
|
||||
name: 'us',
|
||||
type: 'Microsoft.Compute/virtualMachines',
|
||||
},
|
||||
{
|
||||
name: 'IHaveNoMetrics',
|
||||
type: 'IShouldBeFilteredOut',
|
||||
},
|
||||
{
|
||||
name: 'storageTest',
|
||||
type: 'Microsoft.Storage/storageAccounts',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(options.url).toBe(baseUrl + '/nodesapp/resources?api-version=2018-01-01');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(path).toBe(basePath + '/nodesapp/resources?api-version=2018-01-01');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -617,27 +544,22 @@ describe('AzureMonitorDatasource', () => {
|
||||
describe('When performing getResourceNames', () => {
|
||||
describe('and there are no special cases', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'nodeapp',
|
||||
type: 'microsoft.insights/components',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'nodeapp',
|
||||
type: 'microsoft.insights/components',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(options.url).toBe(baseUrl + '/nodeapp/resources?api-version=2018-01-01');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(path).toBe(basePath + '/nodeapp/resources?api-version=2018-01-01');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -655,27 +577,22 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('and the metric definition is blobServices', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'storagetest',
|
||||
type: 'Microsoft.Storage/storageAccounts',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
value: [
|
||||
{
|
||||
name: 'Failure Anomalies - nodeapp',
|
||||
type: 'microsoft.insights/alertrules',
|
||||
},
|
||||
{
|
||||
name: 'storagetest',
|
||||
type: 'Microsoft.Storage/storageAccounts',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(options.url).toBe(baseUrl + '/nodeapp/resources?api-version=2018-01-01');
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups';
|
||||
expect(path).toBe(basePath + '/nodeapp/resources?api-version=2018-01-01');
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -698,53 +615,48 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('When performing getMetricNames', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Total',
|
||||
supportedAggregationTypes: ['None', 'Average', 'Minimum', 'Maximum', 'Total', 'Count'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'FreeCapacity',
|
||||
localizedValue: 'Free capacity',
|
||||
},
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Average',
|
||||
supportedAggregationTypes: ['None', 'Average'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Total',
|
||||
supportedAggregationTypes: ['None', 'Average', 'Minimum', 'Maximum', 'Total', 'Count'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'FreeCapacity',
|
||||
localizedValue: 'Free capacity',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Average',
|
||||
supportedAggregationTypes: ['None', 'Average'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups/nodeapp';
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups/nodeapp';
|
||||
const expected =
|
||||
baseUrl +
|
||||
basePath +
|
||||
'/providers/microsoft.insights/components/resource1' +
|
||||
'/providers/microsoft.insights/metricdefinitions?api-version=2018-01-01&metricnamespace=default';
|
||||
expect(options.url).toBe(expected);
|
||||
expect(path).toBe(expected);
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -770,53 +682,48 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('When performing getMetricMetadata', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Total',
|
||||
supportedAggregationTypes: ['None', 'Average', 'Minimum', 'Maximum', 'Total', 'Count'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'UsedCapacity',
|
||||
localizedValue: 'Used capacity',
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'FreeCapacity',
|
||||
localizedValue: 'Free capacity',
|
||||
},
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Average',
|
||||
supportedAggregationTypes: ['None', 'Average'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Total',
|
||||
supportedAggregationTypes: ['None', 'Average', 'Minimum', 'Maximum', 'Total', 'Count'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'FreeCapacity',
|
||||
localizedValue: 'Free capacity',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Average',
|
||||
supportedAggregationTypes: ['None', 'Average'],
|
||||
metricAvailabilities: [
|
||||
{ timeGrain: 'PT1H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT6H', retention: 'P93D' },
|
||||
{ timeGrain: 'PT12H', retention: 'P93D' },
|
||||
{ timeGrain: 'P1D', retention: 'P93D' },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups/nodeapp';
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups/nodeapp';
|
||||
const expected =
|
||||
baseUrl +
|
||||
basePath +
|
||||
'/providers/microsoft.insights/components/resource1' +
|
||||
'/providers/microsoft.insights/metricdefinitions?api-version=2018-01-01&metricnamespace=default';
|
||||
expect(options.url).toBe(expected);
|
||||
expect(path).toBe(expected);
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
@ -841,56 +748,51 @@ describe('AzureMonitorDatasource', () => {
|
||||
|
||||
describe('When performing getMetricMetadata on metrics with dimensions', () => {
|
||||
const response = {
|
||||
data: {
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'Transactions',
|
||||
localizedValue: 'Transactions',
|
||||
},
|
||||
unit: 'Count',
|
||||
primaryAggregationType: 'Total',
|
||||
supportedAggregationTypes: ['None', 'Average', 'Minimum', 'Maximum', 'Total', 'Count'],
|
||||
isDimensionRequired: false,
|
||||
dimensions: [
|
||||
{
|
||||
value: 'ResponseType',
|
||||
localizedValue: 'Response type',
|
||||
},
|
||||
{
|
||||
value: 'GeoType',
|
||||
localizedValue: 'Geo type',
|
||||
},
|
||||
{
|
||||
value: 'ApiName',
|
||||
localizedValue: 'API name',
|
||||
},
|
||||
],
|
||||
value: [
|
||||
{
|
||||
name: {
|
||||
value: 'Transactions',
|
||||
localizedValue: 'Transactions',
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'FreeCapacity',
|
||||
localizedValue: 'Free capacity',
|
||||
unit: 'Count',
|
||||
primaryAggregationType: 'Total',
|
||||
supportedAggregationTypes: ['None', 'Average', 'Minimum', 'Maximum', 'Total', 'Count'],
|
||||
isDimensionRequired: false,
|
||||
dimensions: [
|
||||
{
|
||||
value: 'ResponseType',
|
||||
localizedValue: 'Response type',
|
||||
},
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Average',
|
||||
supportedAggregationTypes: ['None', 'Average'],
|
||||
{
|
||||
value: 'GeoType',
|
||||
localizedValue: 'Geo type',
|
||||
},
|
||||
{
|
||||
value: 'ApiName',
|
||||
localizedValue: 'API name',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: {
|
||||
value: 'FreeCapacity',
|
||||
localizedValue: 'Free capacity',
|
||||
},
|
||||
],
|
||||
},
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
unit: 'CountPerSecond',
|
||||
primaryAggregationType: 'Average',
|
||||
supportedAggregationTypes: ['None', 'Average'],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||
const baseUrl =
|
||||
'http://azuremonitor.com/azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups/nodeapp';
|
||||
ctx.ds.azureMonitorDatasource.getResource = jest.fn().mockImplementation((path: string) => {
|
||||
const basePath = 'azuremonitor/subscriptions/9935389e-9122-4ef9-95f9-1513dd24753f/resourceGroups/nodeapp';
|
||||
const expected =
|
||||
baseUrl +
|
||||
basePath +
|
||||
'/providers/microsoft.insights/components/resource1' +
|
||||
'/providers/microsoft.insights/metricdefinitions?api-version=2018-01-01&metricnamespace=default';
|
||||
expect(options.url).toBe(expected);
|
||||
expect(path).toBe(expected);
|
||||
return Promise.resolve(response);
|
||||
});
|
||||
});
|
||||
|
@ -9,7 +9,6 @@ import {
  AzureMonitorMetricDefinitionsResponse,
  AzureMonitorResourceGroupsResponse,
  AzureQueryType,
  AzureMonitorMetricsMetadataResponse,
  AzureMetricQuery,
  DatasourceValidationResult,
} from '../types';
@ -21,14 +20,14 @@ import {
  DataQueryRequest,
  TimeRange,
} from '@grafana/data';
import { getBackendSrv, DataSourceWithBackend, getTemplateSrv, FetchResponse } from '@grafana/runtime';
import { DataSourceWithBackend, getTemplateSrv } from '@grafana/runtime';
import { from, Observable } from 'rxjs';
import { mergeMap } from 'rxjs/operators';

import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { getAuthType, getAzureCloud, getAzurePortalUrl } from '../credentials';
import { getManagementApiRoute } from '../api/routes';
import { resourceTypeDisplayNames } from '../azureMetadata';
import { routeNames } from '../utils/common';

const defaultDropdownValue = 'select';

@ -46,11 +45,10 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
  apiVersion = '2018-01-01';
  apiPreviewVersion = '2017-12-01-preview';
  defaultSubscriptionId?: string;
  baseUrl: string;
  resourcePath: string;
  azurePortalUrl: string;
  resourceGroup: string;
  resourceName: string;
  url: string;
  supportedMetricNamespaces: string[] = [];
  timeSrv: TimeSrv;

@ -61,12 +59,9 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
    this.defaultSubscriptionId = instanceSettings.jsonData.subscriptionId;

    const cloud = getAzureCloud(instanceSettings);
    const route = getManagementApiRoute(cloud);
    this.baseUrl = `/${route}/subscriptions`;
    this.azurePortalUrl = getAzurePortalUrl(cloud);

    this.url = instanceSettings.url!;
    this.resourcePath = `${routeNames.azureMonitor}/subscriptions`;
    this.supportedMetricNamespaces = new SupportedNamespaces(cloud).get();
    this.azurePortalUrl = getAzurePortalUrl(cloud);
  }

  isConfigured(): boolean {
@ -344,22 +339,23 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
      return [];
    }

    const url = `${this.baseUrl}?api-version=2019-03-01`;
    return await this.doRequest(url).then((result: any) => {
    return this.getResource(`${this.resourcePath}?api-version=2019-03-01`).then((result: any) => {
      return ResponseParser.parseSubscriptions(result);
    });
  }

  getResourceGroups(subscriptionId: string) {
    const url = `${this.baseUrl}/${subscriptionId}/resourceGroups?api-version=${this.apiVersion}`;
    return this.doRequest(url).then((result: AzureMonitorResourceGroupsResponse) => {
    return this.getResource(
      `${this.resourcePath}/${subscriptionId}/resourceGroups?api-version=${this.apiVersion}`
    ).then((result: AzureMonitorResourceGroupsResponse) => {
      return ResponseParser.parseResponseValues(result, 'name', 'name');
    });
  }

  getMetricDefinitions(subscriptionId: string, resourceGroup: string) {
    const url = `${this.baseUrl}/${subscriptionId}/resourceGroups/${resourceGroup}/resources?api-version=${this.apiVersion}`;
    return this.doRequest(url)
    return this.getResource(
      `${this.resourcePath}/${subscriptionId}/resourceGroups/${resourceGroup}/resources?api-version=${this.apiVersion}`
    )
      .then((result: AzureMonitorMetricDefinitionsResponse) => {
        return ResponseParser.parseResponseValues(result, 'type', 'type');
      })
@ -410,9 +406,9 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
  }

  getResourceNames(subscriptionId: string, resourceGroup: string, metricDefinition: string) {
    const url = `${this.baseUrl}/${subscriptionId}/resourceGroups/${resourceGroup}/resources?api-version=${this.apiVersion}`;

    return this.doRequest(url).then((result: any) => {
    return this.getResource(
      `${this.resourcePath}/${subscriptionId}/resourceGroups/${resourceGroup}/resources?api-version=${this.apiVersion}`
    ).then((result: any) => {
      if (!startsWith(metricDefinition, 'Microsoft.Storage/storageAccounts/')) {
        return ResponseParser.parseResourceNames(result, metricDefinition);
      }
@ -429,7 +425,7 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM

  getMetricNamespaces(subscriptionId: string, resourceGroup: string, metricDefinition: string, resourceName: string) {
    const url = UrlBuilder.buildAzureMonitorGetMetricNamespacesUrl(
      this.baseUrl,
      this.resourcePath,
      subscriptionId,
      resourceGroup,
      metricDefinition,
@ -437,7 +433,7 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
      this.apiPreviewVersion
    );

    return this.doRequest(url).then((result: any) => {
    return this.getResource(url).then((result: any) => {
      return ResponseParser.parseResponseValues(result, 'name', 'properties.metricNamespaceName');
    });
  }
@ -450,7 +446,7 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
    metricNamespace: string
  ) {
    const url = UrlBuilder.buildAzureMonitorGetMetricNamesUrl(
      this.baseUrl,
      this.resourcePath,
      subscriptionId,
      resourceGroup,
      metricDefinition,
@ -459,7 +455,7 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
      this.apiVersion
    );

    return this.doRequest(url).then((result: any) => {
    return this.getResource(url).then((result: any) => {
      return ResponseParser.parseResponseValues(result, 'name.localizedValue', 'name.value');
    });
  }
@ -473,7 +469,7 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
    metricName: string
  ) {
    const url = UrlBuilder.buildAzureMonitorGetMetricNamesUrl(
      this.baseUrl,
      this.resourcePath,
      subscriptionId,
      resourceGroup,
      metricDefinition,
@ -482,8 +478,8 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
      this.apiVersion
    );

    return this.doRequest<AzureMonitorMetricsMetadataResponse>(url).then((result) => {
      return ResponseParser.parseMetadata(result.data, metricName);
    return this.getResource(url).then((result: any) => {
      return ResponseParser.parseMetadata(result, metricName);
    });
  }

@ -494,20 +490,13 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
    }

    try {
      const url = `${this.baseUrl}?api-version=2019-03-01`;

      return await this.doRequest(url).then<DatasourceValidationResult>((response: any) => {
        if (response.status === 200) {
          return {
            status: 'success',
            message: 'Successfully queried the Azure Monitor service.',
            title: 'Success',
          };
        }
      const url = `${this.resourcePath}?api-version=2019-03-01`;

      return await this.getResource(url).then<DatasourceValidationResult>((response: any) => {
        return {
          status: 'error',
          message: 'Returned http status code ' + response.status,
          status: 'success',
          message: 'Successfully queried the Azure Monitor service.',
          title: 'Success',
        };
      });
    } catch (e) {
@ -555,19 +544,4 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
  private isValidConfigField(field?: string): boolean {
    return typeof field === 'string' && field.length > 0;
  }

  doRequest<T = any>(url: string, maxRetries = 1): Promise<FetchResponse<T>> {
    return getBackendSrv()
      .datasourceRequest<T>({
        url: this.url + url,
        method: 'GET',
      })
      .catch((error: any) => {
        if (maxRetries > 0) {
          return this.doRequest<T>(url, maxRetries - 1);
        }

        throw error;
      });
  }
}
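
For context on the hunk above: getResource comes from DataSourceWithBackend and issues a GET against the datasource's resource endpoint, which is served by the new backend CallResourceHandler, so the retry and proxy plumbing in the removed doRequest helper is no longer needed in the frontend. Roughly, and only as a sketch of the URL shape rather than the literal runtime implementation (datasourceId is a placeholder):

import { getBackendSrv } from '@grafana/runtime';

// Approximate equivalent of what this.getResource(path) resolves to for this plugin:
// GET /api/datasources/:id/resources/:path, answered by the Go resource handler.
function getResourceSketch(datasourceId: number, path: string): Promise<any> {
  return getBackendSrv().get(`/api/datasources/${datasourceId}/resources/${path}`);
}

// Example: list subscriptions through the 'azuremonitor' route.
// getResourceSketch(datasourceId, 'azuremonitor/subscriptions?api-version=2019-03-01');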
@ -18,10 +18,10 @@ export default class ResponseParser {
      return list;
    }

    for (let i = 0; i < result.data.value.length; i++) {
      if (!find(list, ['value', get(result.data.value[i], valueFieldName)])) {
        const value = get(result.data.value[i], valueFieldName);
        const text = get(result.data.value[i], textFieldName, value);
    for (let i = 0; i < result.value.length; i++) {
      if (!find(list, ['value', get(result.value[i], valueFieldName)])) {
        const value = get(result.value[i], valueFieldName);
        const text = get(result.value[i], textFieldName, value);

        list.push({
          text: text,
@ -39,11 +39,11 @@ export default class ResponseParser {
      return list;
    }

    for (let i = 0; i < result.data.value.length; i++) {
      if (result.data.value[i].type === metricDefinition) {
    for (let i = 0; i < result.value.length; i++) {
      if (result.value[i].type === metricDefinition) {
        list.push({
          text: result.data.value[i].name,
          value: result.data.value[i].name,
          text: result.value[i].name,
          value: result.value[i].name,
        });
      }
    }
@ -113,11 +113,11 @@ export default class ResponseParser {

    const valueFieldName = 'subscriptionId';
    const textFieldName = 'displayName';
    for (let i = 0; i < result.data.value.length; i++) {
      if (!find(list, ['value', get(result.data.value[i], valueFieldName)])) {
    for (let i = 0; i < result.value.length; i++) {
      if (!find(list, ['value', get(result.value[i], valueFieldName)])) {
        list.push({
          text: `${get(result.data.value[i], textFieldName)}`,
          value: get(result.data.value[i], valueFieldName),
          text: `${get(result.value[i], textFieldName)}`,
          value: get(result.value[i], valueFieldName),
        });
      }
    }
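
The parser edits above all follow from the transport change: datasourceRequest resolved to a FetchResponse whose payload sat under .data, while getResource resolves to the response body itself. A small illustrative comparison, using a made-up subscriptions payload (the interface and values below are only for the example):

interface SubscriptionsBody {
  value: Array<{ subscriptionId: string; displayName: string }>;
}

const body: SubscriptionsBody = {
  value: [{ subscriptionId: 'abc-123', displayName: 'Primary Subscription' }],
};

// Old path: fetchResponse.data.value[i]
// New path: body.value[i] -- which is what ResponseParser now iterates over.
for (const item of body.value) {
  console.log(item.displayName, item.subscriptionId);
}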
@ -13,8 +13,8 @@ import { getBackendSrv, getTemplateSrv, TemplateSrv } from '@grafana/runtime';
import { InsightsConfig } from './InsightsConfig';
import ResponseParser from '../azure_monitor/response_parser';
import { AzureDataSourceJsonData, AzureDataSourceSecureJsonData, AzureDataSourceSettings } from '../types';
import { getAzureCloud, isAppInsightsConfigured } from '../credentials';
import { getManagementApiRoute } from '../api/routes';
import { isAppInsightsConfigured } from '../credentials';
import { routeNames } from '../utils/common';

export type Props = DataSourcePluginOptionsEditorProps<AzureDataSourceJsonData, AzureDataSourceSecureJsonData>;

@ -25,6 +25,7 @@ export interface State {

export class ConfigEditor extends PureComponent<Props, State> {
  templateSrv: TemplateSrv = getTemplateSrv();
  baseURL: string;

  constructor(props: Props) {
    super(props);
@ -33,10 +34,7 @@ export class ConfigEditor extends PureComponent<Props, State> {
      unsaved: false,
      appInsightsInitiallyConfigured: isAppInsightsConfigured(props.options),
    };

    if (this.props.options.id) {
      updateDatasourcePluginOption(this.props, 'url', '/api/datasources/proxy/' + this.props.options.id);
    }
    this.baseURL = `/api/datasources/${this.props.options.id}/resources/${routeNames.azureMonitor}/subscriptions`;
  }

  private updateOptions = (optionsFunc: (options: AzureDataSourceSettings) => AzureDataSourceSettings): void => {
@ -61,12 +59,9 @@ export class ConfigEditor extends PureComponent<Props, State> {
  private getSubscriptions = async (): Promise<Array<SelectableValue<string>>> => {
    await this.saveOptions();

    const cloud = getAzureCloud(this.props.options);
    const route = getManagementApiRoute(cloud);
    const url = `/${route}/subscriptions?api-version=2019-03-01`;

    const query = `?api-version=2019-03-01`;
    const result = await getBackendSrv().datasourceRequest({
      url: this.props.options.url + url,
      url: this.baseURL + query,
      method: 'GET',
    });

@ -76,12 +71,9 @@ export class ConfigEditor extends PureComponent<Props, State> {
  private getLogAnalyticsSubscriptions = async (): Promise<Array<SelectableValue<string>>> => {
    await this.saveOptions();

    const cloud = getAzureCloud(this.props.options);
    const route = getManagementApiRoute(cloud);
    const url = `/${route}/subscriptions?api-version=2019-03-01`;

    const query = `?api-version=2019-03-01`;
    const result = await getBackendSrv().datasourceRequest({
      url: this.props.options.url + url,
      url: this.baseURL + query,
      method: 'GET',
    });

@ -91,12 +83,9 @@ export class ConfigEditor extends PureComponent<Props, State> {
  private getWorkspaces = async (subscriptionId: string): Promise<Array<SelectableValue<string>>> => {
    await this.saveOptions();

    const cloud = getAzureCloud(this.props.options);
    const route = getManagementApiRoute(cloud);
    const url = `/${route}/subscriptions/${subscriptionId}/providers/Microsoft.OperationalInsights/workspaces?api-version=2017-04-26-preview`;

    const workspaceURL = `/${subscriptionId}/providers/Microsoft.OperationalInsights/workspaces?api-version=2017-04-26-preview`;
    const result = await getBackendSrv().datasourceRequest({
      url: this.props.options.url + url,
      url: this.baseURL + workspaceURL,
      method: 'GET',
    });
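
The config editor keeps using getBackendSrv().datasourceRequest, but the target moves from the datasource proxy (/api/datasources/proxy/:id/<cloud route>) to the plugin resource endpoint built in the constructor above. A standalone sketch of the resulting request, assuming a saved datasource id; the import path is relative to the plugin's config editor folder, as in the diff, and datasourceId is a placeholder:

import { getBackendSrv } from '@grafana/runtime';
import { routeNames } from '../utils/common';

// Sketch of the URL shape the config editor now targets for the subscriptions dropdown.
async function fetchSubscriptionsSketch(datasourceId: number) {
  const baseURL = `/api/datasources/${datasourceId}/resources/${routeNames.azureMonitor}/subscriptions`;
  return getBackendSrv().datasourceRequest({
    url: baseURL + '?api-version=2019-03-01',
    method: 'GET',
  });
}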
[5 file diffs suppressed because they are too large]
@ -35,155 +35,6 @@
    "updated": "2018-12-06"
  },

  "routes": [
    {
      "path": "azuremonitor",
      "method": "*",
      "url": "https://management.azure.com",
      "authType": "azure",
      "tokenAuth": {
        "scopes": ["https://management.azure.com/.default"],
        "params": {
          "azure_auth_type": "{{.JsonData.azureAuthType | orEmpty}}",
          "azure_cloud": "AzureCloud",
          "tenant_id": "{{.JsonData.tenantId | orEmpty}}",
          "client_id": "{{.JsonData.clientId | orEmpty}}",
          "client_secret": "{{.SecureJsonData.clientSecret | orEmpty}}"
        }
      },
      "headers": [{ "name": "x-ms-app", "content": "Grafana" }]
    },
    {
      "path": "govazuremonitor",
      "method": "*",
      "url": "https://management.usgovcloudapi.net",
      "authType": "azure",
      "tokenAuth": {
        "scopes": ["https://management.usgovcloudapi.net/.default"],
        "params": {
          "azure_auth_type": "{{.JsonData.azureAuthType | orEmpty}}",
          "azure_cloud": "AzureUSGovernment",
          "tenant_id": "{{.JsonData.tenantId | orEmpty}}",
          "client_id": "{{.JsonData.clientId | orEmpty}}",
          "client_secret": "{{.SecureJsonData.clientSecret | orEmpty}}"
        }
      },
      "headers": [{ "name": "x-ms-app", "content": "Grafana" }]
    },
    {
      "path": "germanyazuremonitor",
      "method": "*",
      "url": "https://management.microsoftazure.de",
      "authType": "azure",
      "tokenAuth": {
        "scopes": ["https://management.microsoftazure.de/.default"],
        "params": {
          "azure_auth_type": "{{.JsonData.azureAuthType | orEmpty}}",
          "azure_cloud": "AzureGermanCloud",
          "tenant_id": "{{.JsonData.tenantId | orEmpty}}",
          "client_id": "{{.JsonData.clientId | orEmpty}}",
          "client_secret": "{{.SecureJsonData.clientSecret | orEmpty}}"
        }
      },
      "headers": [{ "name": "x-ms-app", "content": "Grafana" }]
    },
    {
      "path": "chinaazuremonitor",
      "method": "*",
      "url": "https://management.chinacloudapi.cn",
      "authType": "azure",
      "tokenAuth": {
        "scopes": ["https://management.chinacloudapi.cn/.default"],
        "params": {
          "azure_auth_type": "{{.JsonData.azureAuthType | orEmpty}}",
          "azure_cloud": "AzureChinaCloud",
          "tenant_id": "{{.JsonData.tenantId | orEmpty}}",
          "client_id": "{{.JsonData.clientId | orEmpty}}",
          "client_secret": "{{.SecureJsonData.clientSecret | orEmpty}}"
        }
      },
      "headers": [{ "name": "x-ms-app", "content": "Grafana" }]
    },
    {
      "path": "appinsights",
      "method": "GET",
      "url": "https://api.applicationinsights.io",
      "headers": [
        { "name": "X-API-Key", "content": "{{.SecureJsonData.appInsightsApiKey}}" },
        { "name": "x-ms-app", "content": "Grafana" }
      ]
    },
    {
      "path": "chinaappinsights",
      "method": "GET",
      "url": "https://api.applicationinsights.azure.cn",
      "headers": [
        { "name": "X-API-Key", "content": "{{.SecureJsonData.appInsightsApiKey}}" },
        { "name": "x-ms-app", "content": "Grafana" }
      ]
    },
    {
      "path": "loganalyticsazure",
      "method": "GET",
      "url": "https://api.loganalytics.io/",
      "authType": "azure",
      "tokenAuth": {
        "scopes": ["https://api.loganalytics.io/.default"],
        "params": {
          "azure_auth_type": "{{.JsonData.azureAuthType | orEmpty}}",
          "azure_cloud": "AzureCloud",
          "tenant_id": "{{.JsonData.tenantId | orEmpty}}",
          "client_id": "{{.JsonData.clientId | orEmpty}}",
          "client_secret": "{{.SecureJsonData.clientSecret | orEmpty}}"
        }
      },
      "headers": [
        { "name": "x-ms-app", "content": "Grafana" },
        { "name": "Cache-Control", "content": "public, max-age=60" }
      ]
    },
    {
      "path": "chinaloganalyticsazure",
      "method": "GET",
      "url": "https://api.loganalytics.azure.cn/",
      "authType": "azure",
      "tokenAuth": {
        "scopes": ["https://api.loganalytics.azure.cn/.default"],
        "params": {
          "azure_auth_type": "{{.JsonData.azureAuthType | orEmpty}}",
          "azure_cloud": "AzureChinaCloud",
          "tenant_id": "{{.JsonData.tenantId | orEmpty}}",
          "client_id": "{{.JsonData.clientId | orEmpty}}",
          "client_secret": "{{.SecureJsonData.clientSecret | orEmpty}}"
        }
      },
      "headers": [
        { "name": "x-ms-app", "content": "Grafana" },
        { "name": "Cache-Control", "content": "public, max-age=60" }
      ]
    },
    {
      "path": "govloganalyticsazure",
      "method": "GET",
      "url": "https://api.loganalytics.us/",
      "authType": "azure",
      "tokenAuth": {
        "scopes": ["https://api.loganalytics.us/.default"],
        "params": {
          "azure_auth_type": "{{.JsonData.azureAuthType | orEmpty}}",
          "azure_cloud": "AzureUSGovernment",
          "tenant_id": "{{.JsonData.tenantId | orEmpty}}",
          "client_id": "{{.JsonData.clientId | orEmpty}}",
          "client_secret": "{{.SecureJsonData.clientSecret | orEmpty}}"
        }
      },
      "headers": [
        { "name": "x-ms-app", "content": "Grafana" },
        { "name": "Cache-Control", "content": "public, max-age=60" }
      ]
    }
  ],

  "dependencies": {
    "grafanaVersion": "5.2.x",
    "plugins": []
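
All of the per-cloud proxy routes deleted above (management, Application Insights and Log Analytics hosts plus their token scopes) are what the backend CallResourceHandler now takes over; the frontend only refers to the short route names registered by the Go handler. From a frontend datasource the whole call collapses to a relative resource path, for example (a sketch; any class extending DataSourceWithBackend works the same way, and the import paths are indicative):

import { DataSourceWithBackend } from '@grafana/runtime';
import { routeNames } from '../utils/common';

// The cloud-specific host and credentials are resolved server-side; the frontend
// only picks the logical route and the Azure API path.
async function listSubscriptions(ds: DataSourceWithBackend<any, any>) {
  return ds.getResource(`${routeNames.azureMonitor}/subscriptions?api-version=2019-03-01`);
}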
@ -1,8 +1,3 @@
import { of } from 'rxjs';

import { createFetchResponse } from 'test/helpers/createFetchResponse';
import { backendSrv } from 'app/core/services/backend_srv';

import ResourcePickerData from './resourcePickerData';
import {
  createMockARGResourceContainersResponse,
@ -11,47 +6,34 @@ import {
import { ResourceRowType } from '../components/ResourcePicker/types';
import { createMockInstanceSetttings } from '../__mocks__/instanceSettings';

jest.mock('@grafana/runtime', () => ({
  ...((jest.requireActual('@grafana/runtime') as unknown) as object),
  getBackendSrv: () => backendSrv,
}));

const instanceSettings = createMockInstanceSetttings();
const resourcePickerData = new ResourcePickerData(instanceSettings);
let postResource: jest.Mock;

describe('AzureMonitor resourcePickerData', () => {
  describe('getResourcePickerData', () => {
    let fetchMock: jest.SpyInstance;

    beforeEach(() => {
      fetchMock = jest.spyOn(backendSrv, 'fetch');
      fetchMock.mockImplementation(() => {
        const data = createMockARGResourceContainersResponse();
        return of(createFetchResponse(data));
      });
      postResource = jest.fn().mockResolvedValue(createMockARGResourceContainersResponse());
      resourcePickerData.postResource = postResource;
    });

    afterEach(() => fetchMock.mockReset());

    it('calls ARG API', async () => {
      const resourcePickerData = new ResourcePickerData(instanceSettings);
      await resourcePickerData.getResourcePickerData();

      expect(fetchMock).toHaveBeenCalled();
      const argQuery = fetchMock.mock.calls[0][0].data.query;
      expect(postResource).toHaveBeenCalled();
      const argQuery = postResource.mock.calls[0][1].query;

      expect(argQuery).toContain(`where type == 'microsoft.resources/subscriptions'`);
      expect(argQuery).toContain(`where type == 'microsoft.resources/subscriptions/resourcegroups'`);
    });

    it('returns only subscriptions at the top level', async () => {
      const resourcePickerData = new ResourcePickerData(instanceSettings);
      const results = await resourcePickerData.getResourcePickerData();

      expect(results.map((v) => v.id)).toEqual(['/subscriptions/abc-123', '/subscription/def-456']);
    });

    it('nests resource groups under their subscriptions', async () => {
      const resourcePickerData = new ResourcePickerData(instanceSettings);
      const results = await resourcePickerData.getResourcePickerData();

      expect(results[0].children?.map((v) => v.id)).toEqual([
@ -68,8 +50,6 @@ describe('AzureMonitor resourcePickerData', () => {
  });

  describe('getResourcesForResourceGroup', () => {
    let fetchMock: jest.SpyInstance;

    const resourceRow = {
      id: '/subscription/def-456/resourceGroups/dev',
      name: 'Dev',
@ -78,27 +58,20 @@ describe('AzureMonitor resourcePickerData', () => {
    };

    beforeEach(() => {
      fetchMock = jest.spyOn(backendSrv, 'fetch');
      fetchMock.mockImplementation(() => {
        const data = createARGResourcesResponse();
        return of(createFetchResponse(data));
      });
      postResource = jest.fn().mockResolvedValue(createARGResourcesResponse());
      resourcePickerData.postResource = postResource;
    });

    afterEach(() => fetchMock.mockReset());

    it('requests resources for the specified resource row', async () => {
      const resourcePickerData = new ResourcePickerData(instanceSettings);
      await resourcePickerData.getResourcesForResourceGroup(resourceRow);

      expect(fetchMock).toHaveBeenCalled();
      const argQuery = fetchMock.mock.calls[0][0].data.query;
      expect(postResource).toHaveBeenCalled();
      const argQuery = postResource.mock.calls[0][1].query;

      expect(argQuery).toContain(resourceRow.id);
    });

    it('returns formatted resources', async () => {
      const resourcePickerData = new ResourcePickerData(instanceSettings);
      const results = await resourcePickerData.getResourcesForResourceGroup(resourceRow);

      expect(results.map((v) => v.id)).toEqual([
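
One detail worth noting in the rewritten resource-picker tests: postResource(path, body) takes the resource path as its first argument and the request body as its second, which is why the assertions read the ARG query from mock.calls[0][1].query rather than from a fetch options object. A condensed sketch of the pattern, assuming the mock instance settings helper from this plugin; the empty ARG body is a hypothetical stand-in for the real fixtures:

import ResourcePickerData from './resourcePickerData';
import { createMockInstanceSetttings } from '../__mocks__/instanceSettings';

describe('postResource mocking sketch', () => {
  it('passes the KQL query as the POST body', async () => {
    const picker = new ResourcePickerData(createMockInstanceSetttings());
    // Hypothetical empty ARG body; the real tests use createMockARGResourceContainersResponse().
    const postResource = jest.fn().mockResolvedValue({ data: [] });
    picker.postResource = postResource;

    await picker.getResourcePickerData();

    // calls[0][0] is the resource path, calls[0][1] is the body that carries the query.
    expect(postResource.mock.calls[0][1].query).toContain('microsoft.resources/subscriptions');
  });
});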
@ -1,5 +1,5 @@
import { FetchResponse, getBackendSrv } from '@grafana/runtime';
import { getManagementApiRoute } from '../api/routes';
import { DataSourceWithBackend } from '@grafana/runtime';
import { DataSourceInstanceSettings } from '../../../../../../packages/grafana-data/src';
import {
  locationDisplayNames,
  logsSupportedLocationsKusto,
@ -8,24 +8,24 @@ import {
} from '../azureMetadata';
import { ResourceRowType, ResourceRow, ResourceRowGroup } from '../components/ResourcePicker/types';
import { parseResourceURI } from '../components/ResourcePicker/utils';
import { getAzureCloud } from '../credentials';
import {
  AzureDataSourceInstanceSettings,
  AzureDataSourceJsonData,
  AzureGraphResponse,
  AzureMonitorQuery,
  AzureResourceSummaryItem,
  RawAzureResourceGroupItem,
  RawAzureResourceItem,
} from '../types';
import { routeNames } from '../utils/common';

const RESOURCE_GRAPH_URL = '/providers/Microsoft.ResourceGraph/resources?api-version=2021-03-01';

export default class ResourcePickerData {
  private proxyUrl: string;
  private cloud: string;
export default class ResourcePickerData extends DataSourceWithBackend<AzureMonitorQuery, AzureDataSourceJsonData> {
  private resourcePath: string;

  constructor(instanceSettings: AzureDataSourceInstanceSettings) {
    this.proxyUrl = instanceSettings.url!;
    this.cloud = getAzureCloud(instanceSettings);
  constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
    super(instanceSettings);
    this.resourcePath = `${routeNames.resourceGraph}`;
  }

  static readonly templateVariableGroupID = '$$grafana-templateVariables$$';
@ -54,29 +54,19 @@ export default class ResourcePickerData {
      | order by subscriptionURI asc
    `;

    const { ok, data: response } = await this.makeResourceGraphRequest<RawAzureResourceGroupItem[]>(query);

    // TODO: figure out desired error handling strategy
    if (!ok) {
      throw new Error('unable to fetch resource containers');
    }
    const response = await this.makeResourceGraphRequest<RawAzureResourceGroupItem[]>(query);

    return formatResourceGroupData(response.data);
  }

  async getResourcesForResourceGroup(resourceGroup: ResourceRow) {
    const { ok, data: response } = await this.makeResourceGraphRequest<RawAzureResourceItem[]>(`
    const { data: response } = await this.makeResourceGraphRequest<RawAzureResourceItem[]>(`
      resources
      | where id hasprefix "${resourceGroup.id}"
      | where type in (${logsSupportedResourceTypesKusto}) and location in (${logsSupportedLocationsKusto})
    `);

    // TODO: figure out desired error handling strategy
    if (!ok) {
      throw new Error('unable to fetch resource containers');
    }

    return formatResourceGroupChildren(response.data);
    return formatResourceGroupChildren(response);
  }

  async getResourceURIDisplayProperties(resourceURI: string): Promise<AzureResourceSummaryItem> {
@ -113,51 +103,37 @@ export default class ResourcePickerData {
      | project subscriptionName, resourceGroupName, resourceName
    `;

    const { ok, data: response } = await this.makeResourceGraphRequest<AzureResourceSummaryItem[]>(query);
    const { data: response } = await this.makeResourceGraphRequest<AzureResourceSummaryItem[]>(query);

    if (!ok || !response.data[0]) {
    if (!response.length) {
      throw new Error('unable to fetch resource details');
    }

    return response.data[0];
    return response[0];
  }

  async getResourceURIFromWorkspace(workspace: string) {
    const { ok, data: response } = await this.makeResourceGraphRequest<RawAzureResourceItem[]>(`
    const { data: response } = await this.makeResourceGraphRequest<RawAzureResourceItem[]>(`
      resources
      | where properties['customerId'] == "${workspace}"
      | project id
    `);

    // TODO: figure out desired error handling strategy
    if (!ok) {
      throw new Error('unable to fetch resource containers');
    }

    if (!response.data.length) {
    if (!response.length) {
      throw new Error('unable to find resource for workspace ' + workspace);
    }

    return response.data[0].id;
    return response[0].id;
  }

  async makeResourceGraphRequest<T = unknown>(
    query: string,
    maxRetries = 1
  ): Promise<FetchResponse<AzureGraphResponse<T>>> {
  async makeResourceGraphRequest<T = unknown>(query: string, maxRetries = 1): Promise<AzureGraphResponse<T>> {
    try {
      return await getBackendSrv()
        .fetch<AzureGraphResponse<T>>({
          url: this.proxyUrl + '/' + getManagementApiRoute(this.cloud) + RESOURCE_GRAPH_URL,
          method: 'POST',
          data: {
            query: query,
            options: {
              resultFormat: 'objectArray',
            },
          },
        })
        .toPromise();
      return await this.postResource(this.resourcePath + RESOURCE_GRAPH_URL, {
        query: query,
        options: {
          resultFormat: 'objectArray',
        },
      });
    } catch (error) {
      if (maxRetries > 0) {
        return this.makeResourceGraphRequest(query, maxRetries - 1);
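
The net effect of the makeResourceGraphRequest rewrite: the Azure Resource Graph query is POSTed to the resourcegraph resource route and the method resolves straight to the AzureGraphResponse body (no FetchResponse, no manual proxy URL). A standalone sketch of the same call shape, with the response type reduced to the data field this file actually reads; names outside the diff are assumptions:

import { DataSourceWithBackend } from '@grafana/runtime';
import { routeNames } from '../utils/common';

// Reduced stand-in for AzureGraphResponse<T>: only the field used by the picker.
interface GraphResponseSketch<T> {
  data: T;
}

async function runArgQuery<T = unknown>(
  ds: DataSourceWithBackend<any, any>,
  query: string
): Promise<GraphResponseSketch<T>> {
  return ds.postResource(
    `${routeNames.resourceGraph}/providers/Microsoft.ResourceGraph/resources?api-version=2021-03-01`,
    { query, options: { resultFormat: 'objectArray' } }
  );
}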
@ -28,3 +28,12 @@ export function convertTimeGrainsToMs<T extends { value: string }>(timeGrains: T
  });
  return allowedTimeGrainsMs;
}

// Route definitions shared with the backend.
// Check: /pkg/tsdb/azuremonitor/azuremonitor-resource-handler.go <registerRoutes>
export const routeNames = {
  azureMonitor: 'azuremonitor',
  logAnalytics: 'loganalytics',
  appInsights: 'appinsights',
  resourceGraph: 'resourcegraph',
};