AzureMonitor: move metric deep link code to backend (#39524)

parent 73936fc63c
commit bf9be975ac
@@ -186,7 +186,7 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
 	}

 	url := azurePortalUrl + "/#blade/HubsExtension/ArgQueryBlade/query/" + url.PathEscape(query.InterpolatedQuery)
-	frameWithLink := addConfigData(*frame, url)
+	frameWithLink := addConfigLinks(*frame, url)
 	if frameWithLink.Meta == nil {
 		frameWithLink.Meta = &data.FrameMeta{}
 	}
@@ -196,7 +196,7 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
 	return dataResponse
 }

-func addConfigData(frame data.Frame, dl string) data.Frame {
+func addConfigLinks(frame data.Frame, dl string) data.Frame {
 	for i := range frame.Fields {
 		if frame.Fields[i].Config == nil {
 			frame.Fields[i].Config = &data.FieldConfig{}
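Note: the hunk above only shows the top of the renamed helper. The following is a minimal, self-contained sketch of what addConfigLinks is expected to do, based on the tests later in this diff (which expect a "View in Azure Portal" link on every field); the loop body is assumed rather than copied from the commit, and addConfigLinksSketch is an illustrative name.

package main

import (
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

// Sketch of the renamed helper: walk every field in the frame, make sure it
// has a config, and append an Azure Portal deep link to it.
func addConfigLinksSketch(frame data.Frame, dl string) data.Frame {
	for i := range frame.Fields {
		if frame.Fields[i].Config == nil {
			frame.Fields[i].Config = &data.FieldConfig{}
		}
		deepLink := data.DataLink{
			Title:       "View in Azure Portal",
			TargetBlank: true,
			URL:         dl,
		}
		frame.Fields[i].Config.Links = append(frame.Fields[i].Config.Links, deepLink)
	}
	return frame
}

func main() {
	frame := data.NewFrame("", data.NewField("Percentage CPU", nil, []float64{2.0875}))
	withLink := addConfigLinksSketch(*frame, "https://portal.azure.com/#blade/HubsExtension/ArgQueryBlade")
	fmt.Println(withLink.Fields[0].Config.Links[0].Title)
}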
@@ -120,7 +120,7 @@ func TestAddConfigData(t *testing.T) {
 	frame := data.Frame{
 		Fields: []*data.Field{&field},
 	}
-	frameWithLink := addConfigData(frame, "http://ds")
+	frameWithLink := addConfigLinks(frame, "http://ds")
 	expectedFrameWithLink := data.Frame{
 		Fields: []*data.Field{
 			{
@@ -28,6 +28,9 @@ type AzureMonitorDatasource struct {
 var (
 	// 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
 	defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
+
+	// Used to convert the aggregation value to the Azure enum for deep linking
+	aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
 )

 const azureMonitorAPIVersion = "2018-01-01"
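Note: aggregationTypeMap translates Grafana's aggregation names into the numeric enum the Azure Portal expects in its chart definitions. A small standalone sketch of the lookup-with-fallback behaviour that getQueryUrl applies further down in this diff; portalAggregation is an illustrative helper, not part of the commit.

package main

import "fmt"

// Same mapping as the new aggregationTypeMap above.
var aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}

// portalAggregation returns the Azure Portal enum value for a Grafana
// aggregation name, falling back to Average for unknown names.
func portalAggregation(aggregation string) int {
	if aggType, ok := aggregationTypeMap[aggregation]; ok {
		return aggType
	}
	return aggregationTypeMap["Average"]
}

func main() {
	fmt.Println(portalAggregation("Maximum"), portalAggregation("unknown")) // 3 4
}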
@@ -49,18 +52,7 @@ func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, ori
 	}

 	for _, query := range queries {
-		queryRes, resp, err := e.executeQuery(ctx, query, dsInfo, client, url)
-		if err != nil {
-			return nil, err
-		}
-
-		frames, err := e.parseResponse(resp, query)
-		if err != nil {
-			queryRes.Error = err
-		} else {
-			queryRes.Frames = frames
-		}
-		result.Responses[query.RefID] = queryRes
+		result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo, client, url)
 	}

 	return result, nil
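Note: with executeQuery now returning a single backend.DataResponse (see the signature change in the next hunk), a failing query is recorded under its own RefID instead of aborting the whole request. A minimal sketch of that per-query pattern, with a fake query runner standing in for the real executeQuery; the refIDs and error text are invented for illustration.

package main

import (
	"errors"
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

func main() {
	result := backend.NewQueryDataResponse()

	// Stand-in for the real executeQuery: a failure is attached to the
	// query's own DataResponse instead of being returned as an error.
	fakeExecute := func(refID string) backend.DataResponse {
		dataResponse := backend.DataResponse{}
		if refID == "B" {
			dataResponse.Error = errors.New("simulated Azure Monitor failure")
		}
		return dataResponse
	}

	for _, refID := range []string{"A", "B"} {
		result.Responses[refID] = fakeExecute(refID)
	}

	// Query A still succeeds even though query B failed.
	fmt.Println(result.Responses["A"].Error, result.Responses["B"].Error)
}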
@@ -155,13 +147,13 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
 	return azureMonitorQueries, nil
 }

-func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, dsInfo datasourceInfo, cli *http.Client, url string) (backend.DataResponse, AzureMonitorResponse, error) {
+func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, dsInfo datasourceInfo, cli *http.Client, url string) backend.DataResponse {
 	dataResponse := backend.DataResponse{}

 	req, err := e.createRequest(ctx, dsInfo, url)
 	if err != nil {
 		dataResponse.Error = err
-		return dataResponse, AzureMonitorResponse{}, nil
+		return dataResponse
 	}

 	req.URL.Path = path.Join(req.URL.Path, query.URL)
@@ -181,7 +173,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
 		opentracing.HTTPHeaders,
 		opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
 		dataResponse.Error = err
-		return dataResponse, AzureMonitorResponse{}, nil
+		return dataResponse
 	}

 	azlog.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
@@ -189,7 +181,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
 	res, err := ctxhttp.Do(ctx, cli, req)
 	if err != nil {
 		dataResponse.Error = err
-		return dataResponse, AzureMonitorResponse{}, nil
+		return dataResponse
 	}
 	defer func() {
 		if err := res.Body.Close(); err != nil {
@@ -200,10 +192,22 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
 	data, err := e.unmarshalResponse(res)
 	if err != nil {
 		dataResponse.Error = err
-		return dataResponse, AzureMonitorResponse{}, nil
+		return dataResponse
 	}

-	return dataResponse, data, nil
+	azurePortalUrl, err := getAzurePortalUrl(dsInfo.Cloud)
+	if err != nil {
+		dataResponse.Error = err
+		return dataResponse
+	}
+
+	dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
+	if err != nil {
+		dataResponse.Error = err
+		return dataResponse
+	}
+
+	return dataResponse
 }

 func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
@@ -239,12 +243,16 @@ func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMon
 	return data, nil
 }

-func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *AzureMonitorQuery) (
-	data.Frames, error) {
+func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
 	if len(amr.Value) == 0 {
 		return nil, nil
 	}

+	queryUrl, err := getQueryUrl(query, azurePortalUrl)
+	if err != nil {
+		return nil, err
+	}
+
 	frames := data.Frames{}
 	for _, series := range amr.Value[0].Timeseries {
 		labels := data.Labels{}
@@ -299,12 +307,71 @@ func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *
 			frame.SetRow(i, point.TimeStamp, value)
 		}

-		frames = append(frames, frame)
+		frameWithLink := addConfigLinks(*frame, queryUrl)
+		frames = append(frames, &frameWithLink)
 	}

 	return frames, nil
 }

+// Gets the deep link for the given query
+func getQueryUrl(query *AzureMonitorQuery, azurePortalUrl string) (string, error) {
+	aggregationType := aggregationTypeMap["Average"]
+	aggregation := query.Params.Get("aggregation")
+	if aggregation != "" {
+		if aggType, ok := aggregationTypeMap[aggregation]; ok {
+			aggregationType = aggType
+		}
+	}
+
+	timespan, err := json.Marshal(map[string]interface{}{
+		"absolute": struct {
+			Start string `json:"startTime"`
+			End   string `json:"endTime"`
+		}{
+			Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
+			End:   query.TimeRange.To.UTC().Format(time.RFC3339Nano),
+		},
+	})
+	if err != nil {
+		return "", err
+	}
+	escapedTime := url.QueryEscape(string(timespan))
+
+	id := fmt.Sprintf("/subscriptions/%v/resourceGroups/%v/providers/%v/%v",
+		query.UrlComponents["subscription"],
+		query.UrlComponents["resourceGroup"],
+		query.UrlComponents["metricDefinition"],
+		query.UrlComponents["resourceName"],
+	)
+	chartDef, err := json.Marshal(map[string]interface{}{
+		"v2charts": []interface{}{
+			map[string]interface{}{
+				"metrics": []metricChartDefinition{
+					{
+						ResourceMetadata: map[string]string{
+							"id": id,
+						},
+						Name:            query.Params.Get("metricnames"),
+						AggregationType: aggregationType,
+						Namespace:       query.Params.Get("metricnamespace"),
+						MetricVisualization: metricVisualization{
+							DisplayName:         query.Params.Get("metricnames"),
+							ResourceDisplayName: query.UrlComponents["resourceName"],
+						},
+					},
+				},
+			},
+		},
+	})
+	if err != nil {
+		return "", err
+	}
+	escapedChart := url.QueryEscape(string(chartDef))
+
+	return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
+}
+
 // formatAzureMonitorLegendKey builds the legend key or timeseries name
 // Alias patterns like {{resourcename}} are replaced with the appropriate data values.
 func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
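Note: the percent-encoded blobs in the deep links and in the test fixtures below come from JSON-marshalling the time range and chart definition and then query-escaping them. A standalone sketch of just the TimeContext piece; a map is used here for brevity, so its keys serialize alphabetically, whereas the commit uses an anonymous struct so that startTime is emitted before endTime, matching the expected URLs in the tests.

package main

import (
	"encoding/json"
	"fmt"
	"net/url"
	"time"
)

func main() {
	// Marshal an absolute time range and query-escape it, as getQueryUrl does
	// for the TimeContext URL parameter.
	timespan, err := json.Marshal(map[string]interface{}{
		"absolute": map[string]string{
			"startTime": time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).Format(time.RFC3339Nano),
			"endTime":   time.Date(2018, 3, 15, 13, 34, 0, 0, time.UTC).Format(time.RFC3339Nano),
		},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(url.QueryEscape(string(timespan)))
	// Prints something like:
	// %7B%22absolute%22%3A%7B%22endTime%22%3A%222018-03-15T13%3A34%3A00Z%22%2C%22startTime%22%3A%222018-03-15T13%3A00%3A00Z%22%7D%7D
}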
@@ -171,6 +171,14 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
 			if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(AzureMonitorQuery{}, "Params")); diff != "" {
 				t.Errorf("Result mismatch (-want +got):\n%s", diff)
 			}
+
+			expected := `http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/` +
+				`TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%222018-03-15T13%3A00%3A00Z%22%2C%22endTime%22%3A%222018-03-15T13%3A34%3A00Z%22%7D%7D/` +
+				`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C` +
+				`%22name%22%3A%22Percentage+CPU%22%2C%22aggregationType%22%3A4%2C%22namespace%22%3A%22Microsoft.Compute-virtualMachines%22%2C%22metricVisualization%22%3A%7B%22displayName%22%3A%22Percentage+CPU%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`
+			actual, err := getQueryUrl(queries[0], "http://ds")
+			require.NoError(t, err)
+			require.Equal(t, expected, actual)
 		})
 	}
 }
@@ -182,7 +190,32 @@ func makeDates(startDate time.Time, count int, interval time.Duration) (times []
 	return
 }

+func makeTestDataLink(url string) data.DataLink {
+	return data.DataLink{
+		Title:       "View in Azure Portal",
+		TargetBlank: true,
+		URL:         url,
+	}
+}
+
 func TestAzureMonitorParseResponse(t *testing.T) {
+	// datalinks for the test frames
+	averageLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
+		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F%2FresourceGroups%2F%2Fproviders%2F%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A4%2C%22namespace%22%3A%22%22%2C` +
+		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
+	totalLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
+		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F%2FresourceGroups%2F%2Fproviders%2F%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A1%2C%22namespace%22%3A%22%22%2C` +
+		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
+	maxLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
+		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F%2FresourceGroups%2F%2Fproviders%2F%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A3%2C%22namespace%22%3A%22%22%2C` +
+		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
+	minLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
+		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F%2FresourceGroups%2F%2Fproviders%2F%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A2%2C%22namespace%22%3A%22%22%2C` +
+		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
+	countLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
+		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F%2FresourceGroups%2F%2Fproviders%2F%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A7%2C%22namespace%22%3A%22%22%2C` +
+		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
+
 	tests := []struct {
 		name         string
 		responseFile string
@@ -204,10 +237,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC), 5, time.Minute)),
+						makeDates(time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC), 5, time.Minute),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Percentage CPU", nil, []*float64{
 						ptr.Float64(2.0875), ptr.Float64(2.1525), ptr.Float64(2.155), ptr.Float64(3.6925), ptr.Float64(2.44),
-					}).SetConfig(&data.FieldConfig{Unit: "percent"})),
+					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{averageLink}})),
 			},
 		},
 		{
@@ -224,10 +258,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute)),
+						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
 					data.NewField("Percentage CPU", nil, []*float64{
 						ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
-					}).SetConfig(&data.FieldConfig{Unit: "percent"})),
+					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{totalLink}})),
 			},
 		},
 		{
@@ -244,10 +279,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 14, 26, 0, 0, time.UTC), 5, time.Minute)),
+						makeDates(time.Date(2019, 2, 9, 14, 26, 0, 0, time.UTC), 5, time.Minute),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{maxLink}}),
 					data.NewField("Percentage CPU", nil, []*float64{
 						ptr.Float64(3.07), ptr.Float64(2.92), ptr.Float64(2.87), ptr.Float64(2.27), ptr.Float64(2.52),
-					}).SetConfig(&data.FieldConfig{Unit: "percent"})),
+					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{maxLink}})),
 			},
 		},
 		{
@@ -264,10 +300,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 14, 43, 0, 0, time.UTC), 5, time.Minute)),
+						makeDates(time.Date(2019, 2, 9, 14, 43, 0, 0, time.UTC), 5, time.Minute),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{minLink}}),
 					data.NewField("Percentage CPU", nil, []*float64{
 						ptr.Float64(1.51), ptr.Float64(2.38), ptr.Float64(1.69), ptr.Float64(2.27), ptr.Float64(1.96),
-					}).SetConfig(&data.FieldConfig{Unit: "percent"})),
+					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{minLink}})),
 			},
 		},
 		{
@@ -284,10 +321,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 14, 44, 0, 0, time.UTC), 5, time.Minute)),
+						makeDates(time.Date(2019, 2, 9, 14, 44, 0, 0, time.UTC), 5, time.Minute),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{countLink}}),
 					data.NewField("Percentage CPU", nil, []*float64{
 						ptr.Float64(4), ptr.Float64(4), ptr.Float64(4), ptr.Float64(4), ptr.Float64(4),
-					}).SetConfig(&data.FieldConfig{Unit: "percent"})),
+					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{countLink}})),
 			},
 		},
 		{
@@ -304,21 +342,24 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
+						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Blob Count", data.Labels{"blobtype": "PageBlob"},
-						[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short"})),
+						[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),

 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
+						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Blob Count", data.Labels{"blobtype": "BlockBlob"},
-						[]*float64{ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), nil}).SetConfig(&data.FieldConfig{Unit: "short"})),
+						[]*float64{ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),

 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
+						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Blob Count", data.Labels{"blobtype": "Azure Data Lake Storage"},
-						[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), nil}).SetConfig(&data.FieldConfig{Unit: "short"})),
+						[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),
 			},
 		},
 		{
@@ -336,10 +377,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute)),
+						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
 					data.NewField("Percentage CPU", nil, []*float64{
 						ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
-					}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU"})),
+					}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", Links: []data.DataLink{totalLink}})),
 			},
 		},
 		{
@@ -357,23 +399,26 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
+						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Blob Count", data.Labels{"blobtype": "PageBlob"},
-						[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=PageBlob"})),
+						[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=PageBlob", Links: []data.DataLink{averageLink}})),

 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
+						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Blob Count", data.Labels{"blobtype": "BlockBlob"}, []*float64{
 						ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), nil,
-					}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=BlockBlob"})),
+					}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=BlockBlob", Links: []data.DataLink{averageLink}})),

 				data.NewFrame("",
 					data.NewField("Time", nil,
-						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
+						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Blob Count", data.Labels{"blobtype": "Azure Data Lake Storage"}, []*float64{
 						ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), nil,
-					}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=Azure Data Lake Storage"})),
+					}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=Azure Data Lake Storage", Links: []data.DataLink{averageLink}})),
 			},
 		},
 		{
|
|||||||
expectedFrames: data.Frames{
|
expectedFrames: data.Frames{
|
||||||
data.NewFrame("",
|
data.NewFrame("",
|
||||||
data.NewField("Time", nil,
|
data.NewField("Time", nil,
|
||||||
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour)),
|
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
|
||||||
|
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
|
||||||
data.NewField("Blob Capacity", data.Labels{"blobtype": "PageBlob", "tier": "Standard"},
|
data.NewField("Blob Capacity", data.Labels{"blobtype": "PageBlob", "tier": "Standard"},
|
||||||
[]*float64{ptr.Float64(675530), ptr.Float64(675530), ptr.Float64(675530)}).SetConfig(
|
[]*float64{ptr.Float64(675530), ptr.Float64(675530), ptr.Float64(675530)}).SetConfig(
|
||||||
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=PageBlob, Tier=Standard}"})),
|
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=PageBlob, Tier=Standard}", Links: []data.DataLink{averageLink}})),
|
||||||
|
|
||||||
data.NewFrame("",
|
data.NewFrame("",
|
||||||
data.NewField("Time", nil,
|
data.NewField("Time", nil,
|
||||||
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour)),
|
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
|
||||||
|
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
|
||||||
data.NewField("Blob Capacity", data.Labels{"blobtype": "BlockBlob", "tier": "Hot"},
|
data.NewField("Blob Capacity", data.Labels{"blobtype": "BlockBlob", "tier": "Hot"},
|
||||||
[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
|
[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
|
||||||
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=BlockBlob, Tier=Hot}"})),
|
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=BlockBlob, Tier=Hot}", Links: []data.DataLink{averageLink}})),
|
||||||
|
|
||||||
data.NewFrame("",
|
data.NewFrame("",
|
||||||
data.NewField("Time", nil,
|
data.NewField("Time", nil,
|
||||||
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour)),
|
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
|
||||||
|
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
|
||||||
data.NewField("Blob Capacity", data.Labels{"blobtype": "Azure Data Lake Storage", "tier": "Cool"},
|
data.NewField("Blob Capacity", data.Labels{"blobtype": "Azure Data Lake Storage", "tier": "Cool"},
|
||||||
[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
|
[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
|
||||||
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=Azure Data Lake Storage, Tier=Cool}"})),
|
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=Azure Data Lake Storage, Tier=Cool}", Links: []data.DataLink{averageLink}})),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -426,10 +474,11 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 			expectedFrames: data.Frames{
 				data.NewFrame("",
 					data.NewField("Time", nil,
-						[]time.Time{time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC)}),
+						[]time.Time{time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC)},
+					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
 					data.NewField("Percentage CPU", nil, []*float64{
 						ptr.Float64(2.0875),
-					}).SetConfig(&data.FieldConfig{DisplayName: "custom"})),
+					}).SetConfig(&data.FieldConfig{DisplayName: "custom", Links: []data.DataLink{averageLink}})),
 			},
 		},
 	}
@@ -438,7 +487,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			azData := loadTestFile(t, "azuremonitor/"+tt.responseFile)
-			dframes, err := datasource.parseResponse(azData, tt.mockQuery)
+			dframes, err := datasource.parseResponse(azData, tt.mockQuery, "http://ds")
 			require.NoError(t, err)
 			require.NotNil(t, dframes)

@@ -155,6 +155,22 @@ type argJSONQuery struct {
 	} `json:"azureResourceGraph"`
 }

+// metricChartDefinition is the JSON model for a metrics chart definition
+type metricChartDefinition struct {
+	ResourceMetadata    map[string]string   `json:"resourceMetadata"`
+	Name                string              `json:"name"`
+	AggregationType     int                 `json:"aggregationType"`
+	Namespace           string              `json:"namespace"`
+	MetricVisualization metricVisualization `json:"metricVisualization"`
+}
+
+// metricVisualization is the JSON model for the visualization field of a
+// metricChartDefinition
+type metricVisualization struct {
+	DisplayName         string `json:"displayName"`
+	ResourceDisplayName string `json:"resourceDisplayName"`
+}
+
 // InsightsDimensions will unmarshal from a JSON string, or an array of strings,
 // into a string array. This exists to support an older query format which is updated
 // when a user saves the query or it is sent from the front end, but may not be when
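Note: these two structs are what getQueryUrl marshals into the ChartDefinition parameter of the deep link. A small sketch below uses local copies of the types with illustrative values (the resource ID and metric names are examples, not taken from the commit) to show the JSON shape that ends up percent-encoded in the URL.

package main

import (
	"encoding/json"
	"fmt"
)

// Local copies of the structs added in this commit.
type metricChartDefinition struct {
	ResourceMetadata    map[string]string   `json:"resourceMetadata"`
	Name                string              `json:"name"`
	AggregationType     int                 `json:"aggregationType"`
	Namespace           string              `json:"namespace"`
	MetricVisualization metricVisualization `json:"metricVisualization"`
}

type metricVisualization struct {
	DisplayName         string `json:"displayName"`
	ResourceDisplayName string `json:"resourceDisplayName"`
}

func main() {
	chart := metricChartDefinition{
		ResourceMetadata: map[string]string{"id": "/subscriptions/sub/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/grafana"},
		Name:             "Percentage CPU",
		AggregationType:  4, // Average in the Azure Portal enum
		Namespace:        "Microsoft.Compute-virtualMachines",
		MetricVisualization: metricVisualization{
			DisplayName:         "Percentage CPU",
			ResourceDisplayName: "grafana",
		},
	}
	out, err := json.Marshal(chart)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}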
@@ -9,20 +9,10 @@ import {
   AzureMonitorMetricDefinitionsResponse,
   AzureMonitorResourceGroupsResponse,
   AzureQueryType,
-  AzureMetricQuery,
   DatasourceValidationResult,
 } from '../types';
-import {
-  DataSourceInstanceSettings,
-  ScopedVars,
-  MetricFindValue,
-  DataQueryResponse,
-  DataQueryRequest,
-  TimeRange,
-} from '@grafana/data';
+import { DataSourceInstanceSettings, ScopedVars, MetricFindValue } from '@grafana/data';
 import { DataSourceWithBackend, getTemplateSrv } from '@grafana/runtime';
-import { from, Observable } from 'rxjs';
-import { mergeMap } from 'rxjs/operators';

 import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
 import { getAuthType, getAzureCloud, getAzurePortalUrl } from '../credentials';
@@ -31,16 +21,6 @@ import { routeNames } from '../utils/common';

 const defaultDropdownValue = 'select';

-// Used to convert our aggregation value to the Azure enum for deep linking
-const aggregationTypeMap: Record<string, number> = {
-  None: 0,
-  Total: 1,
-  Minimum: 2,
-  Maximum: 3,
-  Average: 4,
-  Count: 7,
-};
-
 export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureMonitorQuery, AzureDataSourceJsonData> {
   apiVersion = '2018-01-01';
   apiPreviewVersion = '2017-12-01-preview';
@@ -86,90 +66,6 @@ export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureM
     );
   }

-  query(request: DataQueryRequest<AzureMonitorQuery>): Observable<DataQueryResponse> {
-    const metricQueries = request.targets.reduce((prev: Record<string, AzureMonitorQuery>, cur) => {
-      prev[cur.refId] = cur;
-      return prev;
-    }, {});
-
-    return super.query(request).pipe(
-      mergeMap((res: DataQueryResponse) => {
-        return from(this.processResponse(res, metricQueries));
-      })
-    );
-  }
-
-  async processResponse(
-    res: DataQueryResponse,
-    metricQueries: Record<string, AzureMonitorQuery>
-  ): Promise<DataQueryResponse> {
-    if (res.data) {
-      for (const df of res.data) {
-        const metricQuery = metricQueries[df.refId];
-        if (!metricQuery.azureMonitor || !metricQuery.subscription) {
-          continue;
-        }
-
-        const url = this.buildAzurePortalUrl(
-          metricQuery.azureMonitor,
-          metricQuery.subscription,
-          this.timeSrv.timeRange()
-        );
-
-        for (const field of df.fields) {
-          field.config.links = [
-            {
-              url: url,
-              title: 'View in Azure Portal',
-              targetBlank: true,
-            },
-          ];
-        }
-      }
-    }
-    return res;
-  }
-
-  stringifyAzurePortalUrlParam(value: string | object): string {
-    const stringValue = typeof value === 'string' ? value : JSON.stringify(value);
-    return encodeURIComponent(stringValue);
-  }
-
-  buildAzurePortalUrl(metricQuery: AzureMetricQuery, subscriptionId: string, timeRange: TimeRange) {
-    const aggregationType =
-      (metricQuery.aggregation && aggregationTypeMap[metricQuery.aggregation]) ?? aggregationTypeMap.Average;
-
-    const chartDef = this.stringifyAzurePortalUrlParam({
-      v2charts: [
-        {
-          metrics: [
-            {
-              resourceMetadata: {
-                id: `/subscriptions/${subscriptionId}/resourceGroups/${metricQuery.resourceGroup}/providers/${metricQuery.metricDefinition}/${metricQuery.resourceName}`,
-              },
-              name: metricQuery.metricName,
-              aggregationType: aggregationType,
-              namespace: metricQuery.metricNamespace,
-              metricVisualization: {
-                displayName: metricQuery.metricName,
-                resourceDisplayName: metricQuery.resourceName,
-              },
-            },
-          ],
-        },
-      ],
-    });
-
-    const timeContext = this.stringifyAzurePortalUrlParam({
-      absolute: {
-        startTime: timeRange.from,
-        endTime: timeRange.to,
-      },
-    });
-
-    return `${this.azurePortalUrl}/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/${timeContext}/ChartDefinition/${chartDef}`;
-  }
-
   applyTemplateVariables(target: AzureMonitorQuery, scopedVars: ScopedVars): AzureMonitorQuery {
     const item = target.azureMonitor;
