Azure Monitor: Change response to be dataframes (#25123)

Note: this covers only the Azure Monitor service within the Azure Monitor datasource (not Application Insights, Insights Analytics, or Log Analytics yet).

Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
parent 07582a8e85
commit 376a9d35e4
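The heart of the change is in the Go hunks below: `parseResponse` stops appending `tsdb.TimePoint` values to a `tsdb.TimeSeries` and instead fills a data frame and Arrow-encodes it onto `queryRes.Dataframes`. Here is a minimal sketch of that pattern, assuming the grafana-plugin-sdk-go `data` package; the helper name `buildFrame` and the sample values are illustrative, not part of the commit.

```go
package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

// buildFrame converts one time series into an Arrow-encoded data frame,
// mirroring the pattern used in parseResponse below (illustrative helper).
func buildFrame(refID, metricName, unit string, times []time.Time, values []float64) ([]byte, error) {
	// One time field and one float64 field, pre-sized to the series length.
	frame := data.NewFrameOfFieldTypes("", len(times), data.FieldTypeTime, data.FieldTypeFloat64)
	frame.RefID = refID
	frame.Fields[1].Name = metricName
	frame.Fields[1].SetConfig(&data.FieldConfig{Unit: unit})

	// Fill one row per data point.
	for i := range times {
		frame.SetRow(i, times[i], values[i])
	}

	// The query result carries frames as Arrow-encoded bytes.
	return frame.MarshalArrow()
}

func main() {
	now := time.Now()
	b, err := buildFrame("A", "grafana.Percentage CPU", "Percent",
		[]time.Time{now, now.Add(time.Minute)}, []float64{2.0875, 2.1525})
	if err != nil {
		panic(err)
	}
	fmt.Printf("encoded %d bytes of Arrow data\n", len(b))
}
```

On the frontend, `DataSourceWithBackend` turns the backend response back into frames (note the `toDataQueryResponse` import in the first hunk), which is why the updated datasource test asserts on `DataFrame` fields rather than datapoint arrays.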
@@ -5,8 +5,9 @@ import {
  DataSourceInstanceSettings,
  DataQuery,
  DataSourceJsonData,
  ScopedVars,
} from '@grafana/data';
import { Observable, from } from 'rxjs';
import { Observable, from, of } from 'rxjs';
import { config } from '..';
import { getBackendSrv } from '../services';
import { toDataQueryResponse } from './queryResponse';

@@ -53,9 +54,13 @@ export class DataSourceWithBackend<
  /**
   * Ideally final -- any other implementation may not work as expected
   */
  query(request: DataQueryRequest): Observable<DataQueryResponse> {
    const { targets, intervalMs, maxDataPoints, range, requestId } = request;
  query(request: DataQueryRequest<TQuery>): Observable<DataQueryResponse> {
    const { intervalMs, maxDataPoints, range, requestId } = request;
    const orgId = config.bootData.user.orgId;
    let targets = request.targets;
    if (this.filterQuery) {
      targets = targets.filter(q => this.filterQuery!(q));
    }
    const queries = targets.map(q => {
      if (q.datasource === ExpressionDatasourceID) {
        return {

@@ -70,7 +75,7 @@ export class DataSourceWithBackend<
        throw new Error('Unknown Datasource: ' + q.datasource);
      }
      return {
        ...this.applyTemplateVariables(q),
        ...this.applyTemplateVariables(q, request.scopedVars),
        datasourceId: ds.id,
        intervalMs,
        maxDataPoints,

@@ -78,6 +83,11 @@ export class DataSourceWithBackend<
      };
    });

    // Return early if no queries exist
    if (!queries.length) {
      return of({ data: [] });
    }

    const body: any = {
      queries,
    };

@@ -105,12 +115,19 @@ export class DataSourceWithBackend<
    return from(req);
  }

  /**
   * Override to skip executing a query
   *
   * @virtual
   */
  filterQuery?(query: TQuery): boolean;

  /**
   * Override to apply template variables
   *
   * @virtual
   */
  applyTemplateVariables(query: DataQuery) {
  applyTemplateVariables(query: TQuery, scopedVars: ScopedVars): Record<string, any> {
    return query;
  }

@@ -12,6 +12,7 @@ import (
    "strings"
    "time"

    "github.com/grafana/grafana-plugin-sdk-go/data"
    "github.com/grafana/grafana/pkg/api/pluginproxy"
    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins"

@@ -20,7 +21,6 @@ import (
    opentracing "github.com/opentracing/opentracing-go"
    "golang.org/x/net/context/ctxhttp"

    "github.com/grafana/grafana/pkg/components/null"
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb"
)

@@ -260,25 +260,32 @@ func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMon
    return data, nil
}

func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error {
    if len(data.Value) == 0 {
func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, amr AzureMonitorResponse, query *AzureMonitorQuery) error {
    if len(amr.Value) == 0 {
        return nil
    }

    for _, series := range data.Value[0].Timeseries {
        points := []tsdb.TimePoint{}

    for _, series := range amr.Value[0].Timeseries {
        metadataName := ""
        metadataValue := ""
        if len(series.Metadatavalues) > 0 {
            metadataName = series.Metadatavalues[0].Name.LocalizedValue
            metadataValue = series.Metadatavalues[0].Value
        }
        metricName := formatAzureMonitorLegendKey(query.Alias, query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue, data.Namespace, data.Value[0].ID)
        metricName := formatAzureMonitorLegendKey(query.Alias, query.UrlComponents["resourceName"], amr.Value[0].Name.LocalizedValue, metadataName, metadataValue, amr.Namespace, amr.Value[0].ID)

        for _, point := range series.Data {
        frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeFloat64)
        frame.RefID = query.RefID
        frame.Fields[1].Name = metricName
        frame.Fields[1].SetConfig(&data.FieldConfig{
            Unit: amr.Value[0].Unit,
        })

        requestedAgg := query.Params.Get("aggregation")

        for i, point := range series.Data {
            var value float64
            switch query.Params.Get("aggregation") {
            switch requestedAgg {
            case "Average":
                value = point.Average
            case "Total":

@@ -292,15 +299,17 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data
            default:
                value = point.Count
            }
            points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.TimeStamp).Unix())*1000))

            frame.SetRow(i, point.TimeStamp, value)
        }

        queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
            Name:   metricName,
            Points: points,
        })
        encodedFrame, err := frame.MarshalArrow()
        if err != nil {
            queryRes.Error = fmt.Errorf("failed to encode dataframe response into arrow: %w", err)
        }

        queryRes.Dataframes = append(queryRes.Dataframes, encodedFrame)
    }
    queryRes.Meta.Set("unit", data.Value[0].Unit)

    return nil
}

@@ -9,19 +9,99 @@ import (
    "testing"
    "time"

    "github.com/google/go-cmp/cmp"
    "github.com/google/go-cmp/cmp/cmpopts"
    "github.com/grafana/grafana-plugin-sdk-go/data"
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/tsdb"

    . "github.com/smartystreets/goconvey/convey"
    "github.com/stretchr/testify/require"
)

func TestAzureMonitorDatasource(t *testing.T) {
    Convey("AzureMonitorDatasource", t, func() {
        datasource := &AzureMonitorDatasource{}
func TestAzureMonitorBuildQueries(t *testing.T) {
    datasource := &AzureMonitorDatasource{}
    fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)

        Convey("Parse queries from frontend and build AzureMonitor API queries", func() {
            fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
    tests := []struct {
        name string
        azureMonitorVariedProperties map[string]interface{}
        azureMonitorQueryTarget string
        expectedInterval string
        queryIntervalMS int64
    }{
        {
            name: "Parse queries from frontend and build AzureMonitor API queries",
            azureMonitorVariedProperties: map[string]interface{}{
                "timeGrain": "PT1M",
                "top": "10",
            },
            expectedInterval: "PT1M",
            azureMonitorQueryTarget: "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
        },
        {
            name: "time grain set to auto",
            azureMonitorVariedProperties: map[string]interface{}{
                "timeGrain": "auto",
                "top": "10",
            },
            queryIntervalMS: 400000,
            expectedInterval: "PT15M",
            azureMonitorQueryTarget: "aggregation=Average&api-version=2018-01-01&interval=PT15M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
        },
        {
            name: "time grain set to auto",
            azureMonitorVariedProperties: map[string]interface{}{
                "timeGrain": "auto",
                "allowedTimeGrainsMs": []int64{60000, 300000},
                "top": "10",
            },
            queryIntervalMS: 400000,
            expectedInterval: "PT5M",
            azureMonitorQueryTarget: "aggregation=Average&api-version=2018-01-01&interval=PT5M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
        },
        {
            name: "has a dimension filter",
            azureMonitorVariedProperties: map[string]interface{}{
                "timeGrain": "PT1M",
                "dimension": "blob",
                "dimensionFilter": "*",
                "top": "30",
            },
            queryIntervalMS: 400000,
            expectedInterval: "PT1M",
            azureMonitorQueryTarget: "%24filter=blob+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
        },
        {
            name: "has a dimension filter",
            azureMonitorVariedProperties: map[string]interface{}{
                "timeGrain": "PT1M",
                "dimension": "None",
                "dimensionFilter": "*",
                "top": "10",
            },
            queryIntervalMS: 400000,
            expectedInterval: "PT1M",
            azureMonitorQueryTarget: "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
        },
    }

    commonAzureModelProps := map[string]interface{}{
        "aggregation": "Average",
        "resourceGroup": "grafanastaging",
        "resourceName": "grafana",
        "metricDefinition": "Microsoft.Compute/virtualMachines",
        "metricNamespace": "Microsoft.Compute-virtualMachines",
        "metricName": "Percentage CPU",

        "alias": "testalias",
        "queryType": "Azure Monitor",
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            for k, v := range commonAzureModelProps {
                tt.azureMonitorVariedProperties[k] = v
            }
            tsdbQuery := &tsdb.TsdbQuery{
                TimeRange: &tsdb.TimeRange{
                    From: fmt.Sprintf("%v", fromStart.Unix()*1000),
@@ -36,357 +116,325 @@ func TestAzureMonitorDatasource(t *testing.T) {
                },
                Model: simplejson.NewFromAny(map[string]interface{}{
                    "subscription": "12345678-aaaa-bbbb-cccc-123456789abc",
                    "azureMonitor": map[string]interface{}{
                        "timeGrain": "PT1M",
                        "aggregation": "Average",
                        "resourceGroup": "grafanastaging",
                        "resourceName": "grafana",
                        "metricDefinition": "Microsoft.Compute/virtualMachines",
                        "metricNamespace": "Microsoft.Compute-virtualMachines",
                        "metricName": "Percentage CPU",
                        "top": "10",
                        "alias": "testalias",
                        "queryType": "Azure Monitor",
                    },
                }),
                RefId: "A",
                    "azureMonitor": tt.azureMonitorVariedProperties,
                },
                ),
                RefId: "A",
                IntervalMs: tt.queryIntervalMS,
            },
        },
    }
            Convey("and is a normal query", func() {
                queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
                So(err, ShouldBeNil)

                So(len(queries), ShouldEqual, 1)
                So(queries[0].RefID, ShouldEqual, "A")
                So(queries[0].URL, ShouldEqual, "12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics")
                So(queries[0].Target, ShouldEqual, "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
                So(len(queries[0].Params), ShouldEqual, 6)
                So(queries[0].Params["timespan"][0], ShouldEqual, "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z")
                So(queries[0].Params["api-version"][0], ShouldEqual, "2018-01-01")
                So(queries[0].Params["aggregation"][0], ShouldEqual, "Average")
                So(queries[0].Params["metricnames"][0], ShouldEqual, "Percentage CPU")
                So(queries[0].Params["interval"][0], ShouldEqual, "PT1M")
                So(queries[0].Alias, ShouldEqual, "testalias")
            })

            Convey("and has a time grain set to auto", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "azureMonitor": map[string]interface{}{
                        "timeGrain": "auto",
                        "aggregation": "Average",
                        "resourceGroup": "grafanastaging",
                        "resourceName": "grafana",
                        "metricDefinition": "Microsoft.Compute/virtualMachines",
                        "metricNamespace": "Microsoft.Compute-virtualMachines",
                        "metricName": "Percentage CPU",
                        "alias": "testalias",
                        "queryType": "Azure Monitor",
                    },
                })
                tsdbQuery.Queries[0].IntervalMs = 400000

                queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
                So(err, ShouldBeNil)

                So(queries[0].Params["interval"][0], ShouldEqual, "PT15M")
            })

            Convey("and has a time grain set to auto and the metric has a limited list of allowed time grains", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "azureMonitor": map[string]interface{}{
                        "timeGrain": "auto",
                        "aggregation": "Average",
                        "resourceGroup": "grafanastaging",
                        "resourceName": "grafana",
                        "metricDefinition": "Microsoft.Compute/virtualMachines",
                        "metricNamespace": "Microsoft.Compute-virtualMachines",
                        "metricName": "Percentage CPU",
                        "alias": "testalias",
                        "queryType": "Azure Monitor",
                        "allowedTimeGrainsMs": []int64{60000, 300000},
                    },
                })
                tsdbQuery.Queries[0].IntervalMs = 400000

                queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
                So(err, ShouldBeNil)

                So(queries[0].Params["interval"][0], ShouldEqual, "PT5M")
            })

            Convey("and has a dimension filter", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "azureMonitor": map[string]interface{}{
                        "timeGrain": "PT1M",
                        "aggregation": "Average",
                        "resourceGroup": "grafanastaging",
                        "resourceName": "grafana",
                        "metricDefinition": "Microsoft.Compute/virtualMachines",
                        "metricNamespace": "Microsoft.Compute-virtualMachines",
                        "metricName": "Percentage CPU",
                        "alias": "testalias",
                        "queryType": "Azure Monitor",
                        "dimension": "blob",
                        "dimensionFilter": "*",
                        "top": "30",
                    },
                })

                queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
                So(err, ShouldBeNil)

                So(queries[0].Target, ShouldEqual, "%24filter=blob+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30")

            })

            Convey("and has a dimension filter set to None", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "azureMonitor": map[string]interface{}{
                        "timeGrain": "PT1M",
                        "aggregation": "Average",
                        "resourceGroup": "grafanastaging",
                        "resourceName": "grafana",
                        "metricDefinition": "Microsoft.Compute/virtualMachines",
                        "metricNamespace": "Microsoft.Compute-virtualMachines",
                        "metricName": "Percentage CPU",
                        "alias": "testalias",
                        "queryType": "Azure Monitor",
                        "dimension": "None",
                        "dimensionFilter": "*",
                        "top": "10",
                    },
                })

                queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
                So(err, ShouldBeNil)

                So(queries[0].Target, ShouldEqual, "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute-virtualMachines&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")

            })
        })

        Convey("Parse AzureMonitor API response in the time series format", func() {
            Convey("when data from query aggregated as average to one time series", func() {
                data, err := loadTestFile("azuremonitor/1-azure-monitor-response-avg.json")
                So(err, ShouldBeNil)
                So(data.Interval, ShouldEqual, "PT1M")

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Average"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(len(res.Series), ShouldEqual, 1)
                So(res.Series[0].Name, ShouldEqual, "grafana.Percentage CPU")
                So(len(res.Series[0].Points), ShouldEqual, 5)

                So(res.Series[0].Points[0][0].Float64, ShouldEqual, 2.0875)
                So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1549620780000))

                So(res.Series[0].Points[1][0].Float64, ShouldEqual, 2.1525)
                So(res.Series[0].Points[1][1].Float64, ShouldEqual, int64(1549620840000))

                So(res.Series[0].Points[2][0].Float64, ShouldEqual, 2.155)
                So(res.Series[0].Points[2][1].Float64, ShouldEqual, int64(1549620900000))

                So(res.Series[0].Points[3][0].Float64, ShouldEqual, 3.6925)
                So(res.Series[0].Points[3][1].Float64, ShouldEqual, int64(1549620960000))

                So(res.Series[0].Points[4][0].Float64, ShouldEqual, 2.44)
                So(res.Series[0].Points[4][1].Float64, ShouldEqual, int64(1549621020000))
            })

            Convey("when data from query aggregated as total to one time series", func() {
                data, err := loadTestFile("azuremonitor/2-azure-monitor-response-total.json")
                So(err, ShouldBeNil)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Total"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(res.Series[0].Points[0][0].Float64, ShouldEqual, 8.26)
                So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1549718940000))
            })

            Convey("when data from query aggregated as maximum to one time series", func() {
                data, err := loadTestFile("azuremonitor/3-azure-monitor-response-maximum.json")
                So(err, ShouldBeNil)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Maximum"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3.07)
                So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1549722360000))
            })

            Convey("when data from query aggregated as minimum to one time series", func() {
                data, err := loadTestFile("azuremonitor/4-azure-monitor-response-minimum.json")
                So(err, ShouldBeNil)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Minimum"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(res.Series[0].Points[0][0].Float64, ShouldEqual, 1.51)
                So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1549723380000))
            })

            Convey("when data from query aggregated as Count to one time series", func() {
                data, err := loadTestFile("azuremonitor/5-azure-monitor-response-count.json")
                So(err, ShouldBeNil)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Count"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(res.Series[0].Points[0][0].Float64, ShouldEqual, 4)
                So(res.Series[0].Points[0][1].Float64, ShouldEqual, int64(1549723440000))
            })

            Convey("when data from query aggregated as total and has dimension filter", func() {
                data, err := loadTestFile("azuremonitor/6-azure-monitor-response-multi-dimension.json")
                So(err, ShouldBeNil)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Average"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)
                So(len(res.Series), ShouldEqual, 3)

                So(res.Series[0].Name, ShouldEqual, "grafana{blobtype=PageBlob}.Blob Count")
                So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3)

                So(res.Series[1].Name, ShouldEqual, "grafana{blobtype=BlockBlob}.Blob Count")
                So(res.Series[1].Points[0][0].Float64, ShouldEqual, 1)

                So(res.Series[2].Name, ShouldEqual, "grafana{blobtype=Azure Data Lake Storage}.Blob Count")
                So(res.Series[2].Points[0][0].Float64, ShouldEqual, 0)
            })

            Convey("when data from query has alias patterns", func() {
                data, err := loadTestFile("azuremonitor/2-azure-monitor-response-total.json")
                So(err, ShouldBeNil)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Total"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(res.Series[0].Name, ShouldEqual, "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU")
            })

            Convey("when data has dimension filters and alias patterns", func() {
                data, err := loadTestFile("azuremonitor/6-azure-monitor-response-multi-dimension.json")
                So(err, ShouldBeNil)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &AzureMonitorQuery{
                    Alias: "{{dimensionname}}={{DimensionValue}}",
                    UrlComponents: map[string]string{
                        "resourceName": "grafana",
                    },
                    Params: url.Values{
                        "aggregation": {"Average"},
                    },
                }
                err = datasource.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(res.Series[0].Name, ShouldEqual, "blobtype=PageBlob")
                So(res.Series[1].Name, ShouldEqual, "blobtype=BlockBlob")
                So(res.Series[2].Name, ShouldEqual, "blobtype=Azure Data Lake Storage")
            })
        })

        Convey("Find closest allowed interval for auto time grain", func() {
            intervals := map[string]int64{
                "3m": 180000,
                "5m": 300000,
                "10m": 600000,
                "15m": 900000,
                "1d": 86400000,
                "2d": 172800000,
            azureMonitorQuery := &AzureMonitorQuery{
                URL: "12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
                UrlComponents: map[string]string{
                    "metricDefinition": "Microsoft.Compute/virtualMachines",
                    "resourceGroup": "grafanastaging",
                    "resourceName": "grafana",
                    "subscription": "12345678-aaaa-bbbb-cccc-123456789abc",
                },
                Target: tt.azureMonitorQueryTarget,
                RefID: "A",
                Alias: "testalias",
            }

            closest := findClosestAllowedIntervalMS(intervals["3m"], []int64{})
            So(closest, ShouldEqual, intervals["5m"])

            closest = findClosestAllowedIntervalMS(intervals["10m"], []int64{})
            So(closest, ShouldEqual, intervals["15m"])

            closest = findClosestAllowedIntervalMS(intervals["2d"], []int64{})
            So(closest, ShouldEqual, intervals["1d"])

            closest = findClosestAllowedIntervalMS(intervals["3m"], []int64{intervals["1d"]})
            So(closest, ShouldEqual, intervals["1d"])
            queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
            if err != nil {
                t.Error(err)
            }
            if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(AzureMonitorQuery{}, "Params")); diff != "" {
                t.Errorf("Result mismatch (-want +got):\n%s", diff)
            }
        })
        })
    }
}

func makeDates(startDate time.Time, count int, interval time.Duration) (times []time.Time) {
    for i := 0; i < count; i++ {
        times = append(times, startDate.Add(interval*time.Duration(i)))
    }
    return
}

func TestAzureMonitorParseResponse(t *testing.T) {
    tests := []struct {
        name string
        responseFile string
        mockQuery *AzureMonitorQuery
        expectedFrames data.Frames
        queryIntervalMS int64
    }{
        {
            name: "average aggregate time series response",
            responseFile: "1-azure-monitor-response-avg.json",
            mockQuery: &AzureMonitorQuery{
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Average"},
                },
            },
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC), 5, time.Minute)),
                    data.NewField("grafana.Percentage CPU", nil, []float64{
                        2.0875, 2.1525, 2.155, 3.6925, 2.44,
                    }).SetConfig(&data.FieldConfig{Unit: "Percent"})),
            },
        },
        {
            name: "total aggregate time series response",
            responseFile: "2-azure-monitor-response-total.json",
            mockQuery: &AzureMonitorQuery{
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Total"},
                },
            },
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute)),
                    data.NewField("grafana.Percentage CPU", nil, []float64{
                        8.26, 8.7, 14.82, 10.07, 8.52,
                    }).SetConfig(&data.FieldConfig{Unit: "Percent"})),
            },
        },
        {
            name: "maximum aggregate time series response",
            responseFile: "3-azure-monitor-response-maximum.json",
            mockQuery: &AzureMonitorQuery{
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Maximum"},
                },
            },
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 14, 26, 0, 0, time.UTC), 5, time.Minute)),
                    data.NewField("grafana.Percentage CPU", nil, []float64{
                        3.07, 2.92, 2.87, 2.27, 2.52,
                    }).SetConfig(&data.FieldConfig{Unit: "Percent"})),
            },
        },
        {
            name: "minimum aggregate time series response",
            responseFile: "4-azure-monitor-response-minimum.json",
            mockQuery: &AzureMonitorQuery{
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Minimum"},
                },
            },
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 14, 43, 0, 0, time.UTC), 5, time.Minute)),
                    data.NewField("grafana.Percentage CPU", nil, []float64{
                        1.51, 2.38, 1.69, 2.27, 1.96,
                    }).SetConfig(&data.FieldConfig{Unit: "Percent"})),
            },
        },
        {
            name: "count aggregate time series response",
            responseFile: "5-azure-monitor-response-count.json",
            mockQuery: &AzureMonitorQuery{
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Count"},
                },
            },
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 14, 44, 0, 0, time.UTC), 5, time.Minute)),
                    data.NewField("grafana.Percentage CPU", nil, []float64{
                        4, 4, 4, 4, 4,
                    }).SetConfig(&data.FieldConfig{Unit: "Percent"})),
            },
        },
        {
            name: "multi dimension time series response",
            responseFile: "6-azure-monitor-response-multi-dimension.json",
            mockQuery: &AzureMonitorQuery{
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Average"},
                },
            },
            // Regarding multi-dimensional response:
            // - It seems they all share the same time index, so maybe can be a wide frame.
            // - Due to the type for the Azure monitor response, nulls currently become 0.
            // - blogtype=X should maybe become labels.
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
                    data.NewField("grafana{blobtype=PageBlob}.Blob Count", nil, []float64{
                        3, 3, 3, 3, 3, 0,
                    }).SetConfig(&data.FieldConfig{Unit: "Count"})),

                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
                    data.NewField("grafana{blobtype=BlockBlob}.Blob Count", nil, []float64{
                        1, 1, 1, 1, 1, 0,
                    }).SetConfig(&data.FieldConfig{Unit: "Count"})),

                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
                    data.NewField("grafana{blobtype=Azure Data Lake Storage}.Blob Count", nil, []float64{
                        0, 0, 0, 0, 0, 0,
                    }).SetConfig(&data.FieldConfig{Unit: "Count"})),
            },
        },
        {
            name: "with alias patterns in the query",
            responseFile: "2-azure-monitor-response-total.json",
            mockQuery: &AzureMonitorQuery{
                Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Total"},
                },
            },
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute)),
                    data.NewField("custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", nil, []float64{
                        8.26, 8.7, 14.82, 10.07, 8.52,
                    }).SetConfig(&data.FieldConfig{Unit: "Percent"})),
            },
        },
        {
            name: "multi dimension with alias",
            responseFile: "6-azure-monitor-response-multi-dimension.json",
            mockQuery: &AzureMonitorQuery{
                Alias: "{{dimensionname}}={{DimensionValue}}",
                UrlComponents: map[string]string{
                    "resourceName": "grafana",
                },
                Params: url.Values{
                    "aggregation": {"Average"},
                },
            },
            expectedFrames: data.Frames{
                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
                    data.NewField("blobtype=PageBlob", nil, []float64{
                        3, 3, 3, 3, 3, 0,
                    }).SetConfig(&data.FieldConfig{Unit: "Count"})),

                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
                    data.NewField("blobtype=BlockBlob", nil, []float64{
                        1, 1, 1, 1, 1, 0,
                    }).SetConfig(&data.FieldConfig{Unit: "Count"})),

                data.NewFrame("",
                    data.NewField("", nil,
                        makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour)),
                    data.NewField("blobtype=Azure Data Lake Storage", nil, []float64{
                        0, 0, 0, 0, 0, 0,
                    }).SetConfig(&data.FieldConfig{Unit: "Count"})),
            },
        },
    }

    datasource := &AzureMonitorDatasource{}

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            azData, err := loadTestFile("azuremonitor/" + tt.responseFile)
            require.NoError(t, err)
            res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
            err = datasource.parseResponse(res, azData, tt.mockQuery)
            require.NoError(t, err)

            frames, err := data.UnmarshalArrowFrames(res.Dataframes)
            require.NoError(t, err)
            if diff := cmp.Diff(tt.expectedFrames, frames, data.FrameTestCompareOptions()...); diff != "" {
                t.Errorf("Result mismatch (-want +got):\n%s", diff)
            }
        })
    }
}

func TestFindClosestAllowIntervalMS(t *testing.T) {
    humanIntervalToMS := map[string]int64{
        "3m": 180000,
        "5m": 300000,
        "10m": 600000,
        "15m": 900000,
        "1d": 86400000,
        "2d": 172800000,
    }
    tests := []struct {
        name string
        allowedTimeGrains []int64 // Note: Uses defaults when empty list
        inputInterval int64
        expectedInterval int64
    }{
        {
            name: "closest to 3m is 5m",
            allowedTimeGrains: []int64{},
            inputInterval: humanIntervalToMS["3m"],
            expectedInterval: humanIntervalToMS["5m"],
        },
        {
            name: "closest to 10m is 15m",
            allowedTimeGrains: []int64{},
            inputInterval: humanIntervalToMS["10m"],
            expectedInterval: humanIntervalToMS["15m"],
        },
        {
            name: "closest to 2d is 1d",
            allowedTimeGrains: []int64{},
            inputInterval: humanIntervalToMS["2d"],
            expectedInterval: humanIntervalToMS["1d"],
        },
        {
            name: "closest to 3m is 1d when 1d is only allowed interval",
            allowedTimeGrains: []int64{humanIntervalToMS["1d"]},
            inputInterval: humanIntervalToMS["2d"],
            expectedInterval: humanIntervalToMS["1d"],
        },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            interval := findClosestAllowedIntervalMS(tt.inputInterval, tt.allowedTimeGrains)
            require.Equal(t, tt.expectedInterval, interval)
        })
    }
}

func loadTestFile(name string) (AzureMonitorResponse, error) {
    var data AzureMonitorResponse
    var azData AzureMonitorResponse

    path := filepath.Join("testdata", name)
    jsonBody, err := ioutil.ReadFile(path)
    if err != nil {
        return data, err
        return azData, err
    }
    err = json.Unmarshal(jsonBody, &data)
    return data, err
    err = json.Unmarshal(jsonBody, &azData)
    return azData, err
}

@@ -21,6 +21,7 @@ import {
import { toDataQueryError } from '@grafana/runtime';
import { emitDataRequestEvent } from './analyticsProcessor';
import { ExpressionDatasourceID, expressionDatasource } from 'app/features/expressions/ExpressionDatasource';
import { ExpressionQuery } from 'app/features/expressions/types';

type MapOfResponsePackets = { [str: string]: DataQueryResponse };

@@ -145,7 +146,7 @@ export function callQueryMethod(datasource: DataSourceApi, request: DataQueryReq
  // If any query has an expression, use the expression endpoint
  for (const target of request.targets) {
    if (target.datasource === ExpressionDatasourceID) {
      return expressionDatasource.query(request);
      return expressionDatasource.query(request as DataQueryRequest<ExpressionQuery>);
    }
  }

@@ -1,29 +1,36 @@
import AzureMonitorDatasource from '../datasource';

import { TemplateSrv } from 'app/features/templating/template_srv';
import { toUtc, DataFrame, getFrameDisplayName } from '@grafana/data';
import { DataSourceInstanceSettings } from '@grafana/data';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
import { AzureDataSourceJsonData } from '../types';

const templateSrv = new TemplateSrv();

jest.mock('@grafana/runtime', () => ({
  ...jest.requireActual('@grafana/runtime'),
  getBackendSrv: () => backendSrv,
  getTemplateSrv: () => templateSrv,
}));

interface TestContext {
  instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>;
  ds: AzureMonitorDatasource;
}

describe('AzureMonitorDatasource', () => {
  const ctx: any = {
    templateSrv: new TemplateSrv(),
  };
  const ctx: TestContext = {} as TestContext;
  const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest');

  beforeEach(() => {
    jest.clearAllMocks();
    ctx.instanceSettings = {
    ctx.instanceSettings = ({
      name: 'test',
      url: 'http://azuremonitor.com',
      jsonData: { subscriptionId: '9935389e-9122-4ef9-95f9-1513dd24753f' },
      cloudName: 'azuremonitor',
    };

    ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, ctx.templateSrv);
    } as unknown) as DataSourceInstanceSettings<AzureDataSourceJsonData>;
    ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, templateSrv);
  });

  describe('When performing testDatasource', () => {

@@ -78,74 +85,6 @@ describe('AzureMonitorDatasource', () => {
    });
  });

  describe('When performing query', () => {
    const options = {
      range: {
        from: toUtc('2017-08-22T20:00:00Z'),
        to: toUtc('2017-08-22T23:59:00Z'),
      },
      targets: [
        {
          apiVersion: '2018-01-01',
          refId: 'A',
          queryType: 'Azure Monitor',
          azureMonitor: {
            resourceGroup: 'testRG',
            resourceName: 'testRN',
            metricDefinition: 'Microsoft.Compute/virtualMachines',
            metricNamespace: 'default',
            metricName: 'Percentage CPU',
            timeGrain: 'PT1H',
            alias: '{{metric}}',
          },
        },
      ],
    };

    const response: any = {
      results: {
        A: {
          refId: 'A',
          meta: {
            rawQuery:
              'aggregation=Average&api-version=2018-01-01&interval=PT1M' +
              '&metricnames=Percentage+CPU&timespan=2019-05-19T15%3A11%3A37Z%2F2019-05-19T21%3A11%3A37Z',
            unit: 'Percent',
          },
          series: [
            {
              name: 'Percentage CPU',
              points: [
                [2.2075, 1558278660000],
                [2.29, 1558278720000],
              ],
            },
          ],
          tables: null,
        },
      },
    };

    beforeEach(() => {
      datasourceRequestMock.mockImplementation((options: { url: string }) => {
        expect(options.url).toContain('/api/tsdb/query');
        return Promise.resolve({ data: response, status: 200 });
      });
    });

    it('should return a list of datapoints', () => {
      return ctx.ds.query(options).then((results: any) => {
        expect(results.data.length).toBe(1);
        const data = results.data[0] as DataFrame;
        expect(getFrameDisplayName(data)).toEqual('Percentage CPU');
        expect(data.fields[0].values.get(0)).toEqual(1558278660000);
        expect(data.fields[1].values.get(0)).toEqual(2.2075);
        expect(data.fields[0].values.get(1)).toEqual(1558278720000);
        expect(data.fields[1].values.get(1)).toEqual(2.29);
      });
    });
  });

  describe('When performing metricFindQuery', () => {
    describe('with a subscriptions query', () => {
      const response = {

@@ -626,7 +565,7 @@ describe('AzureMonitorDatasource', () => {
    });

    it('should return list of Resource Groups', () => {
      return ctx.ds.getResourceGroups().then((results: Array<{ text: string; value: string }>) => {
      return ctx.ds.getResourceGroups('subscriptionId').then((results: Array<{ text: string; value: string }>) => {
        expect(results.length).toEqual(2);
        expect(results[0].text).toEqual('grp1');
        expect(results[0].value).toEqual('grp1');

@@ -1,4 +1,4 @@
import _ from 'lodash';
import { filter, startsWith } from 'lodash';
import UrlBuilder from './url_builder';
import ResponseParser from './response_parser';
import SupportedNamespaces from './supported_namespaces';

@@ -9,31 +9,25 @@ import {
  AzureMonitorMetricDefinitionsResponse,
  AzureMonitorResourceGroupsResponse,
} from '../types';
import { DataQueryRequest, DataQueryResponseData, DataSourceInstanceSettings } from '@grafana/data';
import { DataSourceInstanceSettings, ScopedVars } from '@grafana/data';
import { getBackendSrv, DataSourceWithBackend, getTemplateSrv } from '@grafana/runtime';

import { TimeSeries, toDataFrame } from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { getBackendSrv } from '@grafana/runtime';
const defaultDropdownValue = 'select';

export default class AzureMonitorDatasource {
export default class AzureMonitorDatasource extends DataSourceWithBackend<AzureMonitorQuery, AzureDataSourceJsonData> {
  apiVersion = '2018-01-01';
  apiPreviewVersion = '2017-12-01-preview';
  id: number;
  subscriptionId: string;
  baseUrl: string;
  resourceGroup: string;
  resourceName: string;
  url: string;
  defaultDropdownValue = 'select';
  cloudName: string;
  supportedMetricNamespaces: string[] = [];

  /** @ngInject */
  constructor(
    private instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>,
    private templateSrv: TemplateSrv
  ) {
    this.id = instanceSettings.id;
  constructor(private instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
    super(instanceSettings);

    this.subscriptionId = instanceSettings.jsonData.subscriptionId;
    this.cloudName = instanceSettings.jsonData.cloudName || 'azuremonitor';
    this.baseUrl = `/${this.cloudName}/subscriptions`;

@@ -46,100 +40,63 @@ export default class AzureMonitorDatasource {
    return !!this.subscriptionId && this.subscriptionId.length > 0;
  }

  async query(options: DataQueryRequest<AzureMonitorQuery>): Promise<DataQueryResponseData[]> {
    const queries = _.filter(options.targets, item => {
      return (
        item.hide !== true &&
        item.azureMonitor.resourceGroup &&
        item.azureMonitor.resourceGroup !== this.defaultDropdownValue &&
        item.azureMonitor.resourceName &&
        item.azureMonitor.resourceName !== this.defaultDropdownValue &&
        item.azureMonitor.metricDefinition &&
        item.azureMonitor.metricDefinition !== this.defaultDropdownValue &&
        item.azureMonitor.metricName &&
        item.azureMonitor.metricName !== this.defaultDropdownValue
      );
    }).map(target => {
      const item = target.azureMonitor;

      // fix for timeGrainUnit which is a deprecated/removed field name
      if (item.timeGrainUnit && item.timeGrain !== 'auto') {
        item.timeGrain = TimegrainConverter.createISO8601Duration(item.timeGrain, item.timeGrainUnit);
      }

      const subscriptionId = this.templateSrv.replace(target.subscription || this.subscriptionId, options.scopedVars);
      const resourceGroup = this.templateSrv.replace(item.resourceGroup, options.scopedVars);
      const resourceName = this.templateSrv.replace(item.resourceName, options.scopedVars);
      const metricNamespace = this.templateSrv.replace(item.metricNamespace, options.scopedVars);
      const metricDefinition = this.templateSrv.replace(item.metricDefinition, options.scopedVars);
      const timeGrain = this.templateSrv.replace((item.timeGrain || '').toString(), options.scopedVars);
      const aggregation = this.templateSrv.replace(item.aggregation, options.scopedVars);
      const top = this.templateSrv.replace(item.top || '', options.scopedVars);

      return {
        refId: target.refId,
        intervalMs: options.intervalMs,
        datasourceId: this.id,
        subscription: subscriptionId,
        queryType: 'Azure Monitor',
        type: 'timeSeriesQuery',
        raw: false,
        azureMonitor: {
          resourceGroup: resourceGroup,
          resourceName: resourceName,
          metricDefinition: metricDefinition,
          timeGrain: timeGrain,
          allowedTimeGrainsMs: item.allowedTimeGrainsMs,
          metricName: this.templateSrv.replace(item.metricName, options.scopedVars),
          metricNamespace:
            metricNamespace && metricNamespace !== this.defaultDropdownValue ? metricNamespace : metricDefinition,
          aggregation: aggregation,
          dimension: this.templateSrv.replace(item.dimension, options.scopedVars),
          top: top || '10',
          dimensionFilter: this.templateSrv.replace(item.dimensionFilter, options.scopedVars),
          alias: item.alias,
          format: target.format,
        },
      };
    });

    if (!queries || queries.length === 0) {
      return Promise.resolve([]);
    }

    const { data } = await getBackendSrv().datasourceRequest({
      url: '/api/tsdb/query',
      method: 'POST',
      data: {
        from: options.range.from.valueOf().toString(),
        to: options.range.to.valueOf().toString(),
        queries,
      },
    });

    const result: DataQueryResponseData[] = [];
    if (data.results) {
      Object['values'](data.results).forEach((queryRes: any) => {
        if (!queryRes.series) {
          return;
        }
        queryRes.series.forEach((series: any) => {
          const timeSerie: TimeSeries = {
            target: series.name,
            datapoints: series.points,
            refId: queryRes.refId,
            meta: queryRes.meta,
          };
          result.push(toDataFrame(timeSerie));
        });
      });
      return result;
    }

    return Promise.resolve([]);
  filterQuery(item: AzureMonitorQuery): boolean {
    return (
      item.hide !== true &&
      item.azureMonitor.resourceGroup &&
      item.azureMonitor.resourceGroup !== defaultDropdownValue &&
      item.azureMonitor.resourceName &&
      item.azureMonitor.resourceName !== defaultDropdownValue &&
      item.azureMonitor.metricDefinition &&
      item.azureMonitor.metricDefinition !== defaultDropdownValue &&
      item.azureMonitor.metricName &&
      item.azureMonitor.metricName !== defaultDropdownValue
    );
  }

  annotationQuery(options: any) {}
  applyTemplateVariables(target: AzureMonitorQuery, scopedVars: ScopedVars): Record<string, any> {
    const item = target.azureMonitor;

    // fix for timeGrainUnit which is a deprecated/removed field name
    if (item.timeGrainUnit && item.timeGrain !== 'auto') {
      item.timeGrain = TimegrainConverter.createISO8601Duration(item.timeGrain, item.timeGrainUnit);
    }

    const templateSrv = getTemplateSrv();

    const subscriptionId = templateSrv.replace(target.subscription || this.subscriptionId, scopedVars);
    const resourceGroup = templateSrv.replace(item.resourceGroup, scopedVars);
    const resourceName = templateSrv.replace(item.resourceName, scopedVars);
    const metricNamespace = templateSrv.replace(item.metricNamespace, scopedVars);
    const metricDefinition = templateSrv.replace(item.metricDefinition, scopedVars);
    const timeGrain = templateSrv.replace((item.timeGrain || '').toString(), scopedVars);
    const aggregation = templateSrv.replace(item.aggregation, scopedVars);
    const top = templateSrv.replace(item.top || '', scopedVars);

    return {
      refId: target.refId,
      subscription: subscriptionId,
      queryType: 'Azure Monitor',
      type: 'timeSeriesQuery',
      raw: false,
      azureMonitor: {
        resourceGroup,
        resourceName,
        metricDefinition,
        timeGrain,
        allowedTimeGrainsMs: item.allowedTimeGrainsMs,
        metricName: templateSrv.replace(item.metricName, scopedVars),
        metricNamespace:
          metricNamespace && metricNamespace !== defaultDropdownValue ? metricNamespace : metricDefinition,
        aggregation: aggregation,
        dimension: templateSrv.replace(item.dimension, scopedVars),
        top: top || '10',
        dimensionFilter: templateSrv.replace(item.dimensionFilter, scopedVars),
        alias: item.alias,
        format: target.format,
      },
    };
  }

  metricFindQuery(query: string) {
    const subscriptionsQuery = query.match(/^Subscriptions\(\)/i);

@@ -234,7 +191,7 @@ export default class AzureMonitorDatasource {
  }

  toVariable(metric: string) {
    return this.templateSrv.replace((metric || '').trim());
    return getTemplateSrv().replace((metric || '').trim());
  }

  getSubscriptions(route?: string) {

@@ -258,7 +215,7 @@ export default class AzureMonitorDatasource {
        return ResponseParser.parseResponseValues(result, 'type', 'type');
      })
      .then((result: any) => {
        return _.filter(result, t => {
        return filter(result, t => {
          for (let i = 0; i < this.supportedMetricNamespaces.length; i++) {
            if (t.value.toLowerCase() === this.supportedMetricNamespaces[i].toLowerCase()) {
              return true;

@@ -304,7 +261,7 @@ export default class AzureMonitorDatasource {
    const url = `${this.baseUrl}/${subscriptionId}/resourceGroups/${resourceGroup}/resources?api-version=${this.apiVersion}`;

    return this.doRequest(url).then((result: any) => {
      if (!_.startsWith(metricDefinition, 'Microsoft.Storage/storageAccounts/')) {
      if (!startsWith(metricDefinition, 'Microsoft.Storage/storageAccounts/')) {
        return ResponseParser.parseResourceNames(result, metricDefinition);
      }

@@ -378,19 +335,19 @@ export default class AzureMonitorDatasource {
    });
  }

  testDatasource() {
  testDatasource(): Promise<any> {
    if (!this.isValidConfigField(this.instanceSettings.jsonData.tenantId)) {
      return {
      return Promise.resolve({
        status: 'error',
        message: 'The Tenant Id field is required.',
      };
      });
    }

    if (!this.isValidConfigField(this.instanceSettings.jsonData.clientId)) {
      return {
      return Promise.resolve({
        status: 'error',
        message: 'The Client Id field is required.',
      };
      });
    }

    const url = `/${this.cloudName}/subscriptions?api-version=2019-03-01`;

@@ -3,8 +3,15 @@ import AzureMonitorDatasource from './azure_monitor/azure_monitor_datasource';
import AppInsightsDatasource from './app_insights/app_insights_datasource';
import AzureLogAnalyticsDatasource from './azure_log_analytics/azure_log_analytics_datasource';
import { AzureMonitorQuery, AzureDataSourceJsonData } from './types';
import { DataSourceApi, DataQueryRequest, DataSourceInstanceSettings } from '@grafana/data';
import {
  DataSourceApi,
  DataQueryRequest,
  DataSourceInstanceSettings,
  DataQueryResponse,
  DataQueryResponseData,
} from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { Observable } from 'rxjs';

export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDataSourceJsonData> {
  azureMonitorDatasource: AzureMonitorDatasource;

@@ -14,13 +21,12 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
  /** @ngInject */
  constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>, private templateSrv: TemplateSrv) {
    super(instanceSettings);
    this.azureMonitorDatasource = new AzureMonitorDatasource(instanceSettings, this.templateSrv);
    this.azureMonitorDatasource = new AzureMonitorDatasource(instanceSettings);
    this.appInsightsDatasource = new AppInsightsDatasource(instanceSettings, this.templateSrv);

    this.azureLogAnalyticsDatasource = new AzureLogAnalyticsDatasource(instanceSettings, this.templateSrv);
  }

  async query(options: DataQueryRequest<AzureMonitorQuery>) {
  query(options: DataQueryRequest<AzureMonitorQuery>): Promise<DataQueryResponse> | Observable<DataQueryResponseData> {
    const promises: any[] = [];
    const azureMonitorOptions = _.cloneDeep(options);
    const appInsightsOptions = _.cloneDeep(options);

@@ -30,13 +36,6 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
    appInsightsOptions.targets = _.filter(appInsightsOptions.targets, ['queryType', 'Application Insights']);
    azureLogAnalyticsOptions.targets = _.filter(azureLogAnalyticsOptions.targets, ['queryType', 'Azure Log Analytics']);

    if (azureMonitorOptions.targets.length > 0) {
      const amPromise = this.azureMonitorDatasource.query(azureMonitorOptions);
      if (amPromise) {
        promises.push(amPromise);
      }
    }

    if (appInsightsOptions.targets.length > 0) {
      const aiPromise = this.appInsightsDatasource.query(appInsightsOptions);
      if (aiPromise) {

@@ -51,6 +50,16 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
      }
    }

    if (azureMonitorOptions.targets.length > 0) {
      const obs = this.azureMonitorDatasource.query(azureMonitorOptions);
      if (!promises.length) {
        return obs; // return the observable directly
      }
      // NOTE: this only includes the data!
      // When all three query types are ready to be observable, they should all use observable
      promises.push(obs.toPromise().then(r => r.data));
    }

    if (promises.length === 0) {
      return Promise.resolve({ data: [] });
    }