Mirror of https://github.com/grafana/grafana.git
Merge pull request #15353 from grafana/14623-azure-monitor-alerting
Adds alerting for the Azure Monitor API in the Azure Monitor datasource
Commit: 1c364b57b5
@@ -19,6 +19,7 @@ import (
    _ "github.com/grafana/grafana/pkg/services/alerting/conditions"
    _ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
    "github.com/grafana/grafana/pkg/setting"
+   _ "github.com/grafana/grafana/pkg/tsdb/azuremonitor"
    _ "github.com/grafana/grafana/pkg/tsdb/cloudwatch"
    _ "github.com/grafana/grafana/pkg/tsdb/elasticsearch"
    _ "github.com/grafana/grafana/pkg/tsdb/graphite"
@@ -23,7 +23,7 @@ const (
    DS_ACCESS_DIRECT = "direct"
    DS_ACCESS_PROXY  = "proxy"
    DS_STACKDRIVER   = "stackdriver"
-   DS_AZURE_MONITOR = "azure-monitor"
+   DS_AZURE_MONITOR = "grafana-azure-monitor-datasource"
)

var (
pkg/tsdb/azuremonitor/azuremonitor-datasource.go (new file, 312 lines)
@@ -0,0 +1,312 @@
package azuremonitor

import (
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "io/ioutil"
    "net/http"
    "net/url"
    "path"
    "strings"
    "time"

    "github.com/grafana/grafana/pkg/api/pluginproxy"
    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins"
    "github.com/grafana/grafana/pkg/setting"
    opentracing "github.com/opentracing/opentracing-go"
    "golang.org/x/net/context/ctxhttp"

    "github.com/grafana/grafana/pkg/components/null"
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb"
)

// AzureMonitorDatasource calls the Azure Monitor API - one of the four APIs supported
type AzureMonitorDatasource struct {
    httpClient *http.Client
    dsInfo     *models.DataSource
}

var (
    // 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
    allowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
)

// executeTimeSeriesQuery does the following:
// 1. builds the AzureMonitor URL and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into the time series format
func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) {
    result := &tsdb.Response{
        Results: map[string]*tsdb.QueryResult{},
    }

    queries, err := e.buildQueries(originalQueries, timeRange)
    if err != nil {
        return nil, err
    }

    for _, query := range queries {
        queryRes, resp, err := e.executeQuery(ctx, query, originalQueries, timeRange)
        if err != nil {
            return nil, err
        }
        // azlog.Debug("AzureMonitor", "Response", resp)

        err = e.parseResponse(queryRes, resp, query)
        if err != nil {
            queryRes.Error = err
        }
        result.Results[query.RefID] = queryRes
    }

    return result, nil
}

func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*AzureMonitorQuery, error) {
    azureMonitorQueries := []*AzureMonitorQuery{}
    startTime, err := timeRange.ParseFrom()
    if err != nil {
        return nil, err
    }

    endTime, err := timeRange.ParseTo()
    if err != nil {
        return nil, err
    }

    for _, query := range queries {
        var target string

        azureMonitorTarget := query.Model.Get("azureMonitor").MustMap()
        azlog.Debug("AzureMonitor", "target", azureMonitorTarget)

        urlComponents := map[string]string{}
        urlComponents["resourceGroup"] = fmt.Sprintf("%v", azureMonitorTarget["resourceGroup"])
        urlComponents["metricDefinition"] = fmt.Sprintf("%v", azureMonitorTarget["metricDefinition"])
        urlComponents["resourceName"] = fmt.Sprintf("%v", azureMonitorTarget["resourceName"])

        ub := urlBuilder{
            ResourceGroup:    urlComponents["resourceGroup"],
            MetricDefinition: urlComponents["metricDefinition"],
            ResourceName:     urlComponents["resourceName"],
        }
        azureURL := ub.Build()

        alias := fmt.Sprintf("%v", azureMonitorTarget["alias"])

        timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])
        if timeGrain == "auto" {
            autoInterval := e.findClosestAllowedIntervalMS(query.IntervalMs)
            tg := &TimeGrain{}
            timeGrain, err = tg.createISO8601DurationFromIntervalMS(autoInterval)
            if err != nil {
                return nil, err
            }
        }

        params := url.Values{}
        params.Add("api-version", "2018-01-01")
        params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
        params.Add("interval", timeGrain)
        params.Add("aggregation", fmt.Sprintf("%v", azureMonitorTarget["aggregation"]))
        params.Add("metricnames", fmt.Sprintf("%v", azureMonitorTarget["metricName"]))

        dimension := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimension"]))
        dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", azureMonitorTarget["dimensionFilter"]))
        if azureMonitorTarget["dimension"] != nil && azureMonitorTarget["dimensionFilter"] != nil && len(dimension) > 0 && len(dimensionFilter) > 0 {
            params.Add("$filter", fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
        }

        target = params.Encode()

        if setting.Env == setting.DEV {
            azlog.Debug("Azuremonitor request", "params", params)
        }

        azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{
            URL:           azureURL,
            UrlComponents: urlComponents,
            Target:        target,
            Params:        params,
            RefID:         query.RefId,
            Alias:         alias,
        })
    }

    return azureMonitorQueries, nil
}

func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, AzureMonitorResponse, error) {
    queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}

    req, err := e.createRequest(ctx, e.dsInfo)
    if err != nil {
        queryResult.Error = err
        return queryResult, AzureMonitorResponse{}, nil
    }

    req.URL.Path = path.Join(req.URL.Path, query.URL)
    req.URL.RawQuery = query.Params.Encode()
    queryResult.Meta.Set("rawQuery", req.URL.RawQuery)

    span, ctx := opentracing.StartSpanFromContext(ctx, "azuremonitor query")
    span.SetTag("target", query.Target)
    span.SetTag("from", timeRange.From)
    span.SetTag("until", timeRange.To)
    span.SetTag("datasource_id", e.dsInfo.Id)
    span.SetTag("org_id", e.dsInfo.OrgId)

    defer span.Finish()

    opentracing.GlobalTracer().Inject(
        span.Context(),
        opentracing.HTTPHeaders,
        opentracing.HTTPHeadersCarrier(req.Header))

    azlog.Debug("AzureMonitor", "Request URL", req.URL.String())
    res, err := ctxhttp.Do(ctx, e.httpClient, req)
    if err != nil {
        queryResult.Error = err
        return queryResult, AzureMonitorResponse{}, nil
    }

    data, err := e.unmarshalResponse(res)
    if err != nil {
        queryResult.Error = err
        return queryResult, AzureMonitorResponse{}, nil
    }

    return queryResult, data, nil
}

func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
    // find plugin
    plugin, ok := plugins.DataSources[dsInfo.Type]
    if !ok {
        return nil, errors.New("Unable to find datasource plugin Azure Monitor")
    }

    var azureMonitorRoute *plugins.AppPluginRoute
    for _, route := range plugin.Routes {
        if route.Path == "azuremonitor" {
            azureMonitorRoute = route
            break
        }
    }

    cloudName := dsInfo.JsonData.Get("cloudName").MustString("azuremonitor")
    subscriptionID := dsInfo.JsonData.Get("subscriptionId").MustString()
    proxyPass := fmt.Sprintf("%s/subscriptions/%s", cloudName, subscriptionID)

    u, _ := url.Parse(dsInfo.Url)
    u.Path = path.Join(u.Path, "render")

    req, err := http.NewRequest(http.MethodGet, u.String(), nil)
    if err != nil {
        azlog.Error("Failed to create request", "error", err)
        return nil, fmt.Errorf("Failed to create request. error: %v", err)
    }

    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))

    pluginproxy.ApplyRoute(ctx, req, proxyPass, azureMonitorRoute, dsInfo)

    return req, nil
}

func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMonitorResponse, error) {
    body, err := ioutil.ReadAll(res.Body)
    defer res.Body.Close()
    if err != nil {
        return AzureMonitorResponse{}, err
    }

    if res.StatusCode/100 != 2 {
        azlog.Error("Request failed", "status", res.Status, "body", string(body))
        return AzureMonitorResponse{}, fmt.Errorf(string(body))
    }

    var data AzureMonitorResponse
    err = json.Unmarshal(body, &data)
    if err != nil {
        azlog.Error("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
        return AzureMonitorResponse{}, err
    }

    return data, nil
}

func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data AzureMonitorResponse, query *AzureMonitorQuery) error {
    if len(data.Value) == 0 {
        return nil
    }

    for _, series := range data.Value[0].Timeseries {
        points := []tsdb.TimePoint{}

        metadataName := ""
        metadataValue := ""
        if len(series.Metadatavalues) > 0 {
            metadataName = series.Metadatavalues[0].Name.LocalizedValue
            metadataValue = series.Metadatavalues[0].Value
        }
        defaultMetricName := formatLegendKey(query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue)

        for _, point := range series.Data {
            var value float64
            switch query.Params.Get("aggregation") {
            case "Average":
                value = point.Average
            case "Total":
                value = point.Total
            case "Maximum":
                value = point.Maximum
            case "Minimum":
                value = point.Minimum
            case "Count":
                value = point.Count
            default:
                value = point.Count
            }
            points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.TimeStamp).Unix())*1000))
        }

        queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
            Name:   defaultMetricName,
            Points: points,
        })
    }

    return nil
}

// findClosestAllowedIntervalMS is used for the auto time grain setting.
// It rounds the Grafana interval in milliseconds up to the closest time grain
// in the list of time grains allowed by Azure Monitor.
func (e *AzureMonitorDatasource) findClosestAllowedIntervalMS(intervalMs int64) int64 {
    closest := allowedIntervalsMS[0]

    for i, allowed := range allowedIntervalsMS {
        if intervalMs > allowed {
            if i+1 < len(allowedIntervalsMS) {
                closest = allowedIntervalsMS[i+1]
            } else {
                closest = allowed
            }
        }
    }
    return closest
}

// formatLegendKey builds the legend key or timeseries name
func formatLegendKey(resourceName string, metricName string, metadataName string, metadataValue string) string {
    if len(metadataName) > 0 {
        return fmt.Sprintf("%s{%s=%s}.%s", resourceName, metadataName, metadataValue, metricName)
    }
    return fmt.Sprintf("%s.%s", resourceName, metricName)
}
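To make the auto time-grain behaviour concrete, here is a small standalone sketch (not part of the commit) that mirrors the snapping loop in findClosestAllowedIntervalMS above; the expected values match the unit tests later in this diff, and the ISO 8601 strings noted in the comments come from TimeGrain in time-grain.go.

package main

import "fmt"

// mirrors allowedIntervalsMS in azuremonitor-datasource.go:
// 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
var allowed = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}

// snap mirrors findClosestAllowedIntervalMS: round the Grafana
// interval up to the next allowed Azure Monitor time grain.
func snap(intervalMs int64) int64 {
    closest := allowed[0]
    for i, a := range allowed {
        if intervalMs > a {
            if i+1 < len(allowed) {
                closest = allowed[i+1]
            } else {
                closest = a
            }
        }
    }
    return closest
}

func main() {
    fmt.Println(snap(180000))    // 3m  -> 300000 (5m), which TimeGrain renders as "PT5M"
    fmt.Println(snap(600000))    // 10m -> 900000 (15m) -> "PT15M"
    fmt.Println(snap(172800000)) // 2d  -> 86400000 (1d, the largest allowed grain) -> "P1D"
}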
pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go (new file, 264 lines)
@@ -0,0 +1,264 @@
package azuremonitor

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
    "net/url"
    "testing"
    "time"

    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb"

    . "github.com/smartystreets/goconvey/convey"
)

func TestAzureMonitorDatasource(t *testing.T) {
    Convey("AzureMonitorDatasource", t, func() {
        datasource := &AzureMonitorDatasource{}

        Convey("Parse queries from frontend and build AzureMonitor API queries", func() {
            fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
            tsdbQuery := &tsdb.TsdbQuery{
                TimeRange: &tsdb.TimeRange{
                    From: fmt.Sprintf("%v", fromStart.Unix()*1000),
                    To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
                },
                Queries: []*tsdb.Query{
                    {
                        Model: simplejson.NewFromAny(map[string]interface{}{
                            "azureMonitor": map[string]interface{}{
                                "timeGrain":        "PT1M",
                                "aggregation":      "Average",
                                "resourceGroup":    "grafanastaging",
                                "resourceName":     "grafana",
                                "metricDefinition": "Microsoft.Compute/virtualMachines",
                                "metricName":       "Percentage CPU",
                                "alias":            "testalias",
                                "queryType":        "Azure Monitor",
                            },
                        }),
                        RefId: "A",
                    },
                },
            }
            Convey("and is a normal query", func() {
                queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
                So(err, ShouldBeNil)

                So(len(queries), ShouldEqual, 1)
                So(queries[0].RefID, ShouldEqual, "A")
                So(queries[0].URL, ShouldEqual, "resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics")
                So(queries[0].Target, ShouldEqual, "aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
                So(len(queries[0].Params), ShouldEqual, 5)
                So(queries[0].Params["timespan"][0], ShouldEqual, "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z")
                So(queries[0].Params["api-version"][0], ShouldEqual, "2018-01-01")
                So(queries[0].Params["aggregation"][0], ShouldEqual, "Average")
                So(queries[0].Params["metricnames"][0], ShouldEqual, "Percentage CPU")
                So(queries[0].Params["interval"][0], ShouldEqual, "PT1M")
                So(queries[0].Alias, ShouldEqual, "testalias")
            })

            Convey("and has a dimension filter", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "azureMonitor": map[string]interface{}{
                        "timeGrain":        "PT1M",
                        "aggregation":      "Average",
                        "resourceGroup":    "grafanastaging",
                        "resourceName":     "grafana",
                        "metricDefinition": "Microsoft.Compute/virtualMachines",
                        "metricName":       "Percentage CPU",
                        "alias":            "testalias",
                        "queryType":        "Azure Monitor",
                        "dimension":        "blob",
                        "dimensionFilter":  "*",
                    },
                })

                queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
                So(err, ShouldBeNil)

                So(queries[0].Target, ShouldEqual, "%24filter=blob+eq+%27%2A%27&aggregation=Average&api-version=2018-01-01&interval=PT1M&metricnames=Percentage+CPU&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
            })
        })

Convey("Parse AzureMonitor API response in the time series format", func() {
|
||||
Convey("when data from query aggregated as average to one time series", func() {
|
||||
data, err := loadTestFile("./test-data/1-azure-monitor-response-avg.json")
|
||||
So(err, ShouldBeNil)
|
||||
So(data.Interval, ShouldEqual, "PT1M")
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &AzureMonitorQuery{
|
||||
UrlComponents: map[string]string{
|
||||
"resourceName": "grafana",
|
||||
},
|
||||
Params: url.Values{
|
||||
"aggregation": {"Average"},
|
||||
},
|
||||
}
|
||||
err = datasource.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
So(len(res.Series), ShouldEqual, 1)
|
||||
So(res.Series[0].Name, ShouldEqual, "grafana.Percentage CPU")
|
||||
So(len(res.Series[0].Points), ShouldEqual, 5)
|
||||
|
||||
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 2.0875)
|
||||
So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549620780000)
|
||||
|
||||
So(res.Series[0].Points[1][0].Float64, ShouldEqual, 2.1525)
|
||||
So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1549620840000)
|
||||
|
||||
So(res.Series[0].Points[2][0].Float64, ShouldEqual, 2.155)
|
||||
So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1549620900000)
|
||||
|
||||
So(res.Series[0].Points[3][0].Float64, ShouldEqual, 3.6925)
|
||||
So(res.Series[0].Points[3][1].Float64, ShouldEqual, 1549620960000)
|
||||
|
||||
So(res.Series[0].Points[4][0].Float64, ShouldEqual, 2.44)
|
||||
So(res.Series[0].Points[4][1].Float64, ShouldEqual, 1549621020000)
|
||||
})
|
||||
|
||||
Convey("when data from query aggregated as total to one time series", func() {
|
||||
data, err := loadTestFile("./test-data/2-azure-monitor-response-total.json")
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &AzureMonitorQuery{
|
||||
UrlComponents: map[string]string{
|
||||
"resourceName": "grafana",
|
||||
},
|
||||
Params: url.Values{
|
||||
"aggregation": {"Total"},
|
||||
},
|
||||
}
|
||||
err = datasource.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 8.26)
|
||||
So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549718940000)
|
||||
})
|
||||
|
||||
Convey("when data from query aggregated as maximum to one time series", func() {
|
||||
data, err := loadTestFile("./test-data/3-azure-monitor-response-maximum.json")
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &AzureMonitorQuery{
|
||||
UrlComponents: map[string]string{
|
||||
"resourceName": "grafana",
|
||||
},
|
||||
Params: url.Values{
|
||||
"aggregation": {"Maximum"},
|
||||
},
|
||||
}
|
||||
err = datasource.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3.07)
|
||||
So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549722360000)
|
||||
})
|
||||
|
||||
Convey("when data from query aggregated as minimum to one time series", func() {
|
||||
data, err := loadTestFile("./test-data/4-azure-monitor-response-minimum.json")
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &AzureMonitorQuery{
|
||||
UrlComponents: map[string]string{
|
||||
"resourceName": "grafana",
|
||||
},
|
||||
Params: url.Values{
|
||||
"aggregation": {"Minimum"},
|
||||
},
|
||||
}
|
||||
err = datasource.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 1.51)
|
||||
So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549723380000)
|
||||
})
|
||||
|
||||
Convey("when data from query aggregated as Count to one time series", func() {
|
||||
data, err := loadTestFile("./test-data/5-azure-monitor-response-count.json")
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &AzureMonitorQuery{
|
||||
UrlComponents: map[string]string{
|
||||
"resourceName": "grafana",
|
||||
},
|
||||
Params: url.Values{
|
||||
"aggregation": {"Count"},
|
||||
},
|
||||
}
|
||||
err = datasource.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 4)
|
||||
So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1549723440000)
|
||||
})
|
||||
|
||||
Convey("when data from query aggregated as total and has dimension filter", func() {
|
||||
data, err := loadTestFile("./test-data/6-azure-monitor-response-multi-dimension.json")
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
|
||||
query := &AzureMonitorQuery{
|
||||
UrlComponents: map[string]string{
|
||||
"resourceName": "grafana",
|
||||
},
|
||||
Params: url.Values{
|
||||
"aggregation": {"Average"},
|
||||
},
|
||||
}
|
||||
err = datasource.parseResponse(res, data, query)
|
||||
So(err, ShouldBeNil)
|
||||
So(len(res.Series), ShouldEqual, 3)
|
||||
|
||||
So(res.Series[0].Name, ShouldEqual, "grafana{blobtype=PageBlob}.Blob Count")
|
||||
So(res.Series[0].Points[0][0].Float64, ShouldEqual, 3)
|
||||
|
||||
So(res.Series[1].Name, ShouldEqual, "grafana{blobtype=BlockBlob}.Blob Count")
|
||||
So(res.Series[1].Points[0][0].Float64, ShouldEqual, 1)
|
||||
|
||||
So(res.Series[2].Name, ShouldEqual, "grafana{blobtype=Azure Data Lake Storage}.Blob Count")
|
||||
So(res.Series[2].Points[0][0].Float64, ShouldEqual, 0)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Find closest allowed interval for auto time grain", func() {
|
||||
intervals := map[string]int64{
|
||||
"3m": 180000,
|
||||
"5m": 300000,
|
||||
"10m": 600000,
|
||||
"15m": 900000,
|
||||
"1d": 86400000,
|
||||
"2d": 172800000,
|
||||
}
|
||||
|
||||
closest := datasource.findClosestAllowedIntervalMS(intervals["3m"])
|
||||
So(closest, ShouldEqual, intervals["5m"])
|
||||
|
||||
closest = datasource.findClosestAllowedIntervalMS(intervals["10m"])
|
||||
So(closest, ShouldEqual, intervals["15m"])
|
||||
|
||||
closest = datasource.findClosestAllowedIntervalMS(intervals["2d"])
|
||||
So(closest, ShouldEqual, intervals["1d"])
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func loadTestFile(path string) (AzureMonitorResponse, error) {
    var data AzureMonitorResponse

    jsonBody, err := ioutil.ReadFile(path)
    if err != nil {
        return data, err
    }
    err = json.Unmarshal(jsonBody, &data)
    return data, err
}
pkg/tsdb/azuremonitor/azuremonitor.go (new file, 70 lines)
@@ -0,0 +1,70 @@
package azuremonitor

import (
    "context"
    "fmt"
    "net/http"

    "github.com/grafana/grafana/pkg/log"
    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/tsdb"
)

var (
    azlog log.Logger
)

// AzureMonitorExecutor executes queries for the Azure Monitor datasource - all four services
type AzureMonitorExecutor struct {
    httpClient *http.Client
    dsInfo     *models.DataSource
}

// NewAzureMonitorExecutor initializes an HTTP client
func NewAzureMonitorExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
    httpClient, err := dsInfo.GetHttpClient()
    if err != nil {
        return nil, err
    }

    return &AzureMonitorExecutor{
        httpClient: httpClient,
        dsInfo:     dsInfo,
    }, nil
}

func init() {
    azlog = log.New("tsdb.azuremonitor")
    tsdb.RegisterTsdbQueryEndpoint("grafana-azure-monitor-datasource", NewAzureMonitorExecutor)
}

// Query takes in the frontend queries, parses them into the query format
// expected by the chosen Azure Monitor service (Azure Monitor, App Insights etc.),
// executes the queries against the API and parses the responses into
// the time series format.
func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
    var result *tsdb.Response
    var err error

    var azureMonitorQueries []*tsdb.Query

    for _, query := range tsdbQuery.Queries {
        queryType := query.Model.Get("queryType").MustString("")

        switch queryType {
        case "Azure Monitor":
            azureMonitorQueries = append(azureMonitorQueries, query)
        default:
            return nil, fmt.Errorf("Alerting not supported for %s", queryType)
        }
    }

    azDatasource := &AzureMonitorDatasource{
        httpClient: e.httpClient,
        dsInfo:     e.dsInfo,
    }

    result, err = azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, tsdbQuery.TimeRange)

    return result, err
}
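For context, Query above only dispatches queries whose queryType is "Azure Monitor"; the other services fall through to the "Alerting not supported" error. A minimal sketch (not part of the commit) of the query model this executor expects — the field values are illustrative and mirror the test fixtures earlier in this diff:

package main

import (
    "fmt"

    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb"
)

func main() {
    query := &tsdb.Query{
        RefId: "A",
        Model: simplejson.NewFromAny(map[string]interface{}{
            "queryType": "Azure Monitor", // any other value makes Query return an error
            "azureMonitor": map[string]interface{}{
                "resourceGroup":    "grafanastaging", // illustrative values, taken from the tests
                "resourceName":     "grafana",
                "metricDefinition": "Microsoft.Compute/virtualMachines",
                "metricName":       "Percentage CPU",
                "aggregation":      "Average",
                "timeGrain":        "auto", // or an explicit ISO 8601 duration such as "PT1M"
                "alias":            "testalias",
            },
        }),
    }
    fmt.Println(query.Model.Get("queryType").MustString(""))
}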
pkg/tsdb/azuremonitor/test-data/1-azure-monitor-response-avg.json (new file, 47 lines)
@@ -0,0 +1,47 @@
{
  "cost": 0,
  "timespan": "2019-02-08T10:13:50Z\/2019-02-08T16:13:50Z",
  "interval": "PT1M",
  "value": [
    {
      "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU",
      "type": "Microsoft.Insights\/metrics",
      "name": {
        "value": "Percentage CPU",
        "localizedValue": "Percentage CPU"
      },
      "unit": "Percent",
      "timeseries": [
        {
          "metadatavalues": [],
          "data": [
            {
              "timeStamp": "2019-02-08T10:13:00Z",
              "average": 2.0875
            },
            {
              "timeStamp": "2019-02-08T10:14:00Z",
              "average": 2.1525
            },
            {
              "timeStamp": "2019-02-08T10:15:00Z",
              "average": 2.155
            },
            {
              "timeStamp": "2019-02-08T10:16:00Z",
              "average": 3.6925
            },
            {
              "timeStamp": "2019-02-08T10:17:00Z",
              "average": 2.44
            }
          ]
        }
      ]
    }
  ],
  "namespace": "Microsoft.Compute\/virtualMachines",
  "resourceregion": "westeurope"
}
pkg/tsdb/azuremonitor/test-data/2-azure-monitor-response-total.json (new file, 47 lines)
@@ -0,0 +1,47 @@
{
  "cost": 0,
  "timespan": "2019-02-09T13:29:41Z\/2019-02-09T19:29:41Z",
  "interval": "PT1M",
  "value": [
    {
      "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU",
      "type": "Microsoft.Insights\/metrics",
      "name": {
        "value": "Percentage CPU",
        "localizedValue": "Percentage CPU"
      },
      "unit": "Percent",
      "timeseries": [
        {
          "metadatavalues": [],
          "data": [
            {
              "timeStamp": "2019-02-09T13:29:00Z",
              "total": 8.26
            },
            {
              "timeStamp": "2019-02-09T13:30:00Z",
              "total": 8.7
            },
            {
              "timeStamp": "2019-02-09T13:31:00Z",
              "total": 14.82
            },
            {
              "timeStamp": "2019-02-09T13:32:00Z",
              "total": 10.07
            },
            {
              "timeStamp": "2019-02-09T13:33:00Z",
              "total": 8.52
            }
          ]
        }
      ]
    }
  ],
  "namespace": "Microsoft.Compute\/virtualMachines",
  "resourceregion": "westeurope"
}
pkg/tsdb/azuremonitor/test-data/3-azure-monitor-response-maximum.json (new file, 47 lines)
@@ -0,0 +1,47 @@
{
  "cost": 0,
  "timespan": "2019-02-09T14:26:12Z\/2019-02-09T20:26:12Z",
  "interval": "PT1M",
  "value": [
    {
      "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU",
      "type": "Microsoft.Insights\/metrics",
      "name": {
        "value": "Percentage CPU",
        "localizedValue": "Percentage CPU"
      },
      "unit": "Percent",
      "timeseries": [
        {
          "metadatavalues": [],
          "data": [
            {
              "timeStamp": "2019-02-09T14:26:00Z",
              "maximum": 3.07
            },
            {
              "timeStamp": "2019-02-09T14:27:00Z",
              "maximum": 2.92
            },
            {
              "timeStamp": "2019-02-09T14:28:00Z",
              "maximum": 2.87
            },
            {
              "timeStamp": "2019-02-09T14:29:00Z",
              "maximum": 2.27
            },
            {
              "timeStamp": "2019-02-09T14:30:00Z",
              "maximum": 2.52
            }
          ]
        }
      ]
    }
  ],
  "namespace": "Microsoft.Compute\/virtualMachines",
  "resourceregion": "westeurope"
}
pkg/tsdb/azuremonitor/test-data/4-azure-monitor-response-minimum.json (new file, 47 lines)
@@ -0,0 +1,47 @@
{
  "cost": 0,
  "timespan": "2019-02-09T14:43:21Z\/2019-02-09T20:43:21Z",
  "interval": "PT1M",
  "value": [
    {
      "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU",
      "type": "Microsoft.Insights\/metrics",
      "name": {
        "value": "Percentage CPU",
        "localizedValue": "Percentage CPU"
      },
      "unit": "Percent",
      "timeseries": [
        {
          "metadatavalues": [],
          "data": [
            {
              "timeStamp": "2019-02-09T14:43:00Z",
              "minimum": 1.51
            },
            {
              "timeStamp": "2019-02-09T14:44:00Z",
              "minimum": 2.38
            },
            {
              "timeStamp": "2019-02-09T14:45:00Z",
              "minimum": 1.69
            },
            {
              "timeStamp": "2019-02-09T14:46:00Z",
              "minimum": 2.27
            },
            {
              "timeStamp": "2019-02-09T14:47:00Z",
              "minimum": 1.96
            }
          ]
        }
      ]
    }
  ],
  "namespace": "Microsoft.Compute\/virtualMachines",
  "resourceregion": "westeurope"
}
pkg/tsdb/azuremonitor/test-data/5-azure-monitor-response-count.json (new file, 47 lines)
@@ -0,0 +1,47 @@
{
  "cost": 0,
  "timespan": "2019-02-09T14:44:52Z\/2019-02-09T20:44:52Z",
  "interval": "PT1M",
  "value": [
    {
      "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Compute\/virtualMachines\/grafana\/providers\/Microsoft.Insights\/metrics\/Percentage CPU",
      "type": "Microsoft.Insights\/metrics",
      "name": {
        "value": "Percentage CPU",
        "localizedValue": "Percentage CPU"
      },
      "unit": "Percent",
      "timeseries": [
        {
          "metadatavalues": [],
          "data": [
            {
              "timeStamp": "2019-02-09T14:44:00Z",
              "count": 4
            },
            {
              "timeStamp": "2019-02-09T14:45:00Z",
              "count": 4
            },
            {
              "timeStamp": "2019-02-09T14:46:00Z",
              "count": 4
            },
            {
              "timeStamp": "2019-02-09T14:47:00Z",
              "count": 4
            },
            {
              "timeStamp": "2019-02-09T14:48:00Z",
              "count": 4
            }
          ]
        }
      ]
    }
  ],
  "namespace": "Microsoft.Compute\/virtualMachines",
  "resourceregion": "westeurope"
}
pkg/tsdb/azuremonitor/test-data/6-azure-monitor-response-multi-dimension.json (new file, 128 lines)
@@ -0,0 +1,128 @@
{
  "cost": 0,
  "timespan": "2019-02-09T15:21:39Z\/2019-02-09T21:21:39Z",
  "interval": "PT1H",
  "value": [
    {
      "id": "\/subscriptions\/xxx\/resourceGroups\/grafanastaging\/providers\/Microsoft.Storage\/storageAccounts\/grafanastaging\/blobServices\/default\/providers\/Microsoft.Insights\/metrics\/BlobCount",
      "type": "Microsoft.Insights\/metrics",
      "name": {
        "value": "BlobCount",
        "localizedValue": "Blob Count"
      },
      "unit": "Count",
      "timeseries": [
        {
          "metadatavalues": [
            {
              "name": {
                "value": "blobtype",
                "localizedValue": "blobtype"
              },
              "value": "PageBlob"
            }
          ],
          "data": [
            {
              "timeStamp": "2019-02-09T15:21:00Z",
              "average": 3
            },
            {
              "timeStamp": "2019-02-09T16:21:00Z",
              "average": 3
            },
            {
              "timeStamp": "2019-02-09T17:21:00Z",
              "average": 3
            },
            {
              "timeStamp": "2019-02-09T18:21:00Z",
              "average": 3
            },
            {
              "timeStamp": "2019-02-09T19:21:00Z",
              "average": 3
            },
            {
              "timeStamp": "2019-02-09T20:21:00Z"
            }
          ]
        },
        {
          "metadatavalues": [
            {
              "name": {
                "value": "blobtype",
                "localizedValue": "blobtype"
              },
              "value": "BlockBlob"
            }
          ],
          "data": [
            {
              "timeStamp": "2019-02-09T15:21:00Z",
              "average": 1
            },
            {
              "timeStamp": "2019-02-09T16:21:00Z",
              "average": 1
            },
            {
              "timeStamp": "2019-02-09T17:21:00Z",
              "average": 1
            },
            {
              "timeStamp": "2019-02-09T18:21:00Z",
              "average": 1
            },
            {
              "timeStamp": "2019-02-09T19:21:00Z",
              "average": 1
            },
            {
              "timeStamp": "2019-02-09T20:21:00Z"
            }
          ]
        },
        {
          "metadatavalues": [
            {
              "name": {
                "value": "blobtype",
                "localizedValue": "blobtype"
              },
              "value": "Azure Data Lake Storage"
            }
          ],
          "data": [
            {
              "timeStamp": "2019-02-09T15:21:00Z",
              "average": 0
            },
            {
              "timeStamp": "2019-02-09T16:21:00Z",
              "average": 0
            },
            {
              "timeStamp": "2019-02-09T17:21:00Z",
              "average": 0
            },
            {
              "timeStamp": "2019-02-09T18:21:00Z",
              "average": 0
            },
            {
              "timeStamp": "2019-02-09T19:21:00Z",
              "average": 0
            },
            {
              "timeStamp": "2019-02-09T20:21:00Z"
            }
          ]
        }
      ]
    }
  ],
  "namespace": "Microsoft.Storage\/storageAccounts\/blobServices",
  "resourceregion": "westeurope"
}
pkg/tsdb/azuremonitor/time-grain.go (new file, 52 lines)
@@ -0,0 +1,52 @@
package azuremonitor

import (
    "fmt"
    "strconv"
    "strings"
    "time"

    "github.com/grafana/grafana/pkg/tsdb"
)

// TimeGrain handles conversions between the ISO 8601 duration format (PT1H),
// Kbn units (1h) and time grains (1 hour).
// It also handles using the automatic Grafana interval to calculate an ISO 8601 duration.
type TimeGrain struct{}

var (
    smallTimeUnits = []string{"hour", "minute", "h", "m"}
)

func (tg *TimeGrain) createISO8601DurationFromIntervalMS(interval int64) (string, error) {
    formatted := tsdb.FormatDuration(time.Duration(interval) * time.Millisecond)

    if strings.Contains(formatted, "ms") {
        return "PT1M", nil
    }

    timeValueString := formatted[0 : len(formatted)-1]
    timeValue, err := strconv.Atoi(timeValueString)
    if err != nil {
        return "", fmt.Errorf("Could not parse interval %v to an ISO 8601 duration", interval)
    }

    unit := formatted[len(formatted)-1:]

    if unit == "s" && timeValue < 60 {
        // the minimum interval for Azure Monitor is 1m
        return "PT1M", nil
    }

    return tg.createISO8601Duration(timeValue, unit), nil
}

func (tg *TimeGrain) createISO8601Duration(timeValue int, timeUnit string) string {
    for _, smallTimeUnit := range smallTimeUnits {
        if timeUnit == smallTimeUnit {
            return fmt.Sprintf("PT%v%v", timeValue, strings.ToUpper(timeUnit[0:1]))
        }
    }

    return fmt.Sprintf("P%v%v", timeValue, strings.ToUpper(timeUnit[0:1]))
}
pkg/tsdb/azuremonitor/time-grain_test.go (new file, 71 lines)
@@ -0,0 +1,71 @@
package azuremonitor

import (
    "testing"

    . "github.com/smartystreets/goconvey/convey"
)

func TestTimeGrain(t *testing.T) {
    Convey("TimeGrain", t, func() {
        tgc := &TimeGrain{}

        Convey("create ISO 8601 Duration", func() {
            Convey("when given a time unit smaller than a day", func() {
                minuteKbnDuration := tgc.createISO8601Duration(1, "m")
                hourKbnDuration := tgc.createISO8601Duration(2, "h")
                minuteDuration := tgc.createISO8601Duration(1, "minute")
                hourDuration := tgc.createISO8601Duration(2, "hour")

                Convey("should convert it to a time duration", func() {
                    So(minuteKbnDuration, ShouldEqual, "PT1M")
                    So(hourKbnDuration, ShouldEqual, "PT2H")

                    So(minuteDuration, ShouldEqual, "PT1M")
                    So(hourDuration, ShouldEqual, "PT2H")
                })
            })

            Convey("when given the day time unit", func() {
                kbnDuration := tgc.createISO8601Duration(1, "d")
                duration := tgc.createISO8601Duration(2, "day")

                Convey("should convert it to a date duration", func() {
                    So(kbnDuration, ShouldEqual, "P1D")
                    So(duration, ShouldEqual, "P2D")
                })
            })
        })

        Convey("create ISO 8601 Duration from Grafana interval in milliseconds", func() {
            Convey("and interval is less than a minute", func() {
                durationMS, err := tgc.createISO8601DurationFromIntervalMS(100)
                So(err, ShouldBeNil)

                durationS, err := tgc.createISO8601DurationFromIntervalMS(59999)
                So(err, ShouldBeNil)

                Convey("should be rounded up to a minute as is the minimum interval for Azure Monitor", func() {
                    So(durationMS, ShouldEqual, "PT1M")
                    So(durationS, ShouldEqual, "PT1M")
                })
            })

            Convey("and interval is more than a minute", func() {
                intervals := map[string]int64{
                    "10m": 600000,
                    "2d":  172800000,
                }
                durationM, err := tgc.createISO8601DurationFromIntervalMS(intervals["10m"])
                So(err, ShouldBeNil)
                durationD, err := tgc.createISO8601DurationFromIntervalMS(intervals["2d"])
                So(err, ShouldBeNil)

                Convey("should be converted to ISO 8601 durations", func() {
                    So(durationM, ShouldEqual, "PT10M")
                    So(durationD, ShouldEqual, "P2D")
                })
            })
        })
    })
}
pkg/tsdb/azuremonitor/types.go (new file, 77 lines)
@@ -0,0 +1,77 @@
package azuremonitor

import (
    "net/url"
    "time"
)

// AzureMonitorQuery is the query for all the services as they have similar queries
// with a URL, a querystring and an alias field
type AzureMonitorQuery struct {
    URL           string
    UrlComponents map[string]string
    Target        string
    Params        url.Values
    RefID         string
    Alias         string
}

// AzureMonitorResponse is the json response from the Azure Monitor API
type AzureMonitorResponse struct {
    Cost     int    `json:"cost"`
    Timespan string `json:"timespan"`
    Interval string `json:"interval"`
    Value    []struct {
        ID   string `json:"id"`
        Type string `json:"type"`
        Name struct {
            Value          string `json:"value"`
            LocalizedValue string `json:"localizedValue"`
        } `json:"name"`
        Unit       string `json:"unit"`
        Timeseries []struct {
            Metadatavalues []struct {
                Name struct {
                    Value          string `json:"value"`
                    LocalizedValue string `json:"localizedValue"`
                } `json:"name"`
                Value string `json:"value"`
            } `json:"metadatavalues"`
            Data []struct {
                TimeStamp time.Time `json:"timeStamp"`
                Average   float64   `json:"average,omitempty"`
                Total     float64   `json:"total,omitempty"`
                Count     float64   `json:"count,omitempty"`
                Maximum   float64   `json:"maximum,omitempty"`
                Minimum   float64   `json:"minimum,omitempty"`
            } `json:"data"`
        } `json:"timeseries"`
    } `json:"value"`
    Namespace      string `json:"namespace"`
    Resourceregion string `json:"resourceregion"`
}

// ApplicationInsightsResponse is the json response from the Application Insights API
type ApplicationInsightsResponse struct {
    Tables []struct {
        TableName string `json:"TableName"`
        Columns   []struct {
            ColumnName string `json:"ColumnName"`
            DataType   string `json:"DataType"`
            ColumnType string `json:"ColumnType"`
        } `json:"Columns"`
        Rows [][]interface{} `json:"Rows"`
    } `json:"Tables"`
}

// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
    Tables []struct {
        Name    string `json:"name"`
        Columns []struct {
            Name string `json:"name"`
            Type string `json:"type"`
        } `json:"columns"`
        Rows [][]interface{} `json:"rows"`
    } `json:"tables"`
}
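As a small usage sketch (not part of the commit), decoding one of the bundled test-data responses into AzureMonitorResponse and reading the first point — this mirrors what unmarshalResponse and the test helper loadTestFile do. The function name exampleDecode is hypothetical, and the relative path assumes the package directory is the working directory:

package azuremonitor

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
)

// exampleDecode is a hypothetical helper for illustration only.
func exampleDecode() error {
    body, err := ioutil.ReadFile("./test-data/1-azure-monitor-response-avg.json")
    if err != nil {
        return err
    }
    var data AzureMonitorResponse
    if err := json.Unmarshal(body, &data); err != nil {
        return err
    }
    first := data.Value[0].Timeseries[0].Data[0]
    // prints: Percentage CPU 2019-02-08 10:13:00 +0000 UTC 2.0875
    fmt.Println(data.Value[0].Name.LocalizedValue, first.TimeStamp, first.Average)
    return nil
}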
pkg/tsdb/azuremonitor/url-builder.go (new file, 28 lines)
@@ -0,0 +1,28 @@
package azuremonitor

import (
    "fmt"
    "strings"
)

// urlBuilder builds the URL for calling the Azure Monitor API
type urlBuilder struct {
    ResourceGroup    string
    MetricDefinition string
    ResourceName     string
}

// Build checks the metric definition property to see which form of the url
// should be returned
func (ub *urlBuilder) Build() string {

    if strings.Count(ub.MetricDefinition, "/") > 1 {
        rn := strings.Split(ub.ResourceName, "/")
        lastIndex := strings.LastIndex(ub.MetricDefinition, "/")
        service := ub.MetricDefinition[lastIndex+1:]
        md := ub.MetricDefinition[0:lastIndex]
        return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, md, rn[0], service, rn[1])
    }

    return fmt.Sprintf("resourceGroups/%s/providers/%s/%s/providers/microsoft.insights/metrics", ub.ResourceGroup, ub.MetricDefinition, ub.ResourceName)
}
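To spell out the long-form branch above, a brief sketch (not part of the commit; exampleURLs is a hypothetical name) showing how Build interleaves the split resource name with the service segment. The inputs and outputs mirror the unit tests that follow:

package azuremonitor

import "fmt"

// exampleURLs demonstrates both branches of urlBuilder.Build.
func exampleURLs() {
    // Short form: one slash in the metric definition.
    short := (&urlBuilder{ResourceGroup: "rg", MetricDefinition: "Microsoft.Compute/virtualMachines", ResourceName: "rn"}).Build()
    // -> resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics

    // Long form: two slashes trigger the split. "Microsoft.Storage/storageAccounts/blobServices"
    // becomes md="Microsoft.Storage/storageAccounts" and service="blobServices"; the resource
    // name "rn1/default" is split into rn[0]="rn1" and rn[1]="default", then interleaved.
    long := (&urlBuilder{ResourceGroup: "rg", MetricDefinition: "Microsoft.Storage/storageAccounts/blobServices", ResourceName: "rn1/default"}).Build()
    // -> resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/blobServices/default/providers/microsoft.insights/metrics

    fmt.Println(short)
    fmt.Println(long)
}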
pkg/tsdb/azuremonitor/url-builder_test.go (new file, 45 lines)
@@ -0,0 +1,45 @@
package azuremonitor

import (
    "testing"

    . "github.com/smartystreets/goconvey/convey"
)

func TestURLBuilder(t *testing.T) {
    Convey("AzureMonitor URL Builder", t, func() {

        Convey("when metric definition is in the short form", func() {
            ub := &urlBuilder{
                ResourceGroup:    "rg",
                MetricDefinition: "Microsoft.Compute/virtualMachines",
                ResourceName:     "rn",
            }

            url := ub.Build()
            So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/rn/providers/microsoft.insights/metrics")
        })

        Convey("when metric definition is Microsoft.Storage/storageAccounts/blobServices", func() {
            ub := &urlBuilder{
                ResourceGroup:    "rg",
                MetricDefinition: "Microsoft.Storage/storageAccounts/blobServices",
                ResourceName:     "rn1/default",
            }

            url := ub.Build()
            So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/blobServices/default/providers/microsoft.insights/metrics")
        })

        Convey("when metric definition is Microsoft.Storage/storageAccounts/fileServices", func() {
            ub := &urlBuilder{
                ResourceGroup:    "rg",
                MetricDefinition: "Microsoft.Storage/storageAccounts/fileServices",
                ResourceName:     "rn1/default",
            }

            url := ub.Build()
            So(url, ShouldEqual, "resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/rn1/fileServices/default/providers/microsoft.insights/metrics")
        })
    })
}
@@ -59,11 +59,11 @@ func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.Duration) Interval {
    interval := time.Duration((to - from) / defaultRes)

    if interval < minInterval {
-       return Interval{Text: formatDuration(minInterval), Value: minInterval}
+       return Interval{Text: FormatDuration(minInterval), Value: minInterval}
    }

    rounded := roundInterval(interval)
-   return Interval{Text: formatDuration(rounded), Value: rounded}
+   return Interval{Text: FormatDuration(rounded), Value: rounded}
}

func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) {
@@ -89,7 +89,8 @@ func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) {
    return parsedInterval, nil
}

-func formatDuration(inter time.Duration) string {
+// FormatDuration converts a duration into the kbn format e.g. 1m 2h or 3d
+func FormatDuration(inter time.Duration) string {
    if inter >= year {
        return fmt.Sprintf("%dy", inter/year)
    }
@@ -51,11 +51,11 @@ func TestInterval(t *testing.T) {
        })

        Convey("Format value", func() {
-           So(formatDuration(time.Second*61), ShouldEqual, "1m")
-           So(formatDuration(time.Millisecond*30), ShouldEqual, "30ms")
-           So(formatDuration(time.Hour*23), ShouldEqual, "23h")
-           So(formatDuration(time.Hour*24), ShouldEqual, "1d")
-           So(formatDuration(time.Hour*24*367), ShouldEqual, "1y")
+           So(FormatDuration(time.Second*61), ShouldEqual, "1m")
+           So(FormatDuration(time.Millisecond*30), ShouldEqual, "30ms")
+           So(FormatDuration(time.Hour*23), ShouldEqual, "23h")
+           So(FormatDuration(time.Hour*24), ShouldEqual, "1d")
+           So(FormatDuration(time.Hour*24*367), ShouldEqual, "1y")
        })
    })
}
@@ -158,5 +158,6 @@
  },

  "metrics": true,
- "annotations": true
+ "annotations": true,
+ "alerting": true
}