Azure: Application Insights metrics to Frame and support multiple query dimensions (#25849)

- The Application Insights service now returns a dataframe: a "wide" formatted dataframe with a single time index.
- Multiple "group by" dimensions may now be selected for Application Insights, instead of just one.
- Some types are copied (and slightly altered) from the Azure Go SDK, but that SDK is not imported at this time.

Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
Kyle Brandt authored on 2020-06-29 15:06:58 -04:00, committed by GitHub
parent 1a711e7df0, commit 9a8289b6d9
13 changed files with 682 additions and 379 deletions
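For illustration, here is a minimal sketch (not part of this commit) of the "wide" frame shape described in the summary above, built with the grafana-plugin-sdk-go data package: one shared time field plus one value field per dimension combination, distinguished by labels. The metric and dimension values are made up.

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/xorcare/pointer"
)

func main() {
	t0 := time.Date(2020, 6, 25, 16, 15, 0, 0, time.UTC)
	t1 := time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC)

	// One time field shared by every series; each dimension combination
	// becomes its own value field, identified by its labels.
	frame := data.NewFrame("",
		data.NewField("StartTime", nil, []time.Time{t0, t1}),
		data.NewField("requests/count", data.Labels{"client/city": "Tokyo"},
			[]*float64{pointer.Float64(1), pointer.Float64(3)}),
		data.NewField("requests/count", data.Labels{"client/city": "Chicago"},
			[]*float64{nil, pointer.Float64(2)}), // nil where a segment has no value
	)

	fmt.Println(frame.Rows()) // 2
}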

View File

@@ -12,8 +12,8 @@ import (
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
@@ -24,25 +24,28 @@ import (
"golang.org/x/net/context/ctxhttp"
)
// ApplicationInsightsDatasource calls the application insights query API's
// ApplicationInsightsDatasource calls the application insights query API.
type ApplicationInsightsDatasource struct {
httpClient *http.Client
dsInfo *models.DataSource
}
// ApplicationInsightsQuery is the model that holds the information
// needed to make a metrics query to Application Insights, and the information
// used to parse the response.
type ApplicationInsightsQuery struct {
RefID string
IsRaw bool
// Text based raw query options.
ApiURL string
Params url.Values
Alias string
Target string
// Text based raw query options
ApiURL string
Params url.Values
Alias string
Target string
TimeColumnName string
ValueColumnName string
SegmentColumnName string
// These fields are used when parsing the response.
metricName string
dimensions []string
aggregation string
}
func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) {
@@ -109,24 +112,23 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time
}
params.Add("aggregation", insightsJSONModel.Aggregation)
dimension := strings.TrimSpace(insightsJSONModel.Dimension)
// Azure Monitor combines this and the following logic such that if dimensionFilter is set, Dimension must also be set; should that be done here as well?
if dimension != "" && !strings.EqualFold(dimension, "none") {
params.Add("segment", dimension)
}
dimensionFilter := strings.TrimSpace(insightsJSONModel.DimensionFilter)
if dimensionFilter != "" {
params.Add("filter", dimensionFilter)
}
if len(insightsJSONModel.Dimensions) != 0 {
params.Add("segment", strings.Join(insightsJSONModel.Dimensions, ","))
}
applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
RefID: query.RefId,
IsRaw: false,
ApiURL: azureURL,
Params: params,
Alias: insightsJSONModel.Alias,
Target: params.Encode(),
RefID: query.RefId,
ApiURL: azureURL,
Params: params,
Alias: insightsJSONModel.Alias,
Target: params.Encode(),
metricName: insightsJSONModel.MetricName,
aggregation: insightsJSONModel.Aggregation,
dimensions: insightsJSONModel.Dimensions,
})
}
@@ -180,12 +182,18 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
return nil, fmt.Errorf("Request failed status: %v", res.Status)
}
queryResult.Series, err = e.parseTimeSeriesFromMetrics(body, query)
mr := MetricsResult{}
err = json.Unmarshal(body, &mr)
if err != nil {
return nil, err
}
frame, err := InsightsMetricsResultToFrame(mr, query.metricName, query.aggregation, query.dimensions)
if err != nil {
queryResult.Error = err
return queryResult, nil
}
queryResult.Dataframes = tsdb.NewDecodedDataFrames(data.Frames{frame})
return queryResult, nil
}
@@ -242,240 +250,3 @@ func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourc
return pluginRoute, pluginRouteName, nil
}
func (e *ApplicationInsightsDatasource) parseTimeSeriesFromMetrics(body []byte, query *ApplicationInsightsQuery) (tsdb.TimeSeriesSlice, error) {
doc, err := simplejson.NewJson(body)
if err != nil {
return nil, err
}
value := doc.Get("value").MustMap()
if value == nil {
return nil, errors.New("could not find value element")
}
endStr, ok := value["end"].(string)
if !ok {
return nil, errors.New("missing 'end' value in response")
}
endTime, err := time.Parse(time.RFC3339Nano, endStr)
if err != nil {
return nil, fmt.Errorf("bad 'end' value: %v", err)
}
for k, v := range value {
switch k {
case "start":
case "end":
case "interval":
case "segments":
// we have segments!
return parseSegmentedValueTimeSeries(query, endTime, v)
default:
return parseSingleValueTimeSeries(query, k, endTime, v)
}
}
azlog.Error("Bad response from application insights/metrics", "body", string(body))
return nil, errors.New("could not find expected values in response")
}
func parseSegmentedValueTimeSeries(query *ApplicationInsightsQuery, endTime time.Time, segmentsJson interface{}) (tsdb.TimeSeriesSlice, error) {
segments, ok := segmentsJson.([]interface{})
if !ok {
return nil, errors.New("bad segments value")
}
slice := tsdb.TimeSeriesSlice{}
seriesMap := map[string]*tsdb.TimeSeriesPoints{}
for _, segment := range segments {
segmentMap, ok := segment.(map[string]interface{})
if !ok {
return nil, errors.New("bad segments value")
}
err := processSegment(&slice, segmentMap, query, endTime, seriesMap)
if err != nil {
return nil, err
}
}
return slice, nil
}
func processSegment(slice *tsdb.TimeSeriesSlice, segment map[string]interface{}, query *ApplicationInsightsQuery, endTime time.Time, pointMap map[string]*tsdb.TimeSeriesPoints) error {
var segmentName string
var segmentValue string
var childSegments []interface{}
hasChildren := false
var value float64
var valueName string
var ok bool
var err error
for k, v := range segment {
switch k {
case "start":
case "end":
endStr, ok := v.(string)
if !ok {
return errors.New("missing 'end' value in response")
}
endTime, err = time.Parse(time.RFC3339Nano, endStr)
if err != nil {
return fmt.Errorf("bad 'end' value: %v", err)
}
case "segments":
childSegments, ok = v.([]interface{})
if !ok {
return errors.New("invalid format segments")
}
hasChildren = true
default:
mapping, hasValues := v.(map[string]interface{})
if hasValues {
valueName = k
value, err = getAggregatedValue(mapping, valueName)
if err != nil {
return err
}
} else {
segmentValue, ok = v.(string)
if !ok {
return fmt.Errorf("invalid mapping for key %v", k)
}
segmentName = k
}
}
}
if hasChildren {
for _, s := range childSegments {
segmentMap, ok := s.(map[string]interface{})
if !ok {
return errors.New("invalid format segments")
}
if err := processSegment(slice, segmentMap, query, endTime, pointMap); err != nil {
return err
}
}
} else {
aliased := formatApplicationInsightsLegendKey(query.Alias, valueName, segmentName, segmentValue)
if segmentValue == "" {
segmentValue = valueName
}
points, ok := pointMap[segmentValue]
if !ok {
series := tsdb.NewTimeSeries(aliased, tsdb.TimeSeriesPoints{})
points = &series.Points
*slice = append(*slice, series)
pointMap[segmentValue] = points
}
*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(endTime.Unix()*1000)))
}
return nil
}
func parseSingleValueTimeSeries(query *ApplicationInsightsQuery, metricName string, endTime time.Time, valueJson interface{}) (tsdb.TimeSeriesSlice, error) {
legend := formatApplicationInsightsLegendKey(query.Alias, metricName, "", "")
valueMap, ok := valueJson.(map[string]interface{})
if !ok {
return nil, errors.New("bad value aggregation")
}
metricValue, err := getAggregatedValue(valueMap, metricName)
if err != nil {
return nil, err
}
return []*tsdb.TimeSeries{
tsdb.NewTimeSeries(
legend,
tsdb.TimeSeriesPoints{
tsdb.NewTimePoint(
null.FloatFrom(metricValue),
float64(endTime.Unix()*1000)),
},
),
}, nil
}
func getAggregatedValue(valueMap map[string]interface{}, valueName string) (float64, error) {
aggValue := ""
var metricValue float64
var err error
for k, v := range valueMap {
if aggValue != "" {
return 0, fmt.Errorf("found multiple aggregations, %v, %v", aggValue, k)
}
if k == "" {
return 0, errors.New("found no aggregation name")
}
aggValue = k
metricValue, err = getFloat(v)
if err != nil {
return 0, fmt.Errorf("bad value: %v", err)
}
}
if aggValue == "" {
return 0, fmt.Errorf("no aggregation value found for %v", valueName)
}
return metricValue, nil
}
func getFloat(in interface{}) (float64, error) {
if out, ok := in.(float32); ok {
return float64(out), nil
} else if out, ok := in.(int32); ok {
return float64(out), nil
} else if out, ok := in.(json.Number); ok {
return out.Float64()
} else if out, ok := in.(int64); ok {
return float64(out), nil
} else if out, ok := in.(float64); ok {
return out, nil
}
return 0, fmt.Errorf("cannot convert '%v' to float32", in)
}
// formatApplicationInsightsLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatApplicationInsightsLegendKey(alias string, metricName string, dimensionName string, dimensionValue string) string {
if alias == "" {
if len(dimensionName) > 0 {
return fmt.Sprintf("{%s=%s}.%s", dimensionName, dimensionValue, metricName)
}
return metricName
}
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
switch metaPartName {
case "metric":
return []byte(metricName)
case "dimensionname", "groupbyname":
return []byte(dimensionName)
case "dimensionvalue", "groupbyvalue":
return []byte(dimensionValue)
}
return in
})
return string(result)
}
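
To make the wire-level effect of the buildQueries change in this file concrete, here is a small standalone sketch (not part of the commit; the parameter values are illustrative) showing how the selected dimensions are joined into a single comma-separated segment parameter for the Application Insights metrics request.

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	// Assumed user selection of multiple "group by" dimensions.
	dimensions := []string{"client/countryOrRegion", "client/city"}

	params := url.Values{}
	params.Add("aggregation", "sum")
	params.Add("interval", "PT1M")
	if len(dimensions) != 0 {
		// Mirrors buildQueries: all selected dimensions become one
		// comma-separated "segment" value.
		params.Add("segment", strings.Join(dimensions, ","))
	}

	fmt.Println(params.Encode())
	// aggregation=sum&interval=PT1M&segment=client%2FcountryOrRegion%2Cclient%2Fcity
}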

View File

@@ -1,8 +1,8 @@
package azuremonitor
import (
"encoding/json"
"fmt"
"io/ioutil"
"testing"
"time"
@@ -143,86 +143,6 @@ func TestApplicationInsightsDatasource(t *testing.T) {
So(queries[0].Target, ShouldEqual, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
})
})
Convey("Parse Application Insights metrics API", func() {
Convey("single value", func() {
data, err := ioutil.ReadFile("testdata/applicationinsights/3-application-insights-response-metrics-single-value.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
}
series, err := datasource.parseTimeSeriesFromMetrics(data, query)
So(err, ShouldBeNil)
So(len(series), ShouldEqual, 1)
So(series[0].Name, ShouldEqual, "value")
So(len(series[0].Points), ShouldEqual, 1)
So(series[0].Points[0][0].Float64, ShouldEqual, 1.2)
So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
})
Convey("1H separation", func() {
data, err := ioutil.ReadFile("testdata/applicationinsights/4-application-insights-response-metrics-no-segment.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
}
series, err := datasource.parseTimeSeriesFromMetrics(data, query)
So(err, ShouldBeNil)
So(len(series), ShouldEqual, 1)
So(series[0].Name, ShouldEqual, "value")
So(len(series[0].Points), ShouldEqual, 2)
So(series[0].Points[0][0].Float64, ShouldEqual, 1)
So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
So(series[0].Points[1][0].Float64, ShouldEqual, 2)
So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568343723000))
Convey("with segmentation", func() {
data, err := ioutil.ReadFile("testdata/applicationinsights/4-application-insights-response-metrics-segmented.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
}
series, err := datasource.parseTimeSeriesFromMetrics(data, query)
So(err, ShouldBeNil)
So(len(series), ShouldEqual, 2)
So(series[0].Name, ShouldEqual, "{blob=a}.value")
So(len(series[0].Points), ShouldEqual, 2)
So(series[0].Points[0][0].Float64, ShouldEqual, 1)
So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
So(series[0].Points[1][0].Float64, ShouldEqual, 2)
So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568343723000))
So(series[1].Name, ShouldEqual, "{blob=b}.value")
So(len(series[1].Points), ShouldEqual, 2)
So(series[1].Points[0][0].Float64, ShouldEqual, 3)
So(series[1].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
So(series[1].Points[1][0].Float64, ShouldEqual, 4)
So(series[1].Points[1][1].Float64, ShouldEqual, int64(1568343723000))
Convey("with alias", func() {
data, err := ioutil.ReadFile("testdata/applicationinsights/4-application-insights-response-metrics-segmented.json")
So(err, ShouldBeNil)
query := &ApplicationInsightsQuery{
IsRaw: false,
Alias: "{{metric}} {{dimensionname}} {{dimensionvalue}}",
}
series, err := datasource.parseTimeSeriesFromMetrics(data, query)
So(err, ShouldBeNil)
So(len(series), ShouldEqual, 2)
So(series[0].Name, ShouldEqual, "value blob a")
So(series[1].Name, ShouldEqual, "value blob b")
})
})
})
})
})
}
@@ -291,3 +211,20 @@ func TestAppInsightsPluginRoutes(t *testing.T) {
}
}
func TestInsightsDimensionsUnmarshalJSON(t *testing.T) {
a := []byte(`"foo"`)
b := []byte(`["foo"]`)
var as InsightsDimensions
var bs InsightsDimensions
err := json.Unmarshal(a, &as)
require.NoError(t, err)
require.Equal(t, []string{"foo"}, []string(as))
err = json.Unmarshal(b, &bs)
require.NoError(t, err)
require.Equal(t, []string{"foo"}, []string(bs))
}

View File

@@ -0,0 +1,306 @@
package azuremonitor
import (
"encoding/json"
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// InsightsMetricsResultToFrame converts a MetricsResult (an Application Insights metrics query response) to a dataframe.
// Due to the dynamic nature of the MetricsResult object, the name of the metric, aggregation,
// and requested dimensions are used to determine the expected shape of the object.
// This builds all series into a single data.Frame with one time index (a wide formatted time series frame).
func InsightsMetricsResultToFrame(mr MetricsResult, metric, agg string, dimensions []string) (*data.Frame, error) {
dimLen := len(dimensions)
// The Response has both Start and End times, so we name the column "StartTime".
frame := data.NewFrame("", data.NewField("StartTime", nil, []time.Time{}))
fieldIdxMap := map[string]int{} // a map of a string representation of the labels to the Field index in the frame.
rowCounter := 0 // row in the resulting frame
if mr.Value == nil { // never seen this response, but to ensure there is no panic
return nil, fmt.Errorf("unexpected nil response or response value in metrics result")
}
for _, seg := range *mr.Value.Segments { // each top level segment in the response shares timestamps.
frame.Extend(1)
frame.Set(0, rowCounter, seg.Start) // field 0 is the time field
labels := data.Labels{}
// handleLeafSegment is for the leaf MetricsSegmentInfo nodes in the response.
// A leaf node contains an aggregated value, and when there are multiple dimensions, a label key/value pair.
handleLeafSegment := func(s MetricsSegmentInfo) error {
// since this is a dynamic response, everything we are interested in here from JSON
// is unmarshalled (mapped) into the AdditionalProperties property.
v, err := valFromLeafAP(s.AdditionalProperties, metric, agg)
if err != nil {
return err
}
if dimLen != 0 { // when there are dimensions, the final dimension is in this inner segment.
dimension := dimensions[dimLen-1]
dimVal, err := dimValueFromAP(s.AdditionalProperties, dimension)
if err != nil {
return err
}
labels[dimension] = dimVal
}
if _, ok := fieldIdxMap[labels.String()]; !ok {
// When we find a new combination of labels for the metric, a new Field is appended.
frame.Fields = append(frame.Fields, data.NewField(metric, labels.Copy(), make([]*float64, rowCounter+1)))
fieldIdxMap[labels.String()] = len(frame.Fields) - 1
}
frame.Set(fieldIdxMap[labels.String()], rowCounter, v)
return nil
}
// Simple case with no segments/dimensions
if dimLen == 0 {
if err := handleLeafSegment(seg); err != nil {
return nil, err
}
rowCounter++
continue
}
// Multiple dimension case
var traverse func(segments *[]MetricsSegmentInfo, depth int) error
// traverse walks segments collecting dimensions into labels until leaf segments are
// reached, and then handleLeafSegment is called. The final k/v label pair is
// in the leaf segment.
// A non-recursive implementation would probably be better.
traverse = func(segments *[]MetricsSegmentInfo, depth int) error {
if segments == nil {
return nil
}
for _, seg := range *segments {
if seg.Segments == nil {
if err := handleLeafSegment(seg); err != nil {
return err
}
continue
}
dimension := dimensions[depth]
dimVal, err := dimValueFromAP(seg.AdditionalProperties, dimension)
if err != nil {
return err
}
labels[dimension] = dimVal
if err := traverse(seg.Segments, depth+1); err != nil {
return err
}
}
return nil
}
if err := traverse(seg.Segments, 0); err != nil {
return nil, err
}
rowCounter++
}
return frame, nil
}
// valFromLeafAP extracts the value for the given metric and aggregation (agg)
// from the dynamic AdditionalProperties properties of a leaf node. It is for use in the InsightsMetricsResultToFrame
// function.
func valFromLeafAP(ap map[string]interface{}, metric, agg string) (*float64, error) {
if ap == nil {
return nil, fmt.Errorf("expected additional properties for metric %v not found in leaf segment", metric)
}
met, ok := ap[metric]
if !ok {
return nil, fmt.Errorf("expected additional properties for metric %v not found in leaf segment", metric)
}
metMap, ok := met.(map[string]interface{})
if !ok {
return nil, fmt.Errorf("unexpected type for additional properties not found in leaf segment, want map[string]interface{}, but got %T", met)
}
metVal, ok := metMap[agg]
if !ok {
return nil, fmt.Errorf("expected value for aggregation %v not found in leaf segment", agg)
}
var v *float64
if val, ok := metVal.(float64); ok {
v = &val
}
return v, nil
}
// dimValueFromAP fetches the value as a string for the corresponding dimension from the dynamic AdditionalProperties properties of a leaf node. It is for use in the InsightsMetricsResultToFrame
// function.
func dimValueFromAP(ap map[string]interface{}, dimension string) (string, error) {
rawDimValue, ok := ap[dimension]
if !ok {
return "", fmt.Errorf("expected dimension key %v not found in response", dimension)
}
dimValue, ok := rawDimValue.(string)
if !ok {
return "", fmt.Errorf("unexpected non-string value for the value for dimension %v, got type %T with a value of %v", dimension, rawDimValue, dimValue)
}
return dimValue, nil
}
// MetricsResult a metric result.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights.
type MetricsResult struct {
Value *MetricsResultInfo `json:"value,omitempty"`
}
// MetricsResultInfo a metric result data.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
type MetricsResultInfo struct {
// AdditionalProperties - Unmatched properties from the message are deserialized into this collection
AdditionalProperties map[string]interface{} `json:""`
// Start - Start time of the metric.
Start time.Time `json:"start,omitempty"`
// End - End time of the metric.
End time.Time `json:"end,omitempty"`
// Interval - The interval used to segment the metric data.
Interval *string `json:"interval,omitempty"`
// Segments - Segmented metric data (if segmented).
Segments *[]MetricsSegmentInfo `json:"segments,omitempty"`
}
// MetricsSegmentInfo is a metric segment.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
type MetricsSegmentInfo struct {
// AdditionalProperties - Unmatched properties from the message are deserialized into this collection
AdditionalProperties map[string]interface{} `json:""`
// Start - Start time of the metric segment (only when an interval was specified).
Start time.Time `json:"start,omitempty"`
// End - End time of the metric segment (only when an interval was specified).
End time.Time `json:"end,omitempty"`
// Segments - Segmented metric data (if further segmented).
Segments *[]MetricsSegmentInfo `json:"segments,omitempty"`
}
// UnmarshalJSON is the custom unmarshaler for MetricsSegmentInfo struct.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
func (mri *MetricsSegmentInfo) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
default:
if v != nil {
var additionalProperties interface{}
err = json.Unmarshal(*v, &additionalProperties)
if err != nil {
return err
}
if mri.AdditionalProperties == nil {
mri.AdditionalProperties = make(map[string]interface{})
}
mri.AdditionalProperties[k] = additionalProperties
}
case "start":
if v != nil {
var start time.Time
err = json.Unmarshal(*v, &start)
if err != nil {
return err
}
mri.Start = start
}
case "end":
if v != nil {
var end time.Time
err = json.Unmarshal(*v, &end)
if err != nil {
return err
}
mri.End = end
}
case "segments":
if v != nil {
var segments []MetricsSegmentInfo
err = json.Unmarshal(*v, &segments)
if err != nil {
return err
}
mri.Segments = &segments
}
}
}
return nil
}
// UnmarshalJSON is the custom unmarshaler for MetricsResultInfo struct.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
func (mri *MetricsResultInfo) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
default:
if v != nil {
var additionalProperties interface{}
err = json.Unmarshal(*v, &additionalProperties)
if err != nil {
return err
}
if mri.AdditionalProperties == nil {
mri.AdditionalProperties = make(map[string]interface{})
}
mri.AdditionalProperties[k] = additionalProperties
}
case "start":
if v != nil {
var start time.Time
err = json.Unmarshal(*v, &start)
if err != nil {
return err
}
mri.Start = start
}
case "end":
if v != nil {
var end time.Time
err = json.Unmarshal(*v, &end)
if err != nil {
return err
}
mri.End = end
}
case "interval":
if v != nil {
var interval string
err = json.Unmarshal(*v, &interval)
if err != nil {
return err
}
mri.Interval = &interval
}
case "segments":
if v != nil {
var segments []MetricsSegmentInfo
err = json.Unmarshal(*v, &segments)
if err != nil {
return err
}
mri.Segments = &segments
}
}
}
return nil
}
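
As a hypothetical usage sketch (not in the commit), the helpers above can be exercised against a hand-built leaf map that mirrors the shape of the testdata. The function below is assumed to live in the same azuremonitor package, since the helpers are unexported.

package azuremonitor

import "fmt"

func exampleLeafLookup() {
	// A leaf segment as it appears after UnmarshalJSON: the metric/aggregation
	// object and the final dimension value both land in AdditionalProperties.
	leaf := map[string]interface{}{
		"traces/count": map[string]interface{}{"sum": float64(2)},
		"client/city":  "Washington",
	}

	v, err := valFromLeafAP(leaf, "traces/count", "sum")
	if err == nil && v != nil {
		fmt.Println(*v) // 2
	}

	city, err := dimValueFromAP(leaf, "client/city")
	if err == nil {
		fmt.Println(city) // Washington
	}
}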

View File

@@ -0,0 +1,134 @@
package azuremonitor
import (
"encoding/json"
"os"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
)
func TestInsightsMetricsResultToFrame(t *testing.T) {
tests := []struct {
name string
testFile string
metric string
agg string
dimensions []string
expectedFrame func() *data.Frame
}{
{
name: "single series",
testFile: "applicationinsights/4-application-insights-response-metrics-no-segment.json",
metric: "value",
agg: "avg",
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2019, 9, 13, 1, 2, 3, 456789000, time.UTC),
time.Date(2019, 9, 13, 2, 2, 3, 456789000, time.UTC),
}),
data.NewField("value", nil, []*float64{
pointer.Float64(1),
pointer.Float64(2),
}),
)
return frame
},
},
{
name: "segmented series",
testFile: "applicationinsights/4-application-insights-response-metrics-segmented.json",
metric: "value",
agg: "avg",
dimensions: []string{"blob"},
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2019, 9, 13, 1, 2, 3, 456789000, time.UTC),
time.Date(2019, 9, 13, 2, 2, 3, 456789000, time.UTC),
}),
data.NewField("value", data.Labels{"blob": "a"}, []*float64{
pointer.Float64(1),
pointer.Float64(2),
}),
data.NewField("value", data.Labels{"blob": "b"}, []*float64{
pointer.Float64(3),
pointer.Float64(4),
}),
)
return frame
},
},
{
name: "segmented series",
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json",
metric: "traces/count",
agg: "sum",
dimensions: []string{"client/countryOrRegion", "client/city"},
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC),
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC),
}),
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
nil,
}),
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
pointer.Float64(1),
}),
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(11),
}),
data.NewField("traces/count", data.Labels{"client/city": "Chicago", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(3),
}),
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{
nil,
pointer.Float64(1),
}),
)
return frame
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
res, err := loadInsightsMetricsResponse(tt.testFile)
require.NoError(t, err)
frame, err := InsightsMetricsResultToFrame(res, tt.metric, tt.agg, tt.dimensions)
require.NoError(t, err)
if diff := cmp.Diff(tt.expectedFrame(), frame, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}
func loadInsightsMetricsResponse(name string) (MetricsResult, error) {
var mr MetricsResult
path := filepath.Join("testdata", name)
f, err := os.Open(path)
if err != nil {
return mr, err
}
defer f.Close()
d := json.NewDecoder(f)
err = d.Decode(&mr)
return mr, err
}

View File

@@ -1,9 +0,0 @@
{
"value": {
"start": "2019-09-13T01:02:03.456789Z",
"end": "2019-09-13T02:02:03.456789Z",
"value": {
"avg": 1.2
}
}
}

View File

@@ -0,0 +1,72 @@
{
"value": {
"start": "2020-06-25T16:15:32.140Z",
"end": "2020-06-25T16:19:32.140Z",
"interval": "PT2M",
"segments": [
{
"start": "2020-06-25T16:15:32.140Z",
"end": "2020-06-25T16:16:00.000Z",
"segments": [
{
"client/countryOrRegion": "United States",
"segments": [
{
"traces/count": {
"sum": 2
},
"client/city": "Washington"
},
{
"traces/count": {
"sum": 2
},
"client/city": "Des Moines"
}
]
}
]
},
{
"start": "2020-06-25T16:16:00.000Z",
"end": "2020-06-25T16:18:00.000Z",
"segments": [
{
"client/countryOrRegion": "United States",
"segments": [
{
"traces/count": {
"sum": 11
},
"client/city": ""
},
{
"traces/count": {
"sum": 3
},
"client/city": "Chicago"
},
{
"traces/count": {
"sum": 1
},
"client/city": "Des Moines"
}
]
},
{
"client/countryOrRegion": "Japan",
"segments": [
{
"traces/count": {
"sum": 1
},
"client/city": "Tokyo"
}
]
}
]
}
]
}
}

View File

@@ -1,7 +1,10 @@
package azuremonitor
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"time"
)
@@ -101,13 +104,13 @@ type azureMonitorJSONQuery struct {
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
type insightsJSONQuery struct {
AppInsights struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimension string `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
MetricName string `json:"metricName"`
TimeGrain string `json:"timeGrain"`
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimensions InsightsDimensions `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
MetricName string `json:"metricName"`
TimeGrain string `json:"timeGrain"`
} `json:"appInsights"`
Raw *bool `json:"raw"`
}
@@ -127,3 +130,41 @@ type logJSONQuery struct {
Workspace string `json:"workspace"`
} `json:"azureLogAnalytics"`
}
// InsightsDimensions will unmarshal from a JSON string, or an array of strings,
// into a string array. This exists to support an older query format, which is updated
// when a user saves the query or when it is sent from the frontend, but may not be
// updated when alerting fetches the model.
type InsightsDimensions []string
// UnmarshalJSON fulfills the json.Unmarshaler interface type.
func (s *InsightsDimensions) UnmarshalJSON(data []byte) error {
*s = InsightsDimensions{}
if string(data) == "null" || string(data) == "" {
return nil
}
if strings.ToLower(string(data)) == `"none"` {
return nil
}
if data[0] == '[' {
var sa []string
err := json.Unmarshal(data, &sa)
if err != nil {
return err
}
*s = InsightsDimensions(sa)
return nil
}
var str string
err := json.Unmarshal(data, &str)
if err != nil {
return fmt.Errorf("could not parse %q as string or array: %w", string(data), err)
}
if str != "" {
*s = InsightsDimensions{str}
return nil
}
return nil
}
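
A short sketch (assumed, not part of the commit) of the input shapes the UnmarshalJSON above accepts, written as if it lived inside the azuremonitor package. "none" and null both decode to an empty slice, a bare string becomes a one-element slice, and an array passes through unchanged.

package azuremonitor

import (
	"encoding/json"
	"fmt"
)

func exampleInsightsDimensions() {
	for _, raw := range []string{`"none"`, `"client/city"`, `["client/countryOrRegion","client/city"]`, `null`} {
		var d InsightsDimensions
		if err := json.Unmarshal([]byte(raw), &d); err != nil {
			fmt.Println(raw, "->", err)
			continue
		}
		fmt.Printf("%s -> %v\n", raw, []string(d))
	}
}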

View File

@@ -375,7 +375,7 @@ describe('AppInsightsDatasource', () => {
expect(options.url).toContain('/api/ds/query');
expect(options.data.queries[0].appInsights.rawQueryString).toBeUndefined();
expect(options.data.queries[0].appInsights.metricName).toBe('exceptions/server');
expect(options.data.queries[0].appInsights.dimension).toBe('client/city');
expect([...options.data.queries[0].appInsights.dimension]).toMatchObject(['client/city']);
return Promise.resolve({ data: response, status: 200 });
});
});

View File

@@ -1,7 +1,7 @@
import { ScopedVars } from '@grafana/data';
import { DataQueryRequest, DataSourceInstanceSettings } from '@grafana/data';
import { getBackendSrv, getTemplateSrv, DataSourceWithBackend } from '@grafana/runtime';
import _ from 'lodash';
import _, { isString } from 'lodash';
import TimegrainConverter from '../time_grain_converter';
import { AzureDataSourceJsonData, AzureMonitorQuery, AzureQueryType } from '../types';
@@ -84,12 +84,24 @@ export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMo
// migration for non-standard names
if (old.groupBy && !item.dimension) {
item.dimension = old.groupBy;
item.dimension = [old.groupBy];
}
if (old.filter && !item.dimensionFilter) {
item.dimensionFilter = old.filter;
}
// Migrate single dimension string to array
if (isString(item.dimension)) {
if (item.dimension === 'None') {
item.dimension = [];
} else {
item.dimension = [item.dimension as string];
}
}
if (!item.dimension) {
item.dimension = [];
}
const templateSrv = getTemplateSrv();
return {
@@ -102,7 +114,7 @@ export default class AppInsightsDatasource extends DataSourceWithBackend<AzureMo
allowedTimeGrainsMs: item.allowedTimeGrainsMs,
metricName: templateSrv.replace(item.metricName, scopedVars),
aggregation: templateSrv.replace(item.aggregation, scopedVars),
dimension: templateSrv.replace(item.dimension, scopedVars),
dimension: item.dimension.map(d => templateSrv.replace(d, scopedVars)),
dimensionFilter: templateSrv.replace(item.dimensionFilter, scopedVars),
alias: item.alias,
format: target.format,

View File

@@ -363,27 +363,30 @@
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword width-9">Group By</label>
</div>
<div ng-repeat="d in ctrl.target.appInsights.dimension track by $index"
class="gf-form"
ng-click="ctrl.removeGroupBy($index);"
onmouseover="this.style['text-decoration'] = 'line-through';"
onmouseout="this.style['text-decoration'] = '';">
<label class="gf-form-label"
style="cursor: pointer;">{{d}} <icon name="'times'"></icon></label>
</div>
<div>
<gf-form-dropdown
allow-custom="true"
ng-hide="ctrl.target.appInsights.dimension !== 'none'"
model="ctrl.target.appInsights.dimension"
lookup-text="true"
placeholder="Add"
model="ctrl.dummyDiminsionString"
get-options="ctrl.getAppInsightsGroupBySegments($query)"
on-change="ctrl.refresh()"
css-class="min-width-20"
on-change="ctrl.getAppInsightsGroupBySegments"
css-class="min-width-5"
>
</gf-form-dropdown>
<label
class="gf-form-label min-width-20 pointer"
ng-hide="ctrl.target.appInsights.dimension === 'none'"
ng-click="ctrl.resetAppInsightsGroupBy()"
>{{ctrl.target.appInsights.dimension}}
<icon name="'times'"></icon>
</label>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword width-9">Filter</label>
<label class="gf-form-label query-keyword">Filter</label>
<input
type="text"
class="gf-form-input width-17"

View File

@@ -40,7 +40,7 @@ describe('AzureMonitorQueryCtrl', () => {
expect(queryCtrl.target.azureMonitor.resourceName).toBe('select');
expect(queryCtrl.target.azureMonitor.metricNamespace).toBe('select');
expect(queryCtrl.target.azureMonitor.metricName).toBe('select');
expect(queryCtrl.target.appInsights.dimension).toBe('none');
expect(queryCtrl.target.appInsights.dimension).toMatchObject([]);
});
});

View File

@@ -20,6 +20,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
defaultDropdownValue = 'select';
dummyDiminsionString = '+';
target: {
// should be: AzureMonitorQuery
refId: string;
@@ -104,7 +106,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
},
appInsights: {
metricName: this.defaultDropdownValue,
dimension: 'none',
// dimension: [],
timeGrain: 'auto',
},
insightsAnalytics: {
@@ -135,6 +137,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
this.migrateApplicationInsightsKeys();
this.migrateApplicationInsightsDimensions();
this.panelCtrl.events.on(PanelEvents.dataReceived, this.onDataReceived.bind(this), $scope);
this.panelCtrl.events.on(PanelEvents.dataError, this.onDataError.bind(this), $scope);
this.resultFormats = [
@@ -270,6 +274,18 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
}
}
migrateApplicationInsightsDimensions() {
const { appInsights } = this.target;
if (!appInsights.dimension) {
appInsights.dimension = [];
}
if (_.isString(appInsights.dimension)) {
appInsights.dimension = [appInsights.dimension as string];
}
}
replace(variable: string) {
return this.templateSrv.replace(variable, this.panelCtrl.panel.scopedVars);
}
@@ -625,8 +641,27 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
return this.datasource.appInsightsDatasource.getQuerySchema().catch(this.handleQueryCtrlError.bind(this));
};
removeGroupBy = (index: number) => {
const { appInsights } = this.target;
appInsights.dimension.splice(index, 1);
this.refresh();
};
getAppInsightsGroupBySegments(query: any) {
return _.map(this.target.appInsights.dimensions, (option: string) => {
const { appInsights } = this.target;
// HACK alert... there must be a better way!
if (this.dummyDiminsionString && this.dummyDiminsionString.length && '+' !== this.dummyDiminsionString) {
if (!appInsights.dimension) {
appInsights.dimension = [];
}
appInsights.dimension.push(this.dummyDiminsionString);
this.dummyDiminsionString = '+';
this.refresh();
}
// Return the list of dimensions stored on the query object from the last request :(
return _.map(appInsights.dimensions, (option: string) => {
return { text: option, value: option };
});
}

View File

@@ -73,7 +73,8 @@ export interface ApplicationInsightsQuery {
timeGrain: string;
allowedTimeGrainsMs: number[];
aggregation: string;
dimension: string;
dimension: string[]; // Was string before 7.1
// dimensions: string[]; why is this metadata stored on the object!
dimensionFilter: string;
alias: string;
}