AzureMonitor: Alerting for Azure Application Insights (#19381)
* Convert the Azure Application Insights data source to Go, allowing alerting on Application Insights queries. Closes: #15153
* Fix timeGrainReset
* Default time interval for alert queries
* Fix a few rename-related bugs
* Update readme to indicate App Insights alerting
* Fix typo and add tests to ensure migration is happening
* Address code review feedback (mostly typos and unintended changes)
Parent: 92765a6c6f
Commit: 20faef8de5
@@ -216,7 +216,9 @@ Examples:
 
 ### Application Insights Alerting
 
-Not implemented yet.
+Grafana alerting is supported for Application Insights. Note that this is Grafana alerting, not Azure Alerts. Read more about how alerting in Grafana works [here]({{< relref "alerting/rules.md" >}}).
+
+{{< docs-imagebox img="/img/docs/v60/azuremonitor-alerting.png" class="docs-image--no-shadow" caption="Azure Monitor Alerting" >}}
 
 ## Querying the Azure Log Analytics Service
pkg/tsdb/azuremonitor/applicationinsights-datasource.go (new file, 592 lines)
@@ -0,0 +1,592 @@
package azuremonitor

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"path"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/api/pluginproxy"
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/opentracing/opentracing-go"
	"golang.org/x/net/context/ctxhttp"
)

// ApplicationInsightsDatasource calls the Application Insights query APIs.
type ApplicationInsightsDatasource struct {
	httpClient *http.Client
	dsInfo     *models.DataSource
}

// ApplicationInsightsQuery is a fully built query, ready to be executed against
// either the metrics API or the text-based query API.
type ApplicationInsightsQuery struct {
	RefID string

	IsRaw bool

	// Text based raw query options
	ApiURL            string
	Params            url.Values
	Alias             string
	Target            string
	TimeColumnName    string
	ValueColumnName   string
	SegmentColumnName string
}

func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) {
	result := &tsdb.Response{
		Results: map[string]*tsdb.QueryResult{},
	}

	queries, err := e.buildQueries(originalQueries, timeRange)
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		queryRes, err := e.executeQuery(ctx, query)
		if err != nil {
			return nil, err
		}
		result.Results[query.RefID] = queryRes
	}

	return result, nil
}

// buildQueries converts the frontend query models into ApplicationInsightsQuery
// values with the API URL and URL parameters already resolved.
func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*ApplicationInsightsQuery, error) {
	applicationInsightsQueries := []*ApplicationInsightsQuery{}
	startTime, err := timeRange.ParseFrom()
	if err != nil {
		return nil, err
	}

	endTime, err := timeRange.ParseTo()
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		applicationInsightsTarget := query.Model.Get("appInsights").MustMap()
		azlog.Debug("Application Insights", "target", applicationInsightsTarget)

		rawQuery := false
		if asInterface, ok := applicationInsightsTarget["rawQuery"]; ok {
			if asBool, ok := asInterface.(bool); ok {
				rawQuery = asBool
			} else {
				return nil, errors.New("'rawQuery' should be a boolean")
			}
		} else {
			return nil, errors.New("missing 'rawQuery' property")
		}

		if rawQuery {
			var rawQueryString string
			if asInterface, ok := applicationInsightsTarget["rawQueryString"]; ok {
				if asString, ok := asInterface.(string); ok {
					rawQueryString = asString
				}
			}
			if rawQueryString == "" {
				return nil, errors.New("rawQuery requires rawQueryString")
			}

			rawQueryString, err := KqlInterpolate(query, timeRange, fmt.Sprintf("%v", rawQueryString))
			if err != nil {
				return nil, err
			}

			params := url.Values{}
			params.Add("query", rawQueryString)

			applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
				RefID:             query.RefId,
				IsRaw:             true,
				ApiURL:            "query",
				Params:            params,
				TimeColumnName:    fmt.Sprintf("%v", applicationInsightsTarget["timeColumn"]),
				ValueColumnName:   fmt.Sprintf("%v", applicationInsightsTarget["valueColumn"]),
				SegmentColumnName: fmt.Sprintf("%v", applicationInsightsTarget["segmentColumn"]),
				Target:            params.Encode(),
			})
		} else {
			alias := ""
			if val, ok := applicationInsightsTarget["alias"]; ok {
				alias = fmt.Sprintf("%v", val)
			}

			azureURL := fmt.Sprintf("metrics/%s", fmt.Sprintf("%v", applicationInsightsTarget["metricName"]))
			timeGrain := fmt.Sprintf("%v", applicationInsightsTarget["timeGrain"])
			timeGrains := applicationInsightsTarget["allowedTimeGrainsMs"]
			if timeGrain == "auto" {
				timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains)
				if err != nil {
					return nil, err
				}
			}

			params := url.Values{}
			params.Add("timespan", fmt.Sprintf("%v/%v", startTime.UTC().Format(time.RFC3339), endTime.UTC().Format(time.RFC3339)))
			if timeGrain != "none" {
				params.Add("interval", timeGrain)
			}
			params.Add("aggregation", fmt.Sprintf("%v", applicationInsightsTarget["aggregation"]))

			dimension := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimension"]))
			if applicationInsightsTarget["dimension"] != nil && len(dimension) > 0 && !strings.EqualFold(dimension, "none") {
				params.Add("segment", dimension)
			}

			dimensionFilter := strings.TrimSpace(fmt.Sprintf("%v", applicationInsightsTarget["dimensionFilter"]))
			if applicationInsightsTarget["dimensionFilter"] != nil && len(dimensionFilter) > 0 {
				params.Add("filter", fmt.Sprintf("%v", dimensionFilter))
			}

			applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
				RefID:  query.RefId,
				IsRaw:  false,
				ApiURL: azureURL,
				Params: params,
				Alias:  alias,
				Target: params.Encode(),
			})
		}
	}

	return applicationInsightsQueries, nil
}

func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery) (*tsdb.QueryResult, error) {
	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}

	req, err := e.createRequest(ctx, e.dsInfo)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	req.URL.Path = path.Join(req.URL.Path, query.ApiURL)
	req.URL.RawQuery = query.Params.Encode()

	span, ctx := opentracing.StartSpanFromContext(ctx, "application insights query")
	span.SetTag("target", query.Target)
	span.SetTag("datasource_id", e.dsInfo.Id)
	span.SetTag("org_id", e.dsInfo.OrgId)

	defer span.Finish()

	err = opentracing.GlobalTracer().Inject(
		span.Context(),
		opentracing.HTTPHeaders,
		opentracing.HTTPHeadersCarrier(req.Header))

	if err != nil {
		azlog.Warn("failed to inject global tracer")
	}

	azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
	res, err := ctxhttp.Do(ctx, e.httpClient, req)
	if err != nil {
		queryResult.Error = err
		return queryResult, nil
	}

	body, err := ioutil.ReadAll(res.Body)
	defer res.Body.Close()
	if err != nil {
		return nil, err
	}

	if res.StatusCode/100 != 2 {
		azlog.Error("Request failed", "status", res.Status, "body", string(body))
		return nil, fmt.Errorf(string(body))
	}

	if query.IsRaw {
		queryResult.Series, queryResult.Meta, err = e.parseTimeSeriesFromQuery(body, query)
		if err != nil {
			queryResult.Error = err
			return queryResult, nil
		}
	} else {
		queryResult.Series, err = e.parseTimeSeriesFromMetrics(body, query)
		if err != nil {
			queryResult.Error = err
			return queryResult, nil
		}
	}

	return queryResult, nil
}

func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
	// find plugin
	plugin, ok := plugins.DataSources[dsInfo.Type]
	if !ok {
		return nil, errors.New("Unable to find datasource plugin Azure Application Insights")
	}

	var appInsightsRoute *plugins.AppPluginRoute
	for _, route := range plugin.Routes {
		if route.Path == "appinsights" {
			appInsightsRoute = route
			break
		}
	}

	appInsightsAppId := dsInfo.JsonData.Get("appInsightsAppId").MustString()
	proxyPass := fmt.Sprintf("appinsights/v1/apps/%s", appInsightsAppId)

	u, _ := url.Parse(dsInfo.Url)
	u.Path = path.Join(u.Path, fmt.Sprintf("/v1/apps/%s", appInsightsAppId))

	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
	if err != nil {
		azlog.Error("Failed to create request", "error", err)
		return nil, fmt.Errorf("Failed to create request. error: %v", err)
	}

	req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))

	pluginproxy.ApplyRoute(ctx, req, proxyPass, appInsightsRoute, dsInfo)

	return req, nil
}

func (e *ApplicationInsightsDatasource) parseTimeSeriesFromQuery(body []byte, query *ApplicationInsightsQuery) (tsdb.TimeSeriesSlice, *simplejson.Json, error) {
	var data ApplicationInsightsQueryResponse
	err := json.Unmarshal(body, &data)
	if err != nil {
		azlog.Error("Failed to unmarshal Application Insights response", "error", err, "body", string(body))
		return nil, nil, err
	}

	type Metadata struct {
		Columns []string `json:"columns"`
	}

	meta := Metadata{}

	for _, t := range data.Tables {
		if t.Name == "PrimaryResult" {
			timeIndex, valueIndex, segmentIndex := -1, -1, -1
			meta.Columns = make([]string, 0)
			for i, v := range t.Columns {
				meta.Columns = append(meta.Columns, v.Name)
				switch v.Name {
				case query.TimeColumnName:
					timeIndex = i
				case query.ValueColumnName:
					valueIndex = i
				case query.SegmentColumnName:
					segmentIndex = i
				}
			}

			if timeIndex == -1 {
				azlog.Info("no time column specified, returning existing columns, no data")
				return nil, simplejson.NewFromAny(meta), nil
			}

			if valueIndex == -1 {
				azlog.Info("no value column specified, returning existing columns, no data")
				return nil, simplejson.NewFromAny(meta), nil
			}

			var getPoints func([]interface{}) *tsdb.TimeSeriesPoints
			slice := tsdb.TimeSeriesSlice{}
			if segmentIndex == -1 {
				// Without a segment column, every row goes into a single series.
				legend := formatApplicationInsightsLegendKey(query.Alias, query.ValueColumnName, "", "")
				series := tsdb.NewTimeSeries(legend, []tsdb.TimePoint{})
				slice = append(slice, series)
				getPoints = func(row []interface{}) *tsdb.TimeSeriesPoints {
					return &series.Points
				}
			} else {
				// With a segment column, rows fan out into one series per segment value.
				mapping := map[string]*tsdb.TimeSeriesPoints{}
				getPoints = func(row []interface{}) *tsdb.TimeSeriesPoints {
					segment := fmt.Sprintf("%v", row[segmentIndex])
					if points, ok := mapping[segment]; ok {
						return points
					}
					legend := formatApplicationInsightsLegendKey(query.Alias, query.ValueColumnName, query.SegmentColumnName, segment)
					series := tsdb.NewTimeSeries(legend, []tsdb.TimePoint{})
					slice = append(slice, series)
					mapping[segment] = &series.Points
					return &series.Points
				}
			}

			for _, r := range t.Rows {
				timeStr, ok := r[timeIndex].(string)
				if !ok {
					return nil, simplejson.NewFromAny(meta), errors.New("invalid time value")
				}
				timeValue, err := time.Parse(time.RFC3339Nano, timeStr)
				if err != nil {
					return nil, simplejson.NewFromAny(meta), err
				}

				var value float64
				if value, err = getFloat(r[valueIndex]); err != nil {
					return nil, simplejson.NewFromAny(meta), err
				}

				points := getPoints(r)
				*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(timeValue.Unix()*1000)))
			}

			return slice, simplejson.NewFromAny(meta), nil
		}
	}

	return nil, nil, errors.New("could not find table")
}

func (e *ApplicationInsightsDatasource) parseTimeSeriesFromMetrics(body []byte, query *ApplicationInsightsQuery) (tsdb.TimeSeriesSlice, error) {
	doc, err := simplejson.NewJson(body)
	if err != nil {
		return nil, err
	}

	value := doc.Get("value").MustMap()

	if value == nil {
		return nil, errors.New("could not find value element")
	}

	endStr, ok := value["end"].(string)
	if !ok {
		return nil, errors.New("missing 'end' value in response")
	}
	endTime, err := time.Parse(time.RFC3339Nano, endStr)
	if err != nil {
		return nil, fmt.Errorf("bad 'end' value: %v", err)
	}

	for k, v := range value {
		switch k {
		case "start":
		case "end":
		case "interval":
		case "segments":
			// we have segments!
			return parseSegmentedValueTimeSeries(query, endTime, v)
		default:
			return parseSingleValueTimeSeries(query, k, endTime, v)
		}
	}

	azlog.Error("Bad response from application insights/metrics", "body", string(body))
	return nil, errors.New("could not find expected values in response")
}

func parseSegmentedValueTimeSeries(query *ApplicationInsightsQuery, endTime time.Time, segmentsJson interface{}) (tsdb.TimeSeriesSlice, error) {
	segments, ok := segmentsJson.([]interface{})
	if !ok {
		return nil, errors.New("bad segments value")
	}

	slice := tsdb.TimeSeriesSlice{}
	seriesMap := map[string]*tsdb.TimeSeriesPoints{}

	for _, segment := range segments {
		segmentMap, ok := segment.(map[string]interface{})
		if !ok {
			return nil, errors.New("bad segments value")
		}
		err := processSegment(&slice, segmentMap, query, endTime, seriesMap)
		if err != nil {
			return nil, err
		}
	}

	return slice, nil
}

// processSegment walks one (possibly nested) segment object, appending a data
// point to the series identified by the segment value.
func processSegment(slice *tsdb.TimeSeriesSlice, segment map[string]interface{}, query *ApplicationInsightsQuery, endTime time.Time, pointMap map[string]*tsdb.TimeSeriesPoints) error {
	var segmentName string
	var segmentValue string
	var childSegments []interface{}
	hasChildren := false
	var value float64
	var valueName string
	var ok bool
	var err error
	for k, v := range segment {
		switch k {
		case "start":
		case "end":
			endStr, ok := v.(string)
			if !ok {
				return errors.New("missing 'end' value in response")
			}
			endTime, err = time.Parse(time.RFC3339Nano, endStr)
			if err != nil {
				return fmt.Errorf("bad 'end' value: %v", err)
			}
		case "segments":
			childSegments, ok = v.([]interface{})
			if !ok {
				return errors.New("invalid format segments")
			}
			hasChildren = true
		default:
			mapping, hasValues := v.(map[string]interface{})
			if hasValues {
				valueName = k
				value, err = getAggregatedValue(mapping, valueName)
				if err != nil {
					return err
				}
			} else {
				segmentValue, ok = v.(string)
				if !ok {
					return fmt.Errorf("invalid mapping for key %v", k)
				}
				segmentName = k
			}
		}
	}

	if hasChildren {
		for _, s := range childSegments {
			segmentMap, ok := s.(map[string]interface{})
			if !ok {
				return errors.New("invalid format segments")
			}
			if err := processSegment(slice, segmentMap, query, endTime, pointMap); err != nil {
				return err
			}
		}
	} else {
		aliased := formatApplicationInsightsLegendKey(query.Alias, valueName, segmentName, segmentValue)

		if segmentValue == "" {
			segmentValue = valueName
		}

		points, ok := pointMap[segmentValue]

		if !ok {
			series := tsdb.NewTimeSeries(aliased, tsdb.TimeSeriesPoints{})
			points = &series.Points
			*slice = append(*slice, series)
			pointMap[segmentValue] = points
		}

		*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(endTime.Unix()*1000)))
	}

	return nil
}

func parseSingleValueTimeSeries(query *ApplicationInsightsQuery, metricName string, endTime time.Time, valueJson interface{}) (tsdb.TimeSeriesSlice, error) {
	legend := formatApplicationInsightsLegendKey(query.Alias, metricName, "", "")

	valueMap, ok := valueJson.(map[string]interface{})
	if !ok {
		return nil, errors.New("bad value aggregation")
	}

	metricValue, err := getAggregatedValue(valueMap, metricName)
	if err != nil {
		return nil, err
	}

	return []*tsdb.TimeSeries{
		tsdb.NewTimeSeries(
			legend,
			tsdb.TimeSeriesPoints{
				tsdb.NewTimePoint(
					null.FloatFrom(metricValue),
					float64(endTime.Unix()*1000)),
			},
		),
	}, nil
}

func getAggregatedValue(valueMap map[string]interface{}, valueName string) (float64, error) {
	aggValue := ""
	var metricValue float64
	var err error
	for k, v := range valueMap {
		if aggValue != "" {
			return 0, fmt.Errorf("found multiple aggregations, %v, %v", aggValue, k)
		}
		if k == "" {
			return 0, errors.New("found no aggregation name")
		}
		aggValue = k
		metricValue, err = getFloat(v)

		if err != nil {
			return 0, fmt.Errorf("bad value: %v", err)
		}
	}

	if aggValue == "" {
		return 0, fmt.Errorf("no aggregation value found for %v", valueName)
	}

	return metricValue, nil
}

func getFloat(in interface{}) (float64, error) {
	if out, ok := in.(float32); ok {
		return float64(out), nil
	} else if out, ok := in.(int32); ok {
		return float64(out), nil
	} else if out, ok := in.(json.Number); ok {
		return out.Float64()
	} else if out, ok := in.(int64); ok {
		return float64(out), nil
	} else if out, ok := in.(float64); ok {
		return out, nil
	}

	return 0, fmt.Errorf("cannot convert '%v' to float64", in)
}

// formatApplicationInsightsLegendKey builds the legend key or timeseries name.
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatApplicationInsightsLegendKey(alias string, metricName string, dimensionName string, dimensionValue string) string {
	if alias == "" {
		if len(dimensionName) > 0 {
			return fmt.Sprintf("{%s=%s}.%s", dimensionName, dimensionValue, metricName)
		}
		return metricName
	}

	result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
		metaPartName := strings.Replace(string(in), "{{", "", 1)
		metaPartName = strings.Replace(metaPartName, "}}", "", 1)
		metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

		switch metaPartName {
		case "metric":
			return []byte(metricName)
		case "dimensionname", "groupbyname":
			return []byte(dimensionName)
		case "dimensionvalue", "groupbyvalue":
			return []byte(dimensionValue)
		}

		return in
	})

	return string(result)
}
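Note: the legend aliasing above is the one piece with non-obvious behaviour, and it can be exercised in isolation. The following is a minimal, self-contained sketch, not the package's code; it assumes a `legendKeyFormat` pattern equivalent to `\{\{\s*(.+?)\s*\}\}`, which is defined elsewhere in this package:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Assumed equivalent of the package-level legendKeyFormat regexp.
var legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

// expandAlias mirrors formatApplicationInsightsLegendKey for illustration only.
func expandAlias(alias, metric, dimName, dimValue string) string {
	return string(legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
		// Strip the {{ }} delimiters and normalize the pattern name.
		name := strings.ToLower(strings.TrimSpace(strings.Trim(string(in), "{}")))
		switch name {
		case "metric":
			return []byte(metric)
		case "dimensionname", "groupbyname":
			return []byte(dimName)
		case "dimensionvalue", "groupbyvalue":
			return []byte(dimValue)
		}
		return in // unknown patterns are left verbatim
	}))
}

func main() {
	// Prints: value segment a
	fmt.Println(expandAlias("{{metric}} {{dimensionname}} {{dimensionvalue}}", "value", "segment", "a"))
}
```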
pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go (new file, 316 lines)
@@ -0,0 +1,316 @@
package azuremonitor

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"testing"
	"time"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"

	. "github.com/smartystreets/goconvey/convey"
)

func TestApplicationInsightsDatasource(t *testing.T) {
	Convey("ApplicationInsightsDatasource", t, func() {
		datasource := &ApplicationInsightsDatasource{}

		Convey("Parse queries from frontend and build AzureMonitor API queries", func() {
			fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
			tsdbQuery := &tsdb.TsdbQuery{
				TimeRange: &tsdb.TimeRange{
					From: fmt.Sprintf("%v", fromStart.Unix()*1000),
					To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
				},
				Queries: []*tsdb.Query{
					{
						DataSource: &models.DataSource{
							JsonData: simplejson.NewFromAny(map[string]interface{}{}),
						},
						Model: simplejson.NewFromAny(map[string]interface{}{
							"appInsights": map[string]interface{}{
								"rawQuery":    false,
								"timeGrain":   "PT1M",
								"aggregation": "Average",
								"metricName":  "server/exceptions",
								"alias":       "testalias",
								"queryType":   "Application Insights",
							},
						}),
						RefId:      "A",
						IntervalMs: 1234,
					},
				},
			}

			Convey("and is a normal query", func() {
				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(len(queries), ShouldEqual, 1)
				So(queries[0].RefID, ShouldEqual, "A")
				So(queries[0].ApiURL, ShouldEqual, "metrics/server/exceptions")
				So(queries[0].Target, ShouldEqual, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
				So(len(queries[0].Params), ShouldEqual, 3)
				So(queries[0].Params["timespan"][0], ShouldEqual, "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z")
				So(queries[0].Params["aggregation"][0], ShouldEqual, "Average")
				So(queries[0].Params["interval"][0], ShouldEqual, "PT1M")
				So(queries[0].Alias, ShouldEqual, "testalias")
			})

			Convey("and has a time grain set to auto", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":    false,
						"timeGrain":   "auto",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights",
					},
				})
				tsdbQuery.Queries[0].IntervalMs = 400000

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Params["interval"][0], ShouldEqual, "PT15M")
			})

			Convey("and has a time grain set to auto and the metric has a limited list of allowed time grains", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":            false,
						"timeGrain":           "auto",
						"aggregation":         "Average",
						"metricName":          "Percentage CPU",
						"alias":               "testalias",
						"queryType":           "Application Insights",
						"allowedTimeGrainsMs": []interface{}{"auto", json.Number("60000"), json.Number("300000")},
					},
				})
				tsdbQuery.Queries[0].IntervalMs = 400000

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Params["interval"][0], ShouldEqual, "PT5M")
			})

			Convey("and has a dimension filter", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":        false,
						"timeGrain":       "PT1M",
						"aggregation":     "Average",
						"metricName":      "Percentage CPU",
						"alias":           "testalias",
						"queryType":       "Application Insights",
						"dimension":       "blob",
						"dimensionFilter": "blob eq '*'",
					},
				})

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Target, ShouldEqual, "aggregation=Average&filter=blob+eq+%27%2A%27&interval=PT1M&segment=blob&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
				So(queries[0].Params["filter"][0], ShouldEqual, "blob eq '*'")
			})

			Convey("and has a dimension filter set to None", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":    false,
						"timeGrain":   "PT1M",
						"aggregation": "Average",
						"metricName":  "Percentage CPU",
						"alias":       "testalias",
						"queryType":   "Application Insights",
						"dimension":   "None",
					},
				})

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)

				So(queries[0].Target, ShouldEqual, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
			})

			Convey("and is a raw query", func() {
				tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
					"appInsights": map[string]interface{}{
						"rawQuery":       true,
						"rawQueryString": "exceptions | where $__timeFilter(timestamp) | summarize count=count() by bin(timestamp, $__interval)",
						"timeColumn":     "timestamp",
						"valueColumn":    "count",
					},
				})

				queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange)
				So(err, ShouldBeNil)
				So(queries[0].Params["query"][0], ShouldEqual, "exceptions | where ['timestamp'] >= datetime('2018-03-15T13:00:00Z') and ['timestamp'] <= datetime('2018-03-15T13:34:00Z') | summarize count=count() by bin(timestamp, 1234ms)")
				So(queries[0].Target, ShouldEqual, "query=exceptions+%7C+where+%5B%27timestamp%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27timestamp%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+summarize+count%3Dcount%28%29+by+bin%28timestamp%2C+1234ms%29")
			})
		})

		Convey("Parse Application Insights query API response in the time series format", func() {
			Convey("no segments", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/1-application-insights-response-raw-query.json")
				So(err, ShouldBeNil)

				query := &ApplicationInsightsQuery{
					IsRaw:           true,
					TimeColumnName:  "timestamp",
					ValueColumnName: "value",
				}
				series, _, err := datasource.parseTimeSeriesFromQuery(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 1)
				So(series[0].Name, ShouldEqual, "value")
				So(len(series[0].Points), ShouldEqual, 2)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568336523000))

				So(series[0].Points[1][0].Float64, ShouldEqual, 2)
				So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568340123000))
			})

			Convey("with segments", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/2-application-insights-response-raw-query-segmented.json")
				So(err, ShouldBeNil)

				query := &ApplicationInsightsQuery{
					IsRaw:             true,
					TimeColumnName:    "timestamp",
					ValueColumnName:   "value",
					SegmentColumnName: "segment",
				}
				series, _, err := datasource.parseTimeSeriesFromQuery(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 2)
				So(series[0].Name, ShouldEqual, "{segment=a}.value")
				So(len(series[0].Points), ShouldEqual, 2)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568336523000))

				So(series[0].Points[1][0].Float64, ShouldEqual, 3)
				So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568426523000))

				So(series[1].Name, ShouldEqual, "{segment=b}.value")
				So(series[1].Points[0][0].Float64, ShouldEqual, 2)
				So(series[1].Points[0][1].Float64, ShouldEqual, int64(1568336523000))

				So(series[1].Points[1][0].Float64, ShouldEqual, 4)
				So(series[1].Points[1][1].Float64, ShouldEqual, int64(1568426523000))

				Convey("with alias", func() {
					data, err := ioutil.ReadFile("./test-data/applicationinsights/2-application-insights-response-raw-query-segmented.json")
					So(err, ShouldBeNil)

					query := &ApplicationInsightsQuery{
						IsRaw:             true,
						TimeColumnName:    "timestamp",
						ValueColumnName:   "value",
						SegmentColumnName: "segment",
						Alias:             "{{metric}} {{dimensionname}} {{dimensionvalue}}",
					}
					series, _, err := datasource.parseTimeSeriesFromQuery(data, query)
					So(err, ShouldBeNil)

					So(len(series), ShouldEqual, 2)
					So(series[0].Name, ShouldEqual, "value segment a")
					So(series[1].Name, ShouldEqual, "value segment b")
				})
			})
		})

		Convey("Parse Application Insights metrics API", func() {
			Convey("single value", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/3-application-insights-response-metrics-single-value.json")
				So(err, ShouldBeNil)
				query := &ApplicationInsightsQuery{
					IsRaw: false,
				}
				series, err := datasource.parseTimeSeriesFromMetrics(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 1)
				So(series[0].Name, ShouldEqual, "value")
				So(len(series[0].Points), ShouldEqual, 1)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1.2)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
			})

			Convey("1H separation", func() {
				data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-no-segment.json")
				So(err, ShouldBeNil)
				query := &ApplicationInsightsQuery{
					IsRaw: false,
				}
				series, err := datasource.parseTimeSeriesFromMetrics(data, query)
				So(err, ShouldBeNil)

				So(len(series), ShouldEqual, 1)
				So(series[0].Name, ShouldEqual, "value")
				So(len(series[0].Points), ShouldEqual, 2)

				So(series[0].Points[0][0].Float64, ShouldEqual, 1)
				So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
				So(series[0].Points[1][0].Float64, ShouldEqual, 2)
				So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568343723000))

				Convey("with segmentation", func() {
					data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-segmented.json")
					So(err, ShouldBeNil)
					query := &ApplicationInsightsQuery{
						IsRaw: false,
					}
					series, err := datasource.parseTimeSeriesFromMetrics(data, query)
					So(err, ShouldBeNil)

					So(len(series), ShouldEqual, 2)
					So(series[0].Name, ShouldEqual, "{blob=a}.value")
					So(len(series[0].Points), ShouldEqual, 2)

					So(series[0].Points[0][0].Float64, ShouldEqual, 1)
					So(series[0].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
					So(series[0].Points[1][0].Float64, ShouldEqual, 2)
					So(series[0].Points[1][1].Float64, ShouldEqual, int64(1568343723000))

					So(series[1].Name, ShouldEqual, "{blob=b}.value")
					So(len(series[1].Points), ShouldEqual, 2)

					So(series[1].Points[0][0].Float64, ShouldEqual, 3)
					So(series[1].Points[0][1].Float64, ShouldEqual, int64(1568340123000))
					So(series[1].Points[1][0].Float64, ShouldEqual, 4)
					So(series[1].Points[1][1].Float64, ShouldEqual, int64(1568343723000))

					Convey("with alias", func() {
						data, err := ioutil.ReadFile("./test-data/applicationinsights/4-application-insights-response-metrics-segmented.json")
						So(err, ShouldBeNil)
						query := &ApplicationInsightsQuery{
							IsRaw: false,
							Alias: "{{metric}} {{dimensionname}} {{dimensionvalue}}",
						}
						series, err := datasource.parseTimeSeriesFromMetrics(data, query)
						So(err, ShouldBeNil)

						So(len(series), ShouldEqual, 2)
						So(series[0].Name, ShouldEqual, "value blob a")
						So(series[1].Name, ShouldEqual, "value blob b")
					})
				})
			})
		})
	})
}
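For readers decoding the long `Target` assertions above: `Target` is just `url.Values.Encode()` output, so it round-trips with the standard library. A small standalone sketch:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// One of the encoded Target strings asserted in the tests above.
	target := "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z"

	params, err := url.ParseQuery(target)
	if err != nil {
		panic(err)
	}
	// Prints: Average PT1M 2018-03-15T13:00:00Z/2018-03-15T13:34:00Z
	fmt.Println(params.Get("aggregation"), params.Get("interval"), params.Get("timespan"))
}
```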
@@ -107,7 +107,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
 		timeGrain := fmt.Sprintf("%v", azureMonitorTarget["timeGrain"])
 		timeGrains := azureMonitorTarget["allowedTimeGrainsMs"]
 		if timeGrain == "auto" {
-			timeGrain, err = e.setAutoTimeGrain(query.IntervalMs, timeGrains)
+			timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains)
 			if err != nil {
 				return nil, err
 			}
@@ -147,35 +147,6 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *
 	return azureMonitorQueries, nil
 }
 
-// setAutoTimeGrain tries to find the closest interval to the query's intervalMs value
-// if the metric has a limited set of possible intervals/time grains then use those
-// instead of the default list of intervals
-func (e *AzureMonitorDatasource) setAutoTimeGrain(intervalMs int64, timeGrains interface{}) (string, error) {
-	// parses array of numbers from the timeGrains json field
-	allowedTimeGrains := []int64{}
-	tgs, ok := timeGrains.([]interface{})
-	if ok {
-		for _, v := range tgs {
-			jsonNumber, ok := v.(json.Number)
-			if ok {
-				tg, err := jsonNumber.Int64()
-				if err == nil {
-					allowedTimeGrains = append(allowedTimeGrains, tg)
-				}
-			}
-		}
-	}
-
-	autoInterval := e.findClosestAllowedIntervalMS(intervalMs, allowedTimeGrains)
-	tg := &TimeGrain{}
-	autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval)
-	if err != nil {
-		return "", err
-	}
-
-	return autoTimeGrain, nil
-}
-
 func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, AzureMonitorResponse, error) {
 	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}
 
@@ -203,7 +174,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
 		opentracing.HTTPHeaders,
 		opentracing.HTTPHeadersCarrier(req.Header))
 
-	azlog.Debug("AzureMonitor", "Request URL", req.URL.String())
+	azlog.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
 	res, err := ctxhttp.Do(ctx, e.httpClient, req)
 	if err != nil {
 		queryResult.Error = err
@@ -290,7 +261,7 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data
 			metadataName = series.Metadatavalues[0].Name.LocalizedValue
 			metadataValue = series.Metadatavalues[0].Value
 		}
-		metricName := formatLegendKey(query.Alias, query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue, data.Namespace, data.Value[0].ID)
+		metricName := formatAzureMonitorLegendKey(query.Alias, query.UrlComponents["resourceName"], data.Value[0].Name.LocalizedValue, metadataName, metadataValue, data.Namespace, data.Value[0].ID)
 
 		for _, point := range series.Data {
 			var value float64
@@ -321,35 +292,9 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, data
 	return nil
 }
 
-// findClosestAllowedIntervalMs is used for the auto time grain setting.
-// It finds the closest time grain from the list of allowed time grains for Azure Monitor
-// using the Grafana interval in milliseconds
-// Some metrics only allow a limited list of time grains. The allowedTimeGrains parameter
-// allows overriding the default list of allowed time grains.
-func (e *AzureMonitorDatasource) findClosestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 {
-	allowedIntervals := defaultAllowedIntervalsMS
-
-	if len(allowedTimeGrains) > 0 {
-		allowedIntervals = allowedTimeGrains
-	}
-
-	closest := allowedIntervals[0]
-
-	for i, allowed := range allowedIntervals {
-		if intervalMs > allowed {
-			if i+1 < len(allowedIntervals) {
-				closest = allowedIntervals[i+1]
-			} else {
-				closest = allowed
-			}
-		}
-	}
-	return closest
-}
-
-// formatLegendKey builds the legend key or timeseries name
+// formatAzureMonitorLegendKey builds the legend key or timeseries name
 // Alias patterns like {{resourcename}} are replaced with the appropriate data values.
-func formatLegendKey(alias string, resourceName string, metricName string, metadataName string, metadataValue string, namespace string, seriesID string) string {
+func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string, metadataValue string, namespace string, seriesID string) string {
 	if alias == "" {
 		if len(metadataName) > 0 {
 			return fmt.Sprintf("%s{%s=%s}.%s", resourceName, metadataName, metadataValue, metricName)
@@ -167,7 +167,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 
 		Convey("Parse AzureMonitor API response in the time series format", func() {
 			Convey("when data from query aggregated as average to one time series", func() {
-				data, err := loadTestFile("./test-data/1-azure-monitor-response-avg.json")
+				data, err := loadTestFile("./test-data/azuremonitor/1-azure-monitor-response-avg.json")
 				So(err, ShouldBeNil)
 				So(data.Interval, ShouldEqual, "PT1M")
 
@@ -204,7 +204,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 			})
 
 			Convey("when data from query aggregated as total to one time series", func() {
-				data, err := loadTestFile("./test-data/2-azure-monitor-response-total.json")
+				data, err := loadTestFile("./test-data/azuremonitor/2-azure-monitor-response-total.json")
 				So(err, ShouldBeNil)
 
 				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -224,7 +224,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 			})
 
 			Convey("when data from query aggregated as maximum to one time series", func() {
-				data, err := loadTestFile("./test-data/3-azure-monitor-response-maximum.json")
+				data, err := loadTestFile("./test-data/azuremonitor/3-azure-monitor-response-maximum.json")
 				So(err, ShouldBeNil)
 
 				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -244,7 +244,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 			})
 
 			Convey("when data from query aggregated as minimum to one time series", func() {
-				data, err := loadTestFile("./test-data/4-azure-monitor-response-minimum.json")
+				data, err := loadTestFile("./test-data/azuremonitor/4-azure-monitor-response-minimum.json")
 				So(err, ShouldBeNil)
 
 				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -264,7 +264,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 			})
 
 			Convey("when data from query aggregated as Count to one time series", func() {
-				data, err := loadTestFile("./test-data/5-azure-monitor-response-count.json")
+				data, err := loadTestFile("./test-data/azuremonitor/5-azure-monitor-response-count.json")
 				So(err, ShouldBeNil)
 
 				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -284,7 +284,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 			})
 
 			Convey("when data from query aggregated as total and has dimension filter", func() {
-				data, err := loadTestFile("./test-data/6-azure-monitor-response-multi-dimension.json")
+				data, err := loadTestFile("./test-data/azuremonitor/6-azure-monitor-response-multi-dimension.json")
 				So(err, ShouldBeNil)
 
 				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -311,7 +311,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 			})
 
 			Convey("when data from query has alias patterns", func() {
-				data, err := loadTestFile("./test-data/2-azure-monitor-response-total.json")
+				data, err := loadTestFile("./test-data/azuremonitor/2-azure-monitor-response-total.json")
 				So(err, ShouldBeNil)
 
 				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -331,7 +331,7 @@ func TestAzureMonitorDatasource(t *testing.T) {
 			})
 
 			Convey("when data has dimension filters and alias patterns", func() {
-				data, err := loadTestFile("./test-data/6-azure-monitor-response-multi-dimension.json")
+				data, err := loadTestFile("./test-data/azuremonitor/6-azure-monitor-response-multi-dimension.json")
 				So(err, ShouldBeNil)
 
 				res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
@@ -363,16 +363,16 @@ func TestAzureMonitorDatasource(t *testing.T) {
 				"2d": 172800000,
 			}
 
-			closest := datasource.findClosestAllowedIntervalMS(intervals["3m"], []int64{})
+			closest := findClosestAllowedIntervalMS(intervals["3m"], []int64{})
 			So(closest, ShouldEqual, intervals["5m"])
 
-			closest = datasource.findClosestAllowedIntervalMS(intervals["10m"], []int64{})
+			closest = findClosestAllowedIntervalMS(intervals["10m"], []int64{})
 			So(closest, ShouldEqual, intervals["15m"])
 
-			closest = datasource.findClosestAllowedIntervalMS(intervals["2d"], []int64{})
+			closest = findClosestAllowedIntervalMS(intervals["2d"], []int64{})
 			So(closest, ShouldEqual, intervals["1d"])
 
-			closest = datasource.findClosestAllowedIntervalMS(intervals["3m"], []int64{intervals["1d"]})
+			closest = findClosestAllowedIntervalMS(intervals["3m"], []int64{intervals["1d"]})
 			So(closest, ShouldEqual, intervals["1d"])
 		})
 	})
pkg/tsdb/azuremonitor/azuremonitor-time.go (new file, 58 lines)
@@ -0,0 +1,58 @@
package azuremonitor

import "encoding/json"

// setAutoTimeGrain tries to find the closest interval to the query's intervalMs value.
// If the metric has a limited set of possible intervals/time grains, those are used
// instead of the default list of intervals.
func setAutoTimeGrain(intervalMs int64, timeGrains interface{}) (string, error) {
	// parses array of numbers from the timeGrains json field
	allowedTimeGrains := []int64{}
	tgs, ok := timeGrains.([]interface{})
	if ok {
		for _, v := range tgs {
			jsonNumber, ok := v.(json.Number)
			if ok {
				tg, err := jsonNumber.Int64()
				if err == nil {
					allowedTimeGrains = append(allowedTimeGrains, tg)
				}
			}
		}
	}

	autoInterval := findClosestAllowedIntervalMS(intervalMs, allowedTimeGrains)
	tg := &TimeGrain{}
	autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval)
	if err != nil {
		return "", err
	}

	return autoTimeGrain, nil
}

// findClosestAllowedIntervalMS is used for the auto time grain setting.
// It finds the closest time grain from the list of allowed time grains for Azure Monitor
// using the Grafana interval in milliseconds.
// Some metrics only allow a limited list of time grains. The allowedTimeGrains parameter
// allows overriding the default list of allowed time grains.
func findClosestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 {
	allowedIntervals := defaultAllowedIntervalsMS

	if len(allowedTimeGrains) > 0 {
		allowedIntervals = allowedTimeGrains
	}

	closest := allowedIntervals[0]

	for i, allowed := range allowedIntervals {
		if intervalMs > allowed {
			if i+1 < len(allowedIntervals) {
				closest = allowedIntervals[i+1]
			} else {
				closest = allowed
			}
		}
	}
	return closest
}
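The rounding behaviour is easiest to see with concrete numbers. A self-contained sketch (it duplicates the loop above and assumes a plausible subset of the package's `defaultAllowedIntervalsMS`, which is defined elsewhere in the package):

```go
package main

import "fmt"

// Assumed subset of the package's defaultAllowedIntervalsMS (1m..1d).
var defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}

// findClosest mirrors findClosestAllowedIntervalMS above.
func findClosest(intervalMs int64, allowed []int64) int64 {
	intervals := defaultAllowedIntervalsMS
	if len(allowed) > 0 {
		intervals = allowed
	}
	closest := intervals[0]
	for i, a := range intervals {
		if intervalMs > a {
			if i+1 < len(intervals) {
				closest = intervals[i+1]
			} else {
				closest = a
			}
		}
	}
	return closest
}

func main() {
	fmt.Println(findClosest(180000, nil))               // 3m rounds up to 300000 (5m)
	fmt.Println(findClosest(600000, nil))               // 10m rounds up to 900000 (15m)
	fmt.Println(findClosest(172800000, nil))            // 2d clamps to the largest default, 86400000 (1d)
	fmt.Println(findClosest(180000, []int64{86400000})) // a restricted list wins: 86400000 (1d)
}
```

These are exactly the cases asserted in the `findClosestAllowedIntervalMS` tests above.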
@@ -46,10 +46,10 @@ func init() {
 // executes the queries against the API and parses the response into
 // the right format
 func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	var result *tsdb.Response
 	var err error
 
 	var azureMonitorQueries []*tsdb.Query
+	var applicationInsightsQueries []*tsdb.Query
 
 	for _, query := range tsdbQuery.Queries {
 		queryType := query.Model.Get("queryType").MustString("")
@@ -57,6 +57,8 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou
 		switch queryType {
 		case "Azure Monitor":
 			azureMonitorQueries = append(azureMonitorQueries, query)
+		case "Application Insights":
+			applicationInsightsQueries = append(applicationInsightsQueries, query)
 		default:
 			return nil, fmt.Errorf("Alerting not supported for %s", queryType)
 		}
@@ -67,7 +69,24 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou
 		dsInfo:     e.dsInfo,
 	}
 
-	result, err = azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, tsdbQuery.TimeRange)
+	aiDatasource := &ApplicationInsightsDatasource{
+		httpClient: e.httpClient,
+		dsInfo:     e.dsInfo,
+	}
 
-	return result, err
+	azResult, err := azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, tsdbQuery.TimeRange)
+	if err != nil {
+		return nil, err
+	}
+
+	aiResult, err := aiDatasource.executeTimeSeriesQuery(ctx, applicationInsightsQueries, tsdbQuery.TimeRange)
+	if err != nil {
+		return nil, err
+	}
+
+	for k, v := range aiResult.Results {
+		azResult.Results[k] = v
+	}
+
+	return azResult, nil
 }
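The final loop is a plain map union keyed by RefID; each panel query carries a unique RefID, so there are no collisions in practice. The same pattern in miniature:

```go
package main

import "fmt"

func main() {
	az := map[string]string{"A": "azure monitor result"}
	ai := map[string]string{"B": "app insights result"}

	// Same pattern as the Results merge in Query above.
	for k, v := range ai {
		az[k] = v
	}
	fmt.Println(az) // map[A:azure monitor result B:app insights result]
}
```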
pkg/tsdb/azuremonitor/macros.go (new file, 118 lines)
@@ -0,0 +1,118 @@
package azuremonitor

import (
	"fmt"
	"regexp"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/tsdb"
)

const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `(?:\(([^\)]*)\))?`

type kqlMacroEngine struct {
	timeRange *tsdb.TimeRange
	query     *tsdb.Query
}

// KqlInterpolate expands the Grafana macros ($__timeFilter, $__from, $__to,
// $__interval, $__contains) in a raw KQL query string.
func KqlInterpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string) (string, error) {
	engine := kqlMacroEngine{}
	return engine.Interpolate(query, timeRange, kql)
}

func (m *kqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string) (string, error) {
	m.timeRange = timeRange
	m.query = query
	rExp, _ := regexp.Compile(sExpr)
	var macroError error

	kql = m.ReplaceAllStringSubmatchFunc(rExp, kql, func(groups []string) string {
		args := []string{}
		if len(groups) > 2 {
			args = strings.Split(groups[2], ",")
		}

		for i, arg := range args {
			args[i] = strings.Trim(arg, " ")
		}
		res, err := m.evaluateMacro(groups[1], args)
		if err != nil && macroError == nil {
			macroError = err
			return "macro_error()"
		}
		return res
	})

	if macroError != nil {
		return "", macroError
	}

	return kql, nil
}

func (m *kqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
	switch name {
	case "__timeFilter":
		timeColumn := "timestamp"
		if len(args) > 0 && args[0] != "" {
			timeColumn = args[0]
		}
		return fmt.Sprintf("['%s'] >= datetime('%s') and ['%s'] <= datetime('%s')", timeColumn, m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), timeColumn, m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
	case "__from":
		return fmt.Sprintf("datetime('%s')", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
	case "__to":
		return fmt.Sprintf("datetime('%s')", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
	case "__interval":
		var interval time.Duration
		if m.query.IntervalMs == 0 {
			to := m.timeRange.MustGetTo().UnixNano()
			from := m.timeRange.MustGetFrom().UnixNano()
			// default to about 60 datapoints across the range if nothing in the query is more specific
			defaultInterval := time.Duration((to - from) / 60)
			var err error
			interval, err = tsdb.GetIntervalFrom(m.query.DataSource, m.query.Model, defaultInterval)
			if err != nil {
				azlog.Warn("Unable to get interval from query", "datasource", m.query.DataSource, "model", m.query.Model)
				interval = defaultInterval
			}
		} else {
			interval = time.Millisecond * time.Duration(m.query.IntervalMs)
		}
		return fmt.Sprintf("%dms", int(interval/time.Millisecond)), nil
	case "__contains":
		if len(args) < 2 || args[0] == "" || args[1] == "" {
			return "", fmt.Errorf("macro %v needs colName and variableSet", name)
		}

		if args[1] == "all" {
			return "1 == 1", nil
		}

		return fmt.Sprintf("['%s'] in ('%s')", args[0], args[1]), nil
	default:
		return "", fmt.Errorf("Unknown macro %v", name)
	}
}

// ReplaceAllStringSubmatchFunc is like Regexp.ReplaceAllStringFunc but passes
// the submatch groups to the replacement callback.
func (m *kqlMacroEngine) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
	result := ""
	lastIndex := 0

	for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
		groups := []string{}
		for i := 0; i < len(v); i += 2 {
			if v[i] < 0 {
				groups = append(groups, "")
			} else {
				groups = append(groups, str[v[i]:v[i+1]])
			}
		}

		result += str[lastIndex:v[0]] + repl(groups)
		lastIndex = v[1]
	}

	return result + str[lastIndex:]
}
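A usage sketch for KqlInterpolate. This is in-package pseudocode against this repo's tsdb types, not a standalone program; the time range and the expected output string are taken from the raw-query tests in this commit:

```go
// In-package sketch. 1521118800000/1521120840000 are epoch-ms for
// 2018-03-15T13:00:00Z and 2018-03-15T13:34:00Z, as used in the tests.
query := &tsdb.Query{Model: simplejson.New(), IntervalMs: 1234}
timeRange := &tsdb.TimeRange{From: "1521118800000", To: "1521120840000"}

kql, err := KqlInterpolate(query, timeRange,
	"exceptions | where $__timeFilter(timestamp) | summarize count=count() by bin(timestamp, $__interval)")
if err == nil {
	// kql == "exceptions | where ['timestamp'] >= datetime('2018-03-15T13:00:00Z') and ['timestamp'] <= datetime('2018-03-15T13:34:00Z') | summarize count=count() by bin(timestamp, 1234ms)"
	fmt.Println(kql)
}
```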
pkg/tsdb/azuremonitor/test-data/applicationinsights/1-application-insights-response-raw-query.json (new file, as referenced by the tests above)
@@ -0,0 +1,27 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "timestamp",
          "type": "datetime"
        },
        {
          "name": "value",
          "type": "int"
        }
      ],
      "rows": [
        [
          "2019-09-13T01:02:03.456789Z",
          1
        ],
        [
          "2019-09-13T02:02:03.456789Z",
          2
        ]
      ]
    }
  ]
}
pkg/tsdb/azuremonitor/test-data/applicationinsights/2-application-insights-response-raw-query-segmented.json (new file)
@@ -0,0 +1,43 @@
{
  "tables": [
    {
      "name": "PrimaryResult",
      "columns": [
        {
          "name": "timestamp",
          "type": "datetime"
        },
        {
          "name": "value",
          "type": "int"
        },
        {
          "name": "segment",
          "type": "string"
        }
      ],
      "rows": [
        [
          "2019-09-13T01:02:03.456789Z",
          1,
          "a"
        ],
        [
          "2019-09-13T01:02:03.456789Z",
          2,
          "b"
        ],
        [
          "2019-09-14T02:02:03.456789Z",
          3,
          "a"
        ],
        [
          "2019-09-14T02:02:03.456789Z",
          4,
          "b"
        ]
      ]
    }
  ]
}
pkg/tsdb/azuremonitor/test-data/applicationinsights/3-application-insights-response-metrics-single-value.json (new file)
@@ -0,0 +1,9 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T02:02:03.456789Z",
    "value": {
      "avg": 1.2
    }
  }
}
pkg/tsdb/azuremonitor/test-data/applicationinsights/4-application-insights-response-metrics-no-segment.json (new file)
@@ -0,0 +1,23 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T03:02:03.456789Z",
    "interval": "PT1H",
    "segments": [
      {
        "start": "2019-09-13T01:02:03.456789Z",
        "end": "2019-09-13T02:02:03.456789Z",
        "value": {
          "avg": 1
        }
      },
      {
        "start": "2019-09-13T02:02:03.456789Z",
        "end": "2019-09-13T03:02:03.456789Z",
        "value": {
          "avg": 2
        }
      }
    ]
  }
}
pkg/tsdb/azuremonitor/test-data/applicationinsights/4-application-insights-response-metrics-segmented.json (new file)
@@ -0,0 +1,45 @@
{
  "value": {
    "start": "2019-09-13T01:02:03.456789Z",
    "end": "2019-09-13T03:02:03.456789Z",
    "interval": "PT1H",
    "segments": [
      {
        "start": "2019-09-13T01:02:03.456789Z",
        "end": "2019-09-13T02:02:03.456789Z",
        "segments": [
          {
            "value": {
              "avg": 1
            },
            "blob": "a"
          },
          {
            "value": {
              "avg": 3
            },
            "blob": "b"
          }
        ]
      },
      {
        "start": "2019-09-13T02:02:03.456789Z",
        "end": "2019-09-13T03:02:03.456789Z",
        "segments": [
          {
            "value": {
              "avg": 2
            },
            "blob": "a"
          },
          {
            "value": {
              "avg": 4
            },
            "blob": "b"
          }
        ]
      }
    ]
  }
}
@@ -51,17 +51,32 @@ type AzureMonitorResponse struct {
 	Resourceregion string `json:"resourceregion"`
 }
 
+// ApplicationInsightsResponse is the json response from the Application Insights API
+type ApplicationInsightsResponse struct {
+	MetricResponse *ApplicationInsightsMetricsResponse
+	QueryResponse  *ApplicationInsightsQueryResponse
+}
+
 // ApplicationInsightsResponse is the json response from the Application Insights API
 type ApplicationInsightsQueryResponse struct {
 	Tables []struct {
-		TableName string `json:"TableName"`
-		Columns   []struct {
-			ColumnName string `json:"ColumnName"`
-			DataType   string `json:"DataType"`
-			ColumnType string `json:"ColumnType"`
-		} `json:"Columns"`
-		Rows [][]interface{} `json:"Rows"`
-	} `json:"Tables"`
+		Name    string `json:"name"`
+		Columns []struct {
+			Name string `json:"name"`
+			Type string `json:"type"`
+		} `json:"columns"`
+		Rows [][]interface{} `json:"rows"`
+	} `json:"tables"`
 }
+
+// ApplicationInsightsMetricsResponse is the json response from the Application Insights API
+type ApplicationInsightsMetricsResponse struct {
+	Name     string
+	Segments []struct {
+		Start     time.Time
+		End       time.Time
+		Segmented map[string]float64
+		Value     float64
+	}
+}
 
 // AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
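The lowercase JSON tags on ApplicationInsightsQueryResponse line up with the test-data files added in this commit. A standalone sketch of the round trip (the struct is a local copy of the shape above, and the file path is the one the tests reference, assumed to be run from the repo root):

```go
package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
)

// Local copy of the ApplicationInsightsQueryResponse shape from this commit,
// so the sketch runs standalone.
type queryResponse struct {
	Tables []struct {
		Name    string `json:"name"`
		Columns []struct {
			Name string `json:"name"`
			Type string `json:"type"`
		} `json:"columns"`
		Rows [][]interface{} `json:"rows"`
	} `json:"tables"`
}

func main() {
	body, err := ioutil.ReadFile("pkg/tsdb/azuremonitor/test-data/applicationinsights/1-application-insights-response-raw-query.json")
	if err != nil {
		log.Fatal(err)
	}
	var resp queryResponse
	if err := json.Unmarshal(body, &resp); err != nil {
		log.Fatal(err)
	}
	// Prints: PrimaryResult 2 2
	fmt.Println(resp.Tables[0].Name, len(resp.Tables[0].Columns), len(resp.Tables[0].Rows))
}
```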
@@ -5,7 +5,7 @@ import (
 	"strings"
 )
 
-// URLQueryReader is a URL query type.
+// URLQueryReader is a ApiURL query type.
 type URLQueryReader struct {
 	values url.Values
 }
@@ -22,7 +22,7 @@ func NewURLQueryReader(urlInfo *url.URL) (*URLQueryReader, error) {
 	}, nil
 }
 
-// Get parse parameters from an URL. If the parameter does not exist, it returns
+// Get parse parameters from an ApiURL. If the parameter does not exist, it returns
 // the default value.
 func (r *URLQueryReader) Get(name string, def string) string {
 	val := r.values[name]
@@ -33,7 +33,7 @@ func (r *URLQueryReader) Get(name string, def string) string {
 	return val[0]
 }
 
-// JoinURLFragments joins two URL fragments into only one URL string.
+// JoinURLFragments joins two ApiURL fragments into only one ApiURL string.
 func JoinURLFragments(a, b string) string {
 	aslash := strings.HasSuffix(a, "/")
 	bslash := strings.HasPrefix(b, "/")
@@ -1,8 +1,8 @@
import AzureMonitorDatasource from '../datasource';
import Datasource from '../datasource';
import { DataFrame, toUtc } from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
// @ts-ignore
import Q from 'q';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { toUtc } from '@grafana/data';

describe('AppInsightsDatasource', () => {
  const ctx: any = {
@@ -17,7 +17,7 @@ describe('AppInsightsDatasource', () => {
      url: 'http://appinsightsapi',
    };

    ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, ctx.backendSrv, ctx.templateSrv, ctx.$q);
    ctx.ds = new Datasource(ctx.instanceSettings, ctx.backendSrv, ctx.templateSrv, ctx.$q);
  });

  describe('When performing testDatasource', () => {
@@ -108,7 +108,121 @@ describe('AppInsightsDatasource', () => {
    });
  });

  describe('When performing query', () => {
    describe('When performing raw query', () => {
      const queryString =
        'metrics ' +
        '| where $__timeFilter(timestamp) ' +
        '| where name == "testMetrics" ' +
        '| summarize max=max(valueMax) by bin(timestamp, $__interval), partition';

      const options = {
        range: {
          from: toUtc('2017-08-22T20:00:00Z'),
          to: toUtc('2017-08-22T23:59:00Z'),
        },
        targets: [
          {
            apiVersion: '2016-09-01',
            refId: 'A',
            queryType: 'Application Insights',
            appInsights: {
              rawQuery: true,
              rawQueryString: queryString,
              timeColumn: 'timestamp',
              valueColumn: 'max',
              segmentColumn: undefined as string,
            },
          },
        ],
      };

      describe('with no grouping', () => {
        const response: any = {
          results: {
            A: {
              refId: 'A',
              meta: {},
              series: [
                {
                  name: 'PrimaryResult',
                  points: [[2.2075, 1558278660000]],
                },
              ],
              tables: null,
            },
          },
        };

        beforeEach(() => {
          ctx.backendSrv.datasourceRequest = (options: any) => {
            expect(options.url).toContain('/api/tsdb/query');
            expect(options.data.queries.length).toBe(1);
            expect(options.data.queries[0].refId).toBe('A');
            expect(options.data.queries[0].appInsights.rawQueryString).toEqual(queryString);
            expect(options.data.queries[0].appInsights.timeColumn).toEqual('timestamp');
            expect(options.data.queries[0].appInsights.valueColumn).toEqual('max');
            expect(options.data.queries[0].appInsights.segmentColumn).toBeUndefined();
            return ctx.$q.when({ data: response, status: 200 });
          };
        });

        it('should return a list of datapoints', () => {
          return ctx.ds.query(options).then((results: any) => {
            expect(results.data.length).toBe(1);
            const data = results.data[0] as DataFrame;
            expect(data.name).toEqual('PrimaryResult');
            expect(data.fields[0].values.length).toEqual(1);
            expect(data.fields[1].values.get(0)).toEqual(1558278660000);
            expect(data.fields[0].values.get(0)).toEqual(2.2075);
          });
        });
      });

      describe('with grouping', () => {
        const response: any = {
          results: {
            A: {
              refId: 'A',
              meta: {},
              series: [
                {
                  name: 'partitionA',
                  points: [[2.2075, 1558278660000]],
                },
              ],
              tables: null,
            },
          },
        };

        beforeEach(() => {
          options.targets[0].appInsights.segmentColumn = 'partition';
          ctx.backendSrv.datasourceRequest = (options: any) => {
            expect(options.url).toContain('/api/tsdb/query');
            expect(options.data.queries.length).toBe(1);
            expect(options.data.queries[0].refId).toBe('A');
            expect(options.data.queries[0].appInsights.rawQueryString).toEqual(queryString);
            expect(options.data.queries[0].appInsights.timeColumn).toEqual('timestamp');
            expect(options.data.queries[0].appInsights.valueColumn).toEqual('max');
            expect(options.data.queries[0].appInsights.segmentColumn).toEqual('partition');
            return ctx.$q.when({ data: response, status: 200 });
          };
        });

        it('should return a list of datapoints', () => {
          return ctx.ds.query(options).then((results: any) => {
            expect(results.data.length).toBe(1);
            const data = results.data[0] as DataFrame;
            expect(data.name).toEqual('partitionA');
            expect(data.fields[0].values.length).toEqual(1);
            expect(data.fields[1].values.get(0)).toEqual(1558278660000);
            expect(data.fields[0].values.get(0)).toEqual(2.2075);
          });
        });
      });
    });

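The raw query in the tests above leans on two Grafana macros: $__timeFilter(timestamp) is replaced with a datetime range predicate for the current dashboard window, and $__interval with the calculated group-by interval. Expanded, the query string would look roughly like this (illustrative window and interval values, not output captured from the plugin):

// Hypothetical expansion for a 2017-08-22 20:00-23:59 window and a 1m interval:
const expanded =
  'metrics ' +
  "| where timestamp >= datetime('2017-08-22T20:00:00Z') and timestamp <= datetime('2017-08-22T23:59:00Z') " +
  '| where name == "testMetrics" ' +
  '| summarize max=max(valueMax) by bin(timestamp, 1m), partition';
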
  describe('When performing metric query', () => {
    const options = {
      range: {
        from: toUtc('2017-08-22T20:00:00Z'),
@@ -121,30 +235,37 @@ describe('AppInsightsDatasource', () => {
          queryType: 'Application Insights',
          appInsights: {
            metricName: 'exceptions/server',
            groupBy: '',
            timeGrainType: 'none',
            timeGrain: '',
            timeGrainUnit: '',
            alias: '',
            dimension: '',
            timeGrain: 'none',
          },
        },
      ],
    };

    describe('and with a single value', () => {
      const response = {
        value: {
          start: '2017-08-30T15:53:58.845Z',
          end: '2017-09-06T15:53:58.845Z',
          'exceptions/server': {
            sum: 100,
      const response: any = {
        results: {
          A: {
            refId: 'A',
            meta: {},
            series: [
              {
                name: 'exceptions/server',
                points: [[2.2075, 1558278660000]],
              },
            ],
            tables: null,
          },
        },
      };

      beforeEach(() => {
        ctx.backendSrv.datasourceRequest = (options: { url: string }) => {
          expect(options.url).toContain('/metrics/exceptions/server');
        ctx.backendSrv.datasourceRequest = (options: any) => {
          expect(options.url).toContain('/api/tsdb/query');
          expect(options.data.queries.length).toBe(1);
          expect(options.data.queries[0].refId).toBe('A');
          expect(options.data.queries[0].appInsights.rawQueryString).toBeUndefined();
          expect(options.data.queries[0].appInsights.metricName).toBe('exceptions/server');
          return ctx.$q.when({ data: response, status: 200 });
        };
      });
@@ -152,46 +273,39 @@ describe('AppInsightsDatasource', () => {
      it('should return a single datapoint', () => {
        return ctx.ds.query(options).then((results: any) => {
          expect(results.data.length).toBe(1);
          expect(results.data[0].datapoints.length).toBe(1);
          expect(results.data[0].target).toEqual('exceptions/server');
          expect(results.data[0].datapoints[0][1]).toEqual(1504713238845);
          expect(results.data[0].datapoints[0][0]).toEqual(100);
          const data = results.data[0] as DataFrame;
          expect(data.name).toEqual('exceptions/server');
          expect(data.fields[1].values.get(0)).toEqual(1558278660000);
          expect(data.fields[0].values.get(0)).toEqual(2.2075);
        });
      });
    });

    describe('and with an interval group and without a segment group by', () => {
      const response = {
        value: {
          start: '2017-08-30T15:53:58.845Z',
          end: '2017-09-06T15:53:58.845Z',
          interval: 'PT1H',
          segments: [
            {
              start: '2017-08-30T15:53:58.845Z',
              end: '2017-08-30T16:00:00.000Z',
              'exceptions/server': {
                sum: 3,
      const response: any = {
        results: {
          A: {
            refId: 'A',
            meta: {},
            series: [
              {
                name: 'exceptions/server',
                points: [[3, 1504108800000], [6, 1504112400000]],
              },
            },
            {
              start: '2017-08-30T16:00:00.000Z',
              end: '2017-08-30T17:00:00.000Z',
              'exceptions/server': {
                sum: 66,
              },
            },
          ],
            ],
            tables: null,
          },
        },
      };

      beforeEach(() => {
        options.targets[0].appInsights.timeGrainType = 'specific';
        options.targets[0].appInsights.timeGrain = '30';
        options.targets[0].appInsights.timeGrainUnit = 'minute';
        ctx.backendSrv.datasourceRequest = (options: { url: string }) => {
          expect(options.url).toContain('/metrics/exceptions/server');
          expect(options.url).toContain('interval=PT30M');
        options.targets[0].appInsights.timeGrain = 'PT30M';
        ctx.backendSrv.datasourceRequest = (options: any) => {
          expect(options.url).toContain('/api/tsdb/query');
          expect(options.data.queries[0].refId).toBe('A');
          expect(options.data.queries[0].appInsights.rawQueryString).toBeUndefined();
          expect(options.data.queries[0].appInsights.metricName).toBe('exceptions/server');
          expect(options.data.queries[0].appInsights.timeGrain).toBe('PT30M');
          return ctx.$q.when({ data: response, status: 200 });
        };
      });
@@ -199,108 +313,68 @@ describe('AppInsightsDatasource', () => {
      it('should return a list of datapoints', () => {
        return ctx.ds.query(options).then((results: any) => {
          expect(results.data.length).toBe(1);
          expect(results.data[0].datapoints.length).toBe(2);
          expect(results.data[0].target).toEqual('exceptions/server');
          expect(results.data[0].datapoints[0][1]).toEqual(1504108800000);
          expect(results.data[0].datapoints[0][0]).toEqual(3);
          expect(results.data[0].datapoints[1][1]).toEqual(1504112400000);
          expect(results.data[0].datapoints[1][0]).toEqual(66);
          const data = results.data[0] as DataFrame;
          expect(data.name).toEqual('exceptions/server');
          expect(data.fields[0].values.length).toEqual(2);
          expect(data.fields[1].values.get(0)).toEqual(1504108800000);
          expect(data.fields[0].values.get(0)).toEqual(3);
          expect(data.fields[1].values.get(1)).toEqual(1504112400000);
          expect(data.fields[0].values.get(1)).toEqual(6);
        });
      });
    });

    describe('and with a group by', () => {
      const response = {
        value: {
          start: '2017-08-30T15:53:58.845Z',
          end: '2017-09-06T15:53:58.845Z',
          interval: 'PT1H',
          segments: [
            {
              start: '2017-08-30T15:53:58.845Z',
              end: '2017-08-30T16:00:00.000Z',
              segments: [
                {
                  'exceptions/server': {
                    sum: 10,
                  },
                  'client/city': 'Miami',
                },
                {
                  'exceptions/server': {
                    sum: 1,
                  },
                  'client/city': 'San Jose',
                },
              ],
            },
            {
              start: '2017-08-30T16:00:00.000Z',
              end: '2017-08-30T17:00:00.000Z',
              segments: [
                {
                  'exceptions/server': {
                    sum: 20,
                  },
                  'client/city': 'Miami',
                },
                {
                  'exceptions/server': {
                    sum: 2,
                  },
                  'client/city': 'San Antonio',
                },
              ],
            },
          ],
      const response: any = {
        results: {
          A: {
            refId: 'A',
            meta: {},
            series: [
              {
                name: 'exceptions/server{client/city="Miami"}',
                points: [[10, 1504108800000], [20, 1504112400000]],
              },
              {
                name: 'exceptions/server{client/city="San Antonio"}',
                points: [[1, 1504108800000], [2, 1504112400000]],
              },
            ],
            tables: null,
          },
        },
      };

      describe('and with no alias specified', () => {
        beforeEach(() => {
          options.targets[0].appInsights.groupBy = 'client/city';
          options.targets[0].appInsights.dimension = 'client/city';

          ctx.backendSrv.datasourceRequest = (options: { url: string }) => {
            expect(options.url).toContain('/metrics/exceptions/server');
            expect(options.url).toContain('segment=client/city');
          ctx.backendSrv.datasourceRequest = (options: any) => {
            expect(options.url).toContain('/api/tsdb/query');
            expect(options.data.queries[0].appInsights.rawQueryString).toBeUndefined();
            expect(options.data.queries[0].appInsights.metricName).toBe('exceptions/server');
            expect(options.data.queries[0].appInsights.dimension).toBe('client/city');
            return ctx.$q.when({ data: response, status: 200 });
          };
        });

        it('should return a list of datapoints', () => {
          return ctx.ds.query(options).then((results: any) => {
            expect(results.data.length).toBe(3);
            expect(results.data[0].datapoints.length).toBe(2);
            expect(results.data[0].target).toEqual('exceptions/server{client/city="Miami"}');
            expect(results.data[0].datapoints[0][1]).toEqual(1504108800000);
            expect(results.data[0].datapoints[0][0]).toEqual(10);
            expect(results.data[0].datapoints[1][1]).toEqual(1504112400000);
            expect(results.data[0].datapoints[1][0]).toEqual(20);
          });
        });
      });

      describe('and with an alias specified', () => {
        beforeEach(() => {
          options.targets[0].appInsights.groupBy = 'client/city';
          options.targets[0].appInsights.alias = '{{metric}} + {{groupbyname}} + {{groupbyvalue}}';

          ctx.backendSrv.datasourceRequest = (options: { url: string }) => {
            expect(options.url).toContain('/metrics/exceptions/server');
            expect(options.url).toContain('segment=client/city');
            return ctx.$q.when({ data: response, status: 200 });
          };
        });

        it('should return a list of datapoints', () => {
          return ctx.ds.query(options).then((results: any) => {
            expect(results.data.length).toBe(3);
            expect(results.data[0].datapoints.length).toBe(2);
            expect(results.data[0].target).toEqual('exceptions/server + client/city + Miami');
            expect(results.data[0].datapoints[0][1]).toEqual(1504108800000);
            expect(results.data[0].datapoints[0][0]).toEqual(10);
            expect(results.data[0].datapoints[1][1]).toEqual(1504112400000);
            expect(results.data[0].datapoints[1][0]).toEqual(20);
            expect(results.data.length).toBe(2);
            let data = results.data[0] as DataFrame;
            expect(data.name).toEqual('exceptions/server{client/city="Miami"}');
            expect(data.fields[0].values.length).toEqual(2);
            expect(data.fields[1].values.get(0)).toEqual(1504108800000);
            expect(data.fields[0].values.get(0)).toEqual(10);
            expect(data.fields[1].values.get(1)).toEqual(1504112400000);
            expect(data.fields[0].values.get(1)).toEqual(20);
            data = results.data[1] as DataFrame;
            expect(data.name).toEqual('exceptions/server{client/city="San Antonio"}');
            expect(data.fields[0].values.length).toEqual(2);
            expect(data.fields[1].values.get(0)).toEqual(1504108800000);
            expect(data.fields[0].values.get(0)).toEqual(1);
            expect(data.fields[1].values.get(1)).toEqual(1504112400000);
            expect(data.fields[0].values.get(1)).toEqual(2);
          });
        });
      });
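
All of the rewritten assertions above share one mechanic: the datasource now POSTs to /api/tsdb/query and converts each returned series into a DataFrame, so the tests move from datapoints/target to fields/name. A minimal sketch of that conversion, using the response fixture shape from these tests and the real toDataFrame/TimeSeries exports from @grafana/data:

import { DataFrame, TimeSeries, toDataFrame } from '@grafana/data';

// Map one backend query result (fixture shape above) to DataFrames.
function seriesToFrames(queryRes: { refId: string; series: Array<{ name: string; points: number[][] }> }): DataFrame[] {
  return queryRes.series.map(s =>
    toDataFrame({ target: s.name, datapoints: s.points, refId: queryRes.refId } as TimeSeries)
  );
}
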
@@ -1,12 +1,12 @@
import _ from 'lodash';
import AppInsightsQuerystringBuilder from './app_insights_querystring_builder';
import LogAnalyticsQuerystringBuilder from '../log_analytics/querystring_builder';
import ResponseParser from './response_parser';
import { DataSourceInstanceSettings } from '@grafana/ui';
import { AzureDataSourceJsonData } from '../types';
import { TimeSeries, toDataFrame } from '@grafana/data';
import { DataQueryRequest, DataQueryResponseData, DataSourceInstanceSettings } from '@grafana/ui';
import { BackendSrv } from 'app/core/services/backend_srv';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { IQService } from 'angular';
import _ from 'lodash';

import TimegrainConverter from '../time_grain_converter';
import { AzureDataSourceJsonData, AzureMonitorQuery } from '../types';
import ResponseParser from './response_parser';

export interface LogAnalyticsColumn {
  text: string;
@@ -24,8 +24,7 @@ export default class AppInsightsDatasource {
  constructor(
    instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>,
    private backendSrv: BackendSrv,
    private templateSrv: TemplateSrv,
    private $q: IQService
    private templateSrv: TemplateSrv
  ) {
    this.id = instanceSettings.id;
    this.applicationId = instanceSettings.jsonData.appInsightsAppId;
@@ -37,73 +36,82 @@ export default class AppInsightsDatasource {
    return !!this.applicationId && this.applicationId.length > 0;
  }

  query(options: any) {
  createRawQueryRequest(item: any, options: DataQueryRequest<AzureMonitorQuery>, target: AzureMonitorQuery) {
    if (item.xaxis && !item.timeColumn) {
      item.timeColumn = item.xaxis;
    }

    if (item.yaxis && !item.valueColumn) {
      item.valueColumn = item.yaxis;
    }

    if (item.spliton && !item.segmentColumn) {
      item.segmentColumn = item.spliton;
    }

    return {
      type: 'timeSeriesQuery',
      raw: false,
      appInsights: {
        rawQuery: true,
        rawQueryString: this.templateSrv.replace(item.rawQueryString, options.scopedVars),
        timeColumn: item.timeColumn,
        valueColumn: item.valueColumn,
        segmentColumn: item.segmentColumn,
      },
    };
  }

  createMetricsRequest(item: any, options: DataQueryRequest<AzureMonitorQuery>, target: AzureMonitorQuery) {
    // fix for timeGrainUnit which is a deprecated/removed field name
    if (item.timeGrainCount) {
      item.timeGrain = TimegrainConverter.createISO8601Duration(item.timeGrainCount, item.timeGrainUnit);
    } else if (item.timeGrainUnit && item.timeGrain !== 'auto') {
      item.timeGrain = TimegrainConverter.createISO8601Duration(item.timeGrain, item.timeGrainUnit);
    }

    // migration for non-standard names
    if (item.groupBy && !item.dimension) {
      item.dimension = item.groupBy;
    }

    if (item.filter && !item.dimensionFilter) {
      item.dimensionFilter = item.filter;
    }

    return {
      type: 'timeSeriesQuery',
      raw: false,
      appInsights: {
        rawQuery: false,
        timeGrain: this.templateSrv.replace((item.timeGrain || '').toString(), options.scopedVars),
        allowedTimeGrainsMs: item.allowedTimeGrainsMs,
        metricName: this.templateSrv.replace(item.metricName, options.scopedVars),
        aggregation: this.templateSrv.replace(item.aggregation, options.scopedVars),
        dimension: this.templateSrv.replace(item.dimension, options.scopedVars),
        dimensionFilter: this.templateSrv.replace(item.dimensionFilter, options.scopedVars),
        alias: item.alias,
        format: target.format,
      },
    };
  }

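  // For reference, createMetricsRequest above folds the legacy timeGrain +
  // timeGrainUnit pair (and the newer timeGrainCount) into a single ISO 8601
  // duration before the query is shipped to the backend. Assumed converter
  // output, matching the 'PT30M' expectation in the tests:
  //   { timeGrain: '30', timeGrainUnit: 'minute' }   ->  { timeGrain: 'PT30M' }
  //   { timeGrainCount: '1', timeGrainUnit: 'hour' } ->  { timeGrain: 'PT1H' }
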
  async query(options: DataQueryRequest<AzureMonitorQuery>): Promise<DataQueryResponseData[]> {
    const queries = _.filter(options.targets, item => {
      return item.hide !== true;
    }).map(target => {
    }).map((target: AzureMonitorQuery) => {
      const item = target.appInsights;
      let query: any;
      if (item.rawQuery) {
        const querystringBuilder = new LogAnalyticsQuerystringBuilder(
          this.templateSrv.replace(item.rawQueryString, options.scopedVars),
          options,
          'timestamp'
        );
        const generated = querystringBuilder.generate();

        const url = `${this.baseUrl}/query?${generated.uriString}`;

        return {
          refId: target.refId,
          intervalMs: options.intervalMs,
          maxDataPoints: options.maxDataPoints,
          datasourceId: this.id,
          url: url,
          format: options.format,
          alias: item.alias,
          query: generated.rawQuery,
          xaxis: item.xaxis,
          yaxis: item.yaxis,
          spliton: item.spliton,
          raw: true,
        };
        query = this.createRawQueryRequest(item, options, target);
      } else {
        const querystringBuilder = new AppInsightsQuerystringBuilder(
          options.range.from,
          options.range.to,
          options.interval
        );

        if (item.groupBy !== 'none') {
          querystringBuilder.setGroupBy(this.templateSrv.replace(item.groupBy, options.scopedVars));
        }
        querystringBuilder.setAggregation(item.aggregation);
        querystringBuilder.setInterval(
          item.timeGrainType,
          this.templateSrv.replace(item.timeGrain, options.scopedVars),
          item.timeGrainUnit
        );

        querystringBuilder.setFilter(this.templateSrv.replace(item.filter || ''));

        const url = `${this.baseUrl}/metrics/${this.templateSrv.replace(
          encodeURI(item.metricName),
          options.scopedVars
        )}?${querystringBuilder.generate()}`;

        return {
          refId: target.refId,
          intervalMs: options.intervalMs,
          maxDataPoints: options.maxDataPoints,
          datasourceId: this.id,
          url: url,
          format: options.format,
          alias: item.alias,
          xaxis: '',
          yaxis: '',
          spliton: '',
          raw: false,
        };
        query = this.createMetricsRequest(item, options, target);
      }
      query.refId = target.refId;
      query.intervalMs = options.intervalMs;
      query.datasourceId = this.id;
      query.queryType = 'Application Insights';
      return query;
    });

    if (!queries || queries.length === 0) {
@@ -111,25 +119,42 @@ export default class AppInsightsDatasource {
      return;
    }

    const promises = this.doQueries(queries);
    const { data } = await this.backendSrv.datasourceRequest({
      url: '/api/tsdb/query',
      method: 'POST',
      data: {
        from: options.range.from.valueOf().toString(),
        to: options.range.to.valueOf().toString(),
        queries,
      },
    });

    return this.$q
      .all(promises)
      .then(results => {
        return new ResponseParser(results).parseQueryResult();
      })
      .then(results => {
        const flattened: any[] = [];

        for (let i = 0; i < results.length; i++) {
          if (results[i].columnsForDropdown) {
            this.logAnalyticsColumns[results[i].refId] = results[i].columnsForDropdown;
          }
          flattened.push(results[i]);
    const result: DataQueryResponseData[] = [];
    if (data.results) {
      Object.values(data.results).forEach((queryRes: any) => {
        if (queryRes.meta && queryRes.meta.columns) {
          const columnNames = queryRes.meta.columns as string[];
          this.logAnalyticsColumns[queryRes.refId] = _.map(columnNames, n => ({ text: n, value: n }));
        }

        return flattened;
        if (!queryRes.series) {
          return;
        }

        queryRes.series.forEach((series: any) => {
          const timeSerie: TimeSeries = {
            target: series.name,
            datapoints: series.points,
            refId: queryRes.refId,
            meta: queryRes.meta,
          };
          result.push(toDataFrame(timeSerie));
        });
      });
      return result;
    }

    return Promise.resolve([]);
  }

  doQueries(queries: any) {
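
Taken together, query() above now delegates execution to the backend: every non-hidden target is mapped to an appInsights query object and the batch is POSTed to /api/tsdb/query. The request body it builds looks roughly like this — illustrative values only; the datasourceId and timestamps are made up:

const body = {
  from: '1503432000000', // options.range.from.valueOf().toString()
  to: '1503446340000', // options.range.to.valueOf().toString()
  queries: [
    {
      refId: 'A',
      datasourceId: 57, // hypothetical id of the configured Azure Monitor datasource
      queryType: 'Application Insights',
      intervalMs: 60000,
      type: 'timeSeriesQuery',
      raw: false,
      appInsights: {
        rawQuery: false,
        metricName: 'exceptions/server',
        aggregation: 'sum',
        dimension: 'client/city',
        dimensionFilter: '',
        timeGrain: 'PT30M',
        allowedTimeGrainsMs: [60000, 300000],
        alias: '',
      },
    },
  ],
};
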
@@ -1,72 +0,0 @@
import AppInsightsQuerystringBuilder from './app_insights_querystring_builder';
import { toUtc } from '@grafana/data';

describe('AppInsightsQuerystringBuilder', () => {
  let builder: AppInsightsQuerystringBuilder;

  beforeEach(() => {
    builder = new AppInsightsQuerystringBuilder(toUtc('2017-08-22 06:00'), toUtc('2017-08-22 07:00'), '1h');
  });

  describe('with only from/to date range', () => {
    it('should always add datetime filtering to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and aggregation type', () => {
    beforeEach(() => {
      builder.setAggregation('avg');
    });

    it('should add datetime filtering and aggregation to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&aggregation=avg`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and group by segment', () => {
    beforeEach(() => {
      builder.setGroupBy('client/city');
    });

    it('should add datetime filtering and segment to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&segment=client/city`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and specific group by interval', () => {
    beforeEach(() => {
      builder.setInterval('specific', 1, 'hour');
    });

    it('should add datetime filtering and interval to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&interval=PT1H`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with from/to date range and auto group by interval', () => {
    beforeEach(() => {
      builder.setInterval('auto', '', '');
    });

    it('should add datetime filtering and interval to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&interval=PT1H`;
      expect(builder.generate()).toEqual(querystring);
    });
  });

  describe('with filter', () => {
    beforeEach(() => {
      builder.setFilter(`client/city eq 'Boydton'`);
    });

    it('should add datetime filtering and interval to the querystring', () => {
      const querystring = `timespan=2017-08-22T06:00:00Z/2017-08-22T07:00:00Z&filter=client/city eq 'Boydton'`;
      expect(builder.generate()).toEqual(querystring);
    });
  });
});
@@ -1,56 +0,0 @@
import TimeGrainConverter from '../time_grain_converter';

export default class AppInsightsQuerystringBuilder {
  aggregation = '';
  groupBy = '';
  timeGrainType = '';
  timeGrain = '';
  timeGrainUnit = '';
  filter = '';

  constructor(private from: any, private to: any, public grafanaInterval: any) {}

  setAggregation(aggregation: string) {
    this.aggregation = aggregation;
  }

  setGroupBy(groupBy: string) {
    this.groupBy = groupBy;
  }

  setInterval(timeGrainType: string, timeGrain: any, timeGrainUnit: string) {
    this.timeGrainType = timeGrainType;
    this.timeGrain = timeGrain;
    this.timeGrainUnit = timeGrainUnit;
  }

  setFilter(filter: string) {
    this.filter = filter;
  }

  generate() {
    let querystring = `timespan=${this.from.utc().format()}/${this.to.utc().format()}`;

    if (this.aggregation && this.aggregation.length > 0) {
      querystring += `&aggregation=${this.aggregation}`;
    }

    if (this.groupBy && this.groupBy.length > 0) {
      querystring += `&segment=${this.groupBy}`;
    }

    if (this.timeGrainType === 'specific' && this.timeGrain && this.timeGrainUnit) {
      querystring += `&interval=${TimeGrainConverter.createISO8601Duration(this.timeGrain, this.timeGrainUnit)}`;
    }

    if (this.timeGrainType === 'auto') {
      querystring += `&interval=${TimeGrainConverter.createISO8601DurationFromInterval(this.grafanaInterval)}`;
    }

    if (this.filter) {
      querystring += `&filter=${this.filter}`;
    }

    return querystring;
  }
}
@@ -22,12 +22,7 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
  ) {
    super(instanceSettings);
    this.azureMonitorDatasource = new AzureMonitorDatasource(instanceSettings, this.backendSrv, this.templateSrv);
    this.appInsightsDatasource = new AppInsightsDatasource(
      instanceSettings,
      this.backendSrv,
      this.templateSrv,
      this.$q
    );
    this.appInsightsDatasource = new AppInsightsDatasource(instanceSettings, this.backendSrv, this.templateSrv);

    this.azureLogAnalyticsDatasource = new AzureLogAnalyticsDatasource(
      instanceSettings,
@@ -48,7 +48,7 @@
      get-options="ctrl.getMetricNamespaces($query)" on-change="ctrl.onMetricNamespacesChange()" css-class="min-width-12">
    </gf-form-dropdown>
  </div>
  <div class="gf-form">
  <div class="gf-form">
    <label class="gf-form-label query-keyword width-9">Metric</label>
    <gf-form-dropdown model="ctrl.target.azureMonitor.metricName" allow-custom="true" lookup-text="true"
      get-options="ctrl.getMetricNames($query)" on-change="ctrl.onMetricNameChange()" css-class="min-width-12">
@@ -62,7 +62,7 @@
    </div>
  </div>
</div>
<div class="gf-form-inline">
<div class="gf-form-inline">
  <div class="gf-form">
    <label class="gf-form-label query-keyword width-9">Time Grain</label>
    <div class="gf-form-select-wrapper gf-form-select-wrapper--caret-indent timegrainunit-dropdown-wrapper">
@@ -72,7 +72,7 @@
  </div>
  <div class="gf-form" ng-show="ctrl.target.azureMonitor.timeGrain.trim() === 'auto'">
    <label class="gf-form-label">Auto Interval</label>
    <label class="gf-form-label">{{ctrl.getAutoInterval()}}</label>
    <label class="gf-form-label">{{ctrl.getAzureMonitorAutoInterval()}}</label>
  </div>
  <div class="gf-form gf-form--grow">
    <div class="gf-form-label gf-form-label--grow"></div>
@@ -238,19 +238,19 @@
<div class="gf-form-inline">
  <div class="gf-form">
    <label class="gf-form-label query-keyword width-9">Group By</label>
    <gf-form-dropdown allow-custom="true" ng-hide="ctrl.target.appInsights.groupBy !== 'none'" model="ctrl.target.appInsights.groupBy"
    <gf-form-dropdown allow-custom="true" ng-hide="ctrl.target.appInsights.dimension !== 'none'" model="ctrl.target.appInsights.dimension"
      lookup-text="true" get-options="ctrl.getAppInsightsGroupBySegments($query)" on-change="ctrl.refresh()"
      css-class="min-width-20">
    </gf-form-dropdown>
    <label class="gf-form-label min-width-20 pointer" ng-hide="ctrl.target.appInsights.groupBy === 'none'"
      ng-click="ctrl.resetAppInsightsGroupBy()">{{ctrl.target.appInsights.groupBy}}
    <label class="gf-form-label min-width-20 pointer" ng-hide="ctrl.target.appInsights.dimension === 'none'"
      ng-click="ctrl.resetAppInsightsGroupBy()">{{ctrl.target.appInsights.dimension}}
      <i class="fa fa-remove"></i>
    </label>
  </div>
  <div class="gf-form-inline">
    <div class="gf-form">
      <label class="gf-form-label query-keyword width-9">Filter</label>
      <input type="text" class="gf-form-input width-17" ng-model="ctrl.target.appInsights.filter" spellcheck="false"
      <input type="text" class="gf-form-input width-17" ng-model="ctrl.target.appInsights.dimensionFilter" spellcheck="false"
        placeholder="your/groupby eq 'a_value'" ng-blur="ctrl.refresh()">
    </div>
  </div>
@@ -258,7 +258,6 @@
    <div class="gf-form-label gf-form-label--grow"></div>
  </div>
</div>

<div class="gf-form-inline">
  <div class="gf-form">
    <label class="gf-form-label query-keyword width-9">Time Grain</label>
@@ -268,17 +267,17 @@
      </div>
    </div>
    <div class="gf-form" ng-hide="ctrl.target.appInsights.timeGrainType === 'auto' || ctrl.target.appInsights.timeGrainType === 'none'">
      <input type="text" class="gf-form-input width-3" ng-model="ctrl.target.appInsights.timeGrain" spellcheck="false"
        placeholder="" ng-blur="ctrl.refresh()">
      <input type="text" class="gf-form-input width-3" ng-model="ctrl.target.appInsights.timeGrainCount" spellcheck="false"
        placeholder="" ng-blur="ctrl.updateAppInsightsTimeGrain()">
    </div>
    <div class="gf-form" ng-hide="ctrl.target.appInsights.timeGrainType === 'auto' || ctrl.target.appInsights.timeGrainType === 'none'">
      <div class="gf-form-select-wrapper gf-form-select-wrapper--caret-indent timegrainunit-dropdown-wrapper">
      <div class="gf-form-select-wrapper gf-form-select-wrapper--caret-indent timegrainunit-dropdown-wrapper">
        <select class="gf-form-input" ng-model="ctrl.target.appInsights.timeGrainUnit" ng-options="f as f for f in ['minute', 'hour', 'day', 'month', 'year']"
          ng-change="ctrl.refresh()"></select>
          ng-change="ctrl.updateAppInsightsTimeGrain()"></select>
      </div>
    </div>
  </div>
  <div class="gf-form" ng-hide="ctrl.target.appInsights.timeGrainType !== 'auto'">
    <label class="gf-form-label">Auto Interval</label>
    <label class="gf-form-label">Auto Interval</label>
    <label class="gf-form-label">{{ctrl.getAppInsightsAutoInterval()}}</label>
  </div>
  <div class="gf-form gf-form--grow">
@@ -291,10 +290,9 @@
    <input type="text" class="gf-form-input width-30" ng-model="ctrl.target.appInsights.alias" spellcheck="false"
      placeholder="alias patterns (see help for more info)" ng-blur="ctrl.refresh()">
  </div>

  <div class="gf-form gf-form--grow">
    <div class="gf-form-label gf-form-label--grow"></div>
  </div>
</div>
  <div class="gf-form gf-form--grow">
    <div class="gf-form-label gf-form-label--grow"></div>
  </div>
</div>
<div ng-show="ctrl.target.appInsights.rawQuery">
@@ -316,13 +314,13 @@
  <div class="gf-form-inline">
    <div class="gf-form">
      <label class="gf-form-label query-keyword width-9">X-axis</label>
      <gf-form-dropdown model="ctrl.target.appInsights.xaxis" allow-custom="true" placeholder="eg. 'timestamp'"
      <gf-form-dropdown model="ctrl.target.appInsights.timeColumn" allow-custom="true" placeholder="eg. 'timestamp'"
        get-options="ctrl.getAppInsightsColumns($query)" on-change="ctrl.onAppInsightsColumnChange()" css-class="min-width-20">
      </gf-form-dropdown>
    </div>
    <div class="gf-form">
      <label class="gf-form-label query-keyword width-9">Y-axis(es)</label>
      <gf-form-dropdown model="ctrl.target.appInsights.yaxis" allow-custom="true" get-options="ctrl.getAppInsightsColumns($query)"
      <label class="gf-form-label query-keyword width-9">Y-axis</label>
      <gf-form-dropdown model="ctrl.target.appInsights.valueColumn" allow-custom="true" get-options="ctrl.getAppInsightsColumns($query)"
        on-change="ctrl.onAppInsightsColumnChange()" css-class="min-width-20">
      </gf-form-dropdown>
    </div>
@@ -333,7 +331,7 @@
  <div class="gf-form-inline">
    <div class="gf-form">
      <label class="gf-form-label query-keyword width-9">Split On</label>
      <gf-form-dropdown model="ctrl.target.appInsights.spliton" allow-custom="true" get-options="ctrl.getAppInsightsColumns($query)"
      <gf-form-dropdown model="ctrl.target.appInsights.segmentColumn" allow-custom="true" get-options="ctrl.getAppInsightsColumns($query)"
        on-change="ctrl.onAppInsightsColumnChange()" css-class="min-width-20">
      </gf-form-dropdown>
    </div>
@@ -41,7 +41,7 @@ describe('AzureMonitorQueryCtrl', () => {
      expect(queryCtrl.target.azureMonitor.resourceName).toBe('select');
      expect(queryCtrl.target.azureMonitor.metricNamespace).toBe('select');
      expect(queryCtrl.target.azureMonitor.metricName).toBe('select');
      expect(queryCtrl.target.appInsights.groupBy).toBe('none');
      expect(queryCtrl.target.appInsights.dimension).toBe('none');
    });
  });

@@ -239,6 +239,35 @@ describe('AzureMonitorQueryCtrl', () => {
    });

    describe('and query type is Application Insights', () => {
      describe('and target is in old format', () => {
        it('data is migrated', () => {
          queryCtrl.target.appInsights.xaxis = 'sample-x';
          queryCtrl.target.appInsights.yaxis = 'sample-y';
          queryCtrl.target.appInsights.spliton = 'sample-split';
          queryCtrl.target.appInsights.groupBy = 'sample-group';
          queryCtrl.target.appInsights.groupByOptions = ['sample-group-1', 'sample-group-2'];
          queryCtrl.target.appInsights.filter = 'sample-filter';
          queryCtrl.target.appInsights.metricName = 'sample-metric';

          queryCtrl.migrateApplicationInsightsKeys();

          expect(queryCtrl.target.appInsights.xaxis).toBeUndefined();
          expect(queryCtrl.target.appInsights.yaxis).toBeUndefined();
          expect(queryCtrl.target.appInsights.spliton).toBeUndefined();
          expect(queryCtrl.target.appInsights.groupBy).toBeUndefined();
          expect(queryCtrl.target.appInsights.groupByOptions).toBeUndefined();
          expect(queryCtrl.target.appInsights.filter).toBeUndefined();

          expect(queryCtrl.target.appInsights.timeColumn).toBe('sample-x');
          expect(queryCtrl.target.appInsights.valueColumn).toBe('sample-y');
          expect(queryCtrl.target.appInsights.segmentColumn).toBe('sample-split');
          expect(queryCtrl.target.appInsights.dimension).toBe('sample-group');
          expect(queryCtrl.target.appInsights.dimensions).toEqual(['sample-group-1', 'sample-group-2']);
          expect(queryCtrl.target.appInsights.dimensionFilter).toBe('sample-filter');
          expect(queryCtrl.target.appInsights.metricName).toBe('sample-metric');
        });
      });

    describe('when getOptions for the Metric Names dropdown is called', () => {
      const response = [{ text: 'metric1', value: 'metric1' }, { text: 'metric2', value: 'metric2' }];

@@ -259,7 +288,7 @@ describe('AzureMonitorQueryCtrl', () => {

    describe('when getOptions for the GroupBy segments dropdown is called', () => {
      beforeEach(() => {
        queryCtrl.target.appInsights.groupByOptions = ['opt1', 'opt2'];
        queryCtrl.target.appInsights.dimensions = ['opt1', 'opt2'];
      });

      it('should return a list of GroupBy segments', () => {
@@ -291,8 +320,8 @@ describe('AzureMonitorQueryCtrl', () => {
        expect(queryCtrl.target.appInsights.aggregation).toBe('avg');
        expect(queryCtrl.target.appInsights.aggOptions).toContain('avg');
        expect(queryCtrl.target.appInsights.aggOptions).toContain('sum');
        expect(queryCtrl.target.appInsights.groupByOptions).toContain('client/os');
        expect(queryCtrl.target.appInsights.groupByOptions).toContain('client/city');
        expect(queryCtrl.target.appInsights.dimensions).toContain('client/os');
        expect(queryCtrl.target.appInsights.dimensions).toContain('client/city');
      });
    });
  });
@@ -32,13 +32,13 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
    dimensionFilter: string;
    timeGrain: string;
    timeGrainUnit: string;
    timeGrains: Array<{ text: string; value: string }>;
    allowedTimeGrainsMs: number[];
    dimensions: any[];
    dimension: any;
    top: string;
    aggregation: string;
    aggOptions: string[];
    timeGrains: Array<{ text: string; value: string }>;
  };
  azureLogAnalytics: {
    query: string;
@@ -46,19 +46,28 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
    workspace: string;
  };
  appInsights: {
    metricName: string;
    rawQuery: boolean;
    rawQueryString: string;
    groupBy: string;
    timeGrainType: string;
    xaxis: string;
    yaxis: string;
    spliton: string;
    // metric style query when rawQuery == false
    metricName: string;
    dimension: any;
    dimensionFilter: string;
    dimensions: string[];

    aggOptions: string[];
    aggregation: string;
    groupByOptions: string[];

    timeGrainType: string;
    timeGrainCount: string;
    timeGrainUnit: string;
    timeGrain: string;
    timeGrains: Array<{ text: string; value: string }>;
    allowedTimeGrainsMs: number[];

    // query style query when rawQuery == true
    rawQueryString: string;
    timeColumn: string;
    valueColumn: string;
    segmentColumn: string;
  };
};

@@ -73,6 +82,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
      dimensionFilter: '*',
      timeGrain: 'auto',
      top: '10',
      aggOptions: [] as string[],
      timeGrains: [] as string[],
    },
    azureLogAnalytics: {
      query: [
@@ -96,11 +107,10 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
      metricName: this.defaultDropdownValue,
      rawQuery: false,
      rawQueryString: '',
      groupBy: 'none',
      timeGrainType: 'auto',
      xaxis: 'timestamp',
      yaxis: '',
      spliton: '',
      dimension: 'none',
      timeGrain: 'auto',
      timeColumn: 'timestamp',
      valueColumn: '',
    },
  };

@@ -124,6 +134,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {

    this.migrateToDefaultNamespace();

    this.migrateApplicationInsightsKeys();

    this.panelCtrl.events.on('data-received', this.onDataReceived.bind(this), $scope);
    this.panelCtrl.events.on('data-error', this.onDataError.bind(this), $scope);
    this.resultFormats = [{ text: 'Time series', value: 'time_series' }, { text: 'Table', value: 'table' }];
@@ -184,6 +196,23 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
      this.onMetricNameChange();
    }

    if (this.target.appInsights.timeGrainUnit) {
      if (this.target.appInsights.timeGrain !== 'auto') {
        if (this.target.appInsights.timeGrainCount) {
          this.target.appInsights.timeGrain = TimegrainConverter.createISO8601Duration(
            this.target.appInsights.timeGrainCount,
            this.target.appInsights.timeGrainUnit
          );
        } else {
          this.target.appInsights.timeGrainCount = this.target.appInsights.timeGrain;
          this.target.appInsights.timeGrain = TimegrainConverter.createISO8601Duration(
            this.target.appInsights.timeGrain,
            this.target.appInsights.timeGrainUnit
          );
        }
      }
    }

    if (
      this.target.azureMonitor.timeGrains &&
      this.target.azureMonitor.timeGrains.length > 0 &&
@@ -191,6 +220,14 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
    ) {
      this.target.azureMonitor.allowedTimeGrainsMs = this.convertTimeGrainsToMs(this.target.azureMonitor.timeGrains);
    }

    if (
      this.target.appInsights.timeGrains &&
      this.target.appInsights.timeGrains.length > 0 &&
      (!this.target.appInsights.allowedTimeGrainsMs || this.target.appInsights.allowedTimeGrainsMs.length === 0)
    ) {
      this.target.appInsights.allowedTimeGrainsMs = this.convertTimeGrainsToMs(this.target.appInsights.timeGrains);
    }
  }

  migrateToFromTimes() {
@@ -210,6 +247,27 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
    this.target.azureMonitor.metricNamespace = this.target.azureMonitor.metricDefinition;
  }

  migrateApplicationInsightsKeys(): void {
    const appInsights = this.target.appInsights as any;

    // Migrate old app insights data keys to match other datasources
    const mappings = {
      xaxis: 'timeColumn',
      yaxis: 'valueColumn',
      spliton: 'segmentColumn',
      groupBy: 'dimension',
      groupByOptions: 'dimensions',
      filter: 'dimensionFilter',
    } as { [old: string]: string };

    for (const old in mappings) {
      if (appInsights[old]) {
        appInsights[mappings[old]] = appInsights[old];
        delete appInsights[old];
      }
    }
  }

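  // migrateApplicationInsightsKeys above is what keeps pre-existing dashboards
  // working: each legacy key is copied to its standardized name and then
  // deleted. Illustrative effect on a stored target (values made up):
  //   { xaxis: 'timestamp', yaxis: 'max', spliton: 'partition', filter: "city eq 'Miami'" }
  //     -> { timeColumn: 'timestamp', valueColumn: 'max', segmentColumn: 'partition', dimensionFilter: "city eq 'Miami'" }
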
  replace(variable: string) {
    return this.templateSrv.replace(variable, this.panelCtrl.panel.scopedVars);
  }
@@ -424,6 +482,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
        if (metadata.dimensions.length > 0) {
          this.target.azureMonitor.dimension = metadata.dimensions[0].value;
        }

        return this.refresh();
      })
      .catch(this.handleQueryCtrlError.bind(this));
@@ -439,19 +498,34 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
    return allowedTimeGrainsMs;
  }

  getAutoInterval() {
    if (this.target.azureMonitor.timeGrain === 'auto') {
  generateAutoUnits(timeGrain: string, timeGrains: Array<{ value: string }>) {
    if (timeGrain === 'auto') {
      return TimegrainConverter.findClosestTimeGrain(
        this.templateSrv.getBuiltInIntervalValue(),
        _.map(this.target.azureMonitor.timeGrains, o =>
          TimegrainConverter.createKbnUnitFromISO8601Duration(o.value)
        ) || ['1m', '5m', '15m', '30m', '1h', '6h', '12h', '1d']
        _.map(timeGrains, o => TimegrainConverter.createKbnUnitFromISO8601Duration(o.value)) || [
          '1m',
          '5m',
          '15m',
          '30m',
          '1h',
          '6h',
          '12h',
          '1d',
        ]
      );
    }

    return '';
  }

  getAzureMonitorAutoInterval() {
    return this.generateAutoUnits(this.target.azureMonitor.timeGrain, this.target.azureMonitor.timeGrains);
  }

  getApplicationInsightAutoInterval() {
    return this.generateAutoUnits(this.target.appInsights.timeGrain, this.target.appInsights.timeGrains);
  }

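  // The generateAutoUnits refactor above lets both query types share the
  // 'auto' interval logic: supported ISO 8601 grains are converted to
  // kbn-style units and the closest one to the dashboard's built-in interval
  // is picked. Schematically (assumed converter outputs):
  //   timeGrains [{ value: 'PT1M' }, { value: 'PT1H' }]  ->  ['1m', '1h']
  //   with a built-in interval near '2m', the result would be '1m'.
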
  /* Azure Log Analytics */

  getWorkspaces = () => {
@@ -521,7 +595,7 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
      .getAppInsightsMetricMetadata(this.replace(this.target.appInsights.metricName))
      .then((aggData: { supportedAggTypes: string[]; supportedGroupBy: string[]; primaryAggType: string }) => {
        this.target.appInsights.aggOptions = aggData.supportedAggTypes;
        this.target.appInsights.groupByOptions = aggData.supportedGroupBy;
        this.target.appInsights.dimensions = aggData.supportedGroupBy;
        this.target.appInsights.aggregation = aggData.primaryAggType;
        return this.refresh();
      })
@@ -541,27 +615,41 @@ export class AzureMonitorQueryCtrl extends QueryCtrl {
  };

  getAppInsightsGroupBySegments(query: any) {
    return _.map(this.target.appInsights.groupByOptions, option => {
    return _.map(this.target.appInsights.dimensions, (option: string) => {
      return { text: option, value: option };
    });
  }

  resetAppInsightsGroupBy() {
    this.target.appInsights.groupBy = 'none';
    this.refresh();
  }

  updateTimeGrainType() {
    if (this.target.appInsights.timeGrainType === 'specific') {
      this.target.appInsights.timeGrain = '1';
      this.target.appInsights.timeGrainUnit = 'minute';
    } else {
      this.target.appInsights.timeGrain = '';
    }
    this.target.appInsights.dimension = 'none';
    this.refresh();
  }

  toggleEditorMode() {
    this.target.appInsights.rawQuery = !this.target.appInsights.rawQuery;
  }

  updateTimeGrainType() {
    if (this.target.appInsights.timeGrainType === 'specific') {
      this.target.appInsights.timeGrainCount = '1';
      this.target.appInsights.timeGrainUnit = 'minute';
      this.target.appInsights.timeGrain = TimegrainConverter.createISO8601Duration(
        this.target.appInsights.timeGrainCount,
        this.target.appInsights.timeGrainUnit
      );
    } else {
      this.target.appInsights.timeGrainCount = '';
      this.target.appInsights.timeGrainUnit = '';
    }
  }

  updateAppInsightsTimeGrain() {
    if (this.target.appInsights.timeGrainUnit && this.target.appInsights.timeGrainCount) {
      this.target.appInsights.timeGrain = TimegrainConverter.createISO8601Duration(
        this.target.appInsights.timeGrainCount,
        this.target.appInsights.timeGrainUnit
      );
    }
    this.refresh();
  }
}
@@ -1,11 +1,12 @@
import { DataQuery, DataSourceJsonData } from '@grafana/ui';

export interface AzureMonitorQuery extends DataQuery {
  refId: string;
  format: string;
  subscription: string;
  azureMonitor: AzureMetricQuery;
  azureLogAnalytics: AzureLogsQuery;
  // appInsights: any;
  appInsights: ApplicationInsightsQuery;
}

export interface AzureDataSourceJsonData extends DataSourceJsonData {
@@ -35,7 +36,6 @@ export interface AzureMetricQuery {
  metricName: string;
  timeGrainUnit: string;
  timeGrain: string;
  timeGrains: string[];
  allowedTimeGrainsMs: number[];
  aggregation: string;
  dimension: string;
@@ -50,6 +50,19 @@ export interface AzureLogsQuery {
  workspace: string;
}

export interface ApplicationInsightsQuery {
  rawQuery: boolean;
  rawQueryString: any;
  metricName: string;
  timeGrainUnit: string;
  timeGrain: string;
  allowedTimeGrainsMs: number[];
  aggregation: string;
  dimension: string;
  dimensionFilter: string;
  alias: string;
}

// Azure Monitor API Types

export interface AzureMonitorMetricDefinitionsResponse {
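
For orientation, a target that satisfies the new ApplicationInsightsQuery interface — and that the Go backend can now evaluate for alerting — would look like this; the concrete values are illustrative:

const appInsightsTarget: ApplicationInsightsQuery = {
  rawQuery: false,
  rawQueryString: '',
  metricName: 'exceptions/server',
  timeGrainUnit: 'minute',
  timeGrain: 'PT30M',
  allowedTimeGrainsMs: [60000, 300000],
  aggregation: 'sum',
  dimension: 'client/city',
  dimensionFilter: "client/city eq 'Miami'",
  alias: '{{metric}}',
};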