AzureMonitor: Remove deprecated code (#48328)

Andres Martinez Gotor
2022-04-28 01:27:39 -07:00
committed by GitHub
parent 07bd261cff
commit 6edefe5147
42 changed files with 39 additions and 3523 deletions

View File

@@ -9,7 +9,6 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
@@ -125,7 +124,5 @@ func (s *Service) newResourceMux() *http.ServeMux {
mux.HandleFunc("/azuremonitor/", s.handleResourceReq(azureMonitor))
mux.HandleFunc("/loganalytics/", s.handleResourceReq(azureLogAnalytics))
mux.HandleFunc("/resourcegraph/", s.handleResourceReq(azureResourceGraph))
// Remove with Grafana 9
mux.HandleFunc("/appinsights/", s.handleResourceReq(deprecated.AppInsights))
return mux
}

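For context: a mux like this is normally handed to the plugin SDK's httpadapter (imported above) so that CallResource requests reach these handlers; with the /appinsights/ route gone, such requests simply fall through to the mux's default 404. A minimal sketch of that wiring, using an illustrative function name rather than the service's actual one:

package azuremonitor

import (
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
)

// buildResourceHandler (hypothetical name) adapts a resource mux into the
// handler type the plugin SDK expects for CallResource requests.
func buildResourceHandler(mux *http.ServeMux) backend.CallResourceHandler {
	return httpadapter.New(mux)
}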
View File

@@ -6,7 +6,6 @@ import (
"fmt"
"net/http"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
@@ -15,7 +14,6 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/metrics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
@@ -30,13 +28,6 @@ func ProvideService(cfg *setting.Cfg, httpClientProvider *httpclient.Provider, t
azureResourceGraph: &resourcegraph.AzureResourceGraphDatasource{Proxy: proxy},
}
// Insights Analytics and Application Insights were deprecated in Grafana 8.x and
// will be finally removed with Grafana 9
if setting.BuildVersion != "" && semver.MustParse(setting.BuildVersion).Compare(semver.MustParse("9.0.0-beta1")) < 0 {
executors[deprecated.InsightsAnalytics] = &deprecated.InsightsAnalyticsDatasource{Proxy: proxy}
executors[deprecated.AppInsights] = &deprecated.ApplicationInsightsDatasource{Proxy: proxy}
}
im := datasource.NewInstanceManager(NewInstanceSettings(cfg, httpClientProvider, executors))
s := &Service{

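The deleted block gated the deprecated executors on the build version using the Masterminds semver package, which is also dropped from the imports above. A standalone sketch of that comparison, using the same versions as the test table below; only a version that compares below 9.0.0-beta1 keeps the deprecated executors:

package main

import (
	"fmt"

	"github.com/Masterminds/semver"
)

func main() {
	cutoff := semver.MustParse("9.0.0-beta1")
	for _, v := range []string{"8.5.0", "9.0.0-beta1", "9.0.0"} {
		// Compare returns a negative value when the receiver sorts before the
		// argument, so only 8.5.0 registers the deprecated executors.
		fmt.Printf("%-12s include deprecated executors: %v\n", v, semver.MustParse(v).Compare(cutoff) < 0)
	}
}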
View File

@@ -14,41 +14,11 @@ import (
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProvideService(t *testing.T) {
t.Run("it should skip insight analytics and app insights with Grafana 9", func(t *testing.T) {
currentV := setting.BuildVersion
t.Cleanup(func() {
setting.BuildVersion = currentV
})
versions := []struct {
version string
shouldIncludeInsights bool
}{
{"8.5.0", true},
{"9.0.0-beta1", false},
{"9.0.0", false},
}
for _, v := range versions {
setting.BuildVersion = v.version
s := ProvideService(setting.NewCfg(), httpclient.NewProvider(), nil)
if v.shouldIncludeInsights {
assert.NotNil(t, s.executors[deprecated.InsightsAnalytics])
assert.NotNil(t, s.executors[deprecated.AppInsights])
} else {
assert.Nil(t, s.executors[deprecated.InsightsAnalytics])
assert.Nil(t, s.executors[deprecated.AppInsights])
}
}
})
}
func TestNewInstanceSettings(t *testing.T) {
tests := []struct {
name string

View File

@@ -1,269 +0,0 @@
package deprecated
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil"
)
// ApplicationInsightsDatasource calls the application insights query API.
type ApplicationInsightsDatasource struct {
Proxy types.ServiceProxy
}
// ApplicationInsightsQuery is the model that holds the information
// needed to make a metrics query to Application Insights, and the information
// used to parse the response.
type ApplicationInsightsQuery struct {
RefID string
TimeRange backend.TimeRange
// Text based raw query options.
ApiURL string
Params url.Values
Alias string
Target string
// These fields are used when parsing the response.
metricName string
dimensions []string
aggregation string
}
func (e *ApplicationInsightsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
func (e *ApplicationInsightsDatasource) ExecuteTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
queries, err := e.buildQueries(originalQueries)
if err != nil {
return nil, err
}
for _, query := range queries {
queryRes, err := e.executeQuery(ctx, query, dsInfo, client, url, tracer)
if err != nil {
return nil, err
}
result.Responses[query.RefID] = queryRes
}
return result, nil
}
func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery) ([]*ApplicationInsightsQuery, error) {
applicationInsightsQueries := []*ApplicationInsightsQuery{}
for _, query := range queries {
queryBytes, err := query.JSON.MarshalJSON()
if err != nil {
return nil, fmt.Errorf("failed to re-encode the Azure Application Insights query into JSON: %w", err)
}
queryJSONModel := insightsJSONQuery{}
err = json.Unmarshal(queryBytes, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Application Insights query object from JSON: %w", err)
}
insightsJSONModel := queryJSONModel.AppInsights
azlog.Debug("Application Insights", "target", insightsJSONModel)
azureURL := fmt.Sprintf("metrics/%s", insightsJSONModel.MetricName)
timeGrain := insightsJSONModel.TimeGrain
timeGrains := insightsJSONModel.AllowedTimeGrainsMs
// Previous versions of the query model don't specify a time grain, so we
// need to fall back to a default value
if timeGrain == "auto" || timeGrain == "" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
if timeGrain != "none" {
params.Add("interval", timeGrain)
}
params.Add("aggregation", insightsJSONModel.Aggregation)
dimensionFilter := strings.TrimSpace(insightsJSONModel.DimensionFilter)
if dimensionFilter != "" {
params.Add("filter", dimensionFilter)
}
if len(insightsJSONModel.Dimensions) != 0 {
params.Add("segment", strings.Join(insightsJSONModel.Dimensions, ","))
}
applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{
RefID: query.RefID,
TimeRange: query.TimeRange,
ApiURL: azureURL,
Params: params,
Alias: insightsJSONModel.Alias,
Target: params.Encode(),
metricName: insightsJSONModel.MetricName,
aggregation: insightsJSONModel.Aggregation,
dimensions: insightsJSONModel.Dimensions,
})
}
return applicationInsightsQueries, nil
}
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (
backend.DataResponse, error) {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, dsInfo, url)
if err != nil {
dataResponse.Error = err
return dataResponse, nil
}
req.URL.Path = path.Join(req.URL.Path, query.ApiURL)
req.URL.RawQuery = query.Params.Encode()
ctx, span := tracer.Start(ctx, "application insights query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
res, err := client.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse, nil
}
body, err := ioutil.ReadAll(res.Body)
defer func() {
if err := res.Body.Close(); err != nil {
azlog.Warn("Failed to close response body", "err", err)
}
}()
if err != nil {
return backend.DataResponse{}, err
}
if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return backend.DataResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
mr := MetricsResult{}
err = json.Unmarshal(body, &mr)
if err != nil {
return backend.DataResponse{}, err
}
frame, err := InsightsMetricsResultToFrame(mr, query.metricName, query.aggregation, query.dimensions)
if err != nil {
dataResponse.Error = err
return dataResponse, nil
}
applyInsightsMetricAlias(frame, query.Alias)
dataResponse.Frames = data.Frames{frame}
return dataResponse, nil
}
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
appInsightsAppID := dsInfo.Settings.AppInsightsAppId
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
azlog.Debug("Failed to create request", "error", err)
return nil, errutil.Wrap("Failed to create request", err)
}
req.URL.Path = fmt.Sprintf("/v1/apps/%s", appInsightsAppID)
return req, nil
}
// formatApplicationInsightsLegendKey builds the legend key or timeseries name
// Alias patterns like {{metric}} are replaced with the appropriate data values.
func formatApplicationInsightsLegendKey(alias string, metricName string, labels data.Labels) string {
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
sort.Strings(keys)
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
switch metaPartName {
case "metric":
return []byte(metricName)
case "dimensionname", "groupbyname":
return []byte(keys[0])
case "dimensionvalue", "groupbyvalue":
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
func applyInsightsMetricAlias(frame *data.Frame, alias string) {
if alias == "" {
return
}
for _, field := range frame.Fields {
if field.Type() == data.FieldTypeTime || field.Type() == data.FieldTypeNullableTime {
continue
}
displayName := formatApplicationInsightsLegendKey(alias, field.Name, field.Labels)
if field.Config == nil {
field.Config = &data.FieldConfig{}
}
field.Config.DisplayName = displayName
}
}

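To make the alias substitution concrete: given the pattern used in the tests below and labels from a segmented response, the legend key resolves as in this standalone sketch. The regexp here only approximates types.LegendKeyFormat, and the label map is written pre-lowercased, as formatApplicationInsightsLegendKey does internally:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Approximation of types.LegendKeyFormat; the exact pattern may differ.
var legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

func main() {
	alias := "{{ metric }}: Country,City: {{ client/countryOrRegion }},{{ client/city }}"
	metric := "traces/count"
	labels := map[string]string{"client/countryorregion": "Japan", "client/city": "Tokyo"}

	out := legendKeyFormat.ReplaceAllStringFunc(alias, func(in string) string {
		name := strings.ToLower(strings.TrimSpace(strings.Trim(in, "{}")))
		if name == "metric" {
			return metric
		}
		if v, ok := labels[name]; ok {
			return v
		}
		return in
	})
	fmt.Println(out) // traces/count: Country,City: Japan,Tokyo
}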
View File

@@ -1,237 +0,0 @@
package deprecated
import (
"context"
"encoding/json"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
func TestApplicationInsightsDatasource(t *testing.T) {
t.Run("ApplicationInsightsDatasource", func(t *testing.T) {
datasource := &ApplicationInsightsDatasource{}
t.Run("Parse queries from frontend and build AzureMonitor API queries", func(t *testing.T) {
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
tsdbQuery := []backend.DataQuery{
{
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
JSON: []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "PT1M",
"aggregation": "Average",
"metricName": "server/exceptions",
"alias": "testalias",
"queryType": "Application Insights"
}
}`),
RefID: "A",
Interval: 1234,
},
}
t.Run("and is a normal query", func(t *testing.T) {
queries, err := datasource.buildQueries(tsdbQuery)
require.NoError(t, err)
require.Equal(t, len(queries), 1)
require.Equal(t, queries[0].RefID, "A")
require.Equal(t, queries[0].ApiURL, "metrics/server/exceptions")
require.Equal(t, queries[0].Target, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
require.Equal(t, len(queries[0].Params), 3)
require.Equal(t, queries[0].Params["timespan"][0], "2018-03-15T13:00:00Z/2018-03-15T13:34:00Z")
require.Equal(t, queries[0].Params["aggregation"][0], "Average")
require.Equal(t, queries[0].Params["interval"][0], "PT1M")
require.Equal(t, queries[0].Alias, "testalias")
})
t.Run("and has a time grain set to auto", func(t *testing.T) {
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "auto",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights"
}
}`)
var err error
tsdbQuery[0].Interval, err = time.ParseDuration("400s")
require.NoError(t, err)
queries, err := datasource.buildQueries(tsdbQuery)
require.NoError(t, err)
require.Equal(t, queries[0].Params["interval"][0], "PT15M")
})
t.Run("and has an empty time grain", func(t *testing.T) {
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights"
}
}`)
tsdbQuery[0].Interval, _ = time.ParseDuration("400s")
queries, err := datasource.buildQueries(tsdbQuery)
require.NoError(t, err)
require.Equal(t, queries[0].Params["interval"][0], "PT15M")
})
t.Run("and has a time grain set to auto and the metric has a limited list of allowed time grains", func(t *testing.T) {
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "auto",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
"allowedTimeGrainsMs": [60000, 300000]
}
}`)
tsdbQuery[0].Interval, _ = time.ParseDuration("400s")
queries, err := datasource.buildQueries(tsdbQuery)
require.NoError(t, err)
require.Equal(t, queries[0].Params["interval"][0], "PT5M")
})
t.Run("and has a dimension filter", func(t *testing.T) {
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "PT1M",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
"dimension": "blob",
"dimensionFilter": "blob eq '*'"
}
}`)
queries, err := datasource.buildQueries(tsdbQuery)
require.NoError(t, err)
require.Equal(t, queries[0].Target, "aggregation=Average&filter=blob+eq+%27%2A%27&interval=PT1M&segment=blob&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
require.Equal(t, queries[0].Params["filter"][0], "blob eq '*'")
})
t.Run("and has a dimension filter set to None", func(t *testing.T) {
tsdbQuery[0].JSON = []byte(`{
"appInsights": {
"rawQuery": false,
"timeGrain": "PT1M",
"aggregation": "Average",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Application Insights",
"dimension": "None"
}
}`)
queries, err := datasource.buildQueries(tsdbQuery)
require.NoError(t, err)
require.Equal(t, queries[0].Target, "aggregation=Average&interval=PT1M&timespan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z")
})
})
})
}
func TestInsightsDimensionsUnmarshalJSON(t *testing.T) {
a := []byte(`"foo"`)
b := []byte(`["foo"]`)
c := []byte(`["none"]`)
d := []byte(`["None"]`)
e := []byte("null")
f := []byte(`""`)
g := []byte(`"none"`)
var as InsightsDimensions
var bs InsightsDimensions
err := json.Unmarshal(a, &as)
require.NoError(t, err)
require.Equal(t, []string{"foo"}, []string(as))
err = json.Unmarshal(b, &bs)
require.NoError(t, err)
require.Equal(t, []string{"foo"}, []string(bs))
var cs InsightsDimensions
err = json.Unmarshal(c, &cs)
require.NoError(t, err)
require.Empty(t, cs)
var ds InsightsDimensions
err = json.Unmarshal(d, &ds)
require.NoError(t, err)
require.Empty(t, ds)
var es InsightsDimensions
err = json.Unmarshal(e, &es)
require.NoError(t, err)
require.Empty(t, es)
var fs InsightsDimensions
err = json.Unmarshal(f, &fs)
require.NoError(t, err)
require.Empty(t, fs)
var gs InsightsDimensions
err = json.Unmarshal(g, &gs)
require.NoError(t, err)
require.Empty(t, gs)
}
func TestAppInsightsCreateRequest(t *testing.T) {
ctx := context.Background()
url := "http://ds"
dsInfo := types.DatasourceInfo{
Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"},
DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "key",
},
}
tests := []struct {
name string
expectedURL string
Err require.ErrorAssertionFunc
}{
{
name: "creates a request",
expectedURL: "http://ds/v1/apps/foo",
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ds := ApplicationInsightsDatasource{}
req, err := ds.createRequest(ctx, dsInfo, url)
tt.Err(t, err)
if req.URL.String() != tt.expectedURL {
t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
}
})
}
}

View File

@@ -1,314 +0,0 @@
package deprecated
import (
"encoding/json"
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// InsightsMetricsResultToFrame converts a MetricsResult (an Application Insights metrics query response) to a dataframe.
// Due to the dynamic nature of the MetricsResult object, the name of the metric, aggregation,
// and requested dimensions are used to determine the expected shape of the object.
// This builds all series into a single data.Frame with one time index (a wide formatted time series frame).
func InsightsMetricsResultToFrame(mr MetricsResult, metric, agg string, dimensions []string) (*data.Frame, error) {
dimLen := len(dimensions)
// The Response has both Start and End times, so we name the column "StartTime".
frame := data.NewFrame("", data.NewField("StartTime", nil, []time.Time{}))
fieldIdxMap := map[string]int{} // a map of a string representation of the labels to the Field index in the frame.
rowCounter := 0 // row in the resulting frame
if mr.Value == nil { // not seen in practice, but guard against a nil value so we don't panic
return nil, fmt.Errorf("unexpected nil response or response value in metrics result")
}
for _, seg := range *mr.Value.Segments { // each top level segment in the response shares timestamps.
frame.Extend(1)
frame.Set(0, rowCounter, seg.Start) // field 0 is the time field
labels := data.Labels{}
// handleLeafSegment is for the leaf MetricsSegmentInfo nodes in the response.
// A leaf node contains an aggregated value, and when there are multiple dimensions, a label key/value pair.
handleLeafSegment := func(s MetricsSegmentInfo) error {
// since this is a dynamic response, everything we are interested in here from JSON
// is Marshalled (mapped) into the AdditionalProperties property.
v, err := valFromLeafAP(s.AdditionalProperties, metric, agg)
if err != nil {
return err
}
if dimLen != 0 { // when there are dimensions, the final dimension is in this inner segment.
dimension := dimensions[dimLen-1]
dimVal, err := dimValueFromAP(s.AdditionalProperties, dimension)
if err != nil {
return err
}
labels[dimension] = dimVal
}
if _, ok := fieldIdxMap[labels.String()]; !ok {
// When we find a new combination of labels for the metric, a new Field is appended.
frame.Fields = append(frame.Fields, data.NewField(metric, labels.Copy(), make([]*float64, rowCounter+1)))
fieldIdxMap[labels.String()] = len(frame.Fields) - 1
}
frame.Set(fieldIdxMap[labels.String()], rowCounter, v)
return nil
}
// Simple case with no segments/dimensions
if dimLen == 0 {
if err := handleLeafSegment(seg); err != nil {
return nil, err
}
rowCounter++
continue
}
// Multiple dimension case
var traverse func(segments *[]MetricsSegmentInfo, depth int) error
// traverse walks segments collecting dimensions into labels until leaf segments are
// reached, and then handleInnerSegment is called. The final k/v label pair is
// in the leaf segment.
// A non-recursive implementation would probably be better.
traverse = func(segments *[]MetricsSegmentInfo, depth int) error {
if segments == nil {
return nil
}
for _, seg := range *segments {
if seg.Segments == nil {
if err := handleLeafSegment(seg); err != nil {
return err
}
continue
}
dimension := dimensions[depth]
dimVal, err := dimValueFromAP(seg.AdditionalProperties, dimension)
if err != nil {
return err
}
labels[dimension] = dimVal
if err := traverse(seg.Segments, depth+1); err != nil {
return err
}
}
return nil
}
if err := traverse(seg.Segments, 0); err != nil {
return nil, err
}
rowCounter++
}
if len(frame.Fields) == 1 { // No data, only a time column, no sort
return frame, nil
}
if err := data.SortWideFrameFields(frame, dimensions...); err != nil {
return nil, err
}
return frame, nil
}
// valFromLeafAP extracts value for the given metric and aggregation (agg)
// from the dynamic AdditionalProperties properties of a leaf node. It is for use in the InsightsMetricsResultToFrame
// function.
func valFromLeafAP(ap map[string]interface{}, metric, agg string) (*float64, error) {
if ap == nil {
return nil, fmt.Errorf("expected additional properties for metric %v not found in leaf segment", metric)
}
met, ok := ap[metric]
if !ok {
return nil, fmt.Errorf("expected additional properties for metric %v not found in leaf segment", metric)
}
metMap, ok := met.(map[string]interface{})
if !ok {
return nil, fmt.Errorf("unexpected type for additional properties not found in leaf segment, want map[string]interface{}, but got %T", met)
}
metVal, ok := metMap[agg]
if !ok {
return nil, fmt.Errorf("expected value for aggregation %v not found in leaf segment", agg)
}
var v *float64
if val, ok := metVal.(float64); ok {
v = &val
}
return v, nil
}
// dimValueFromAP fetches the value as a string for the corresponding dimension from the dynamic AdditionalProperties properties of a leaf node. It is for use in the InsightsMetricsResultToFrame
// function.
func dimValueFromAP(ap map[string]interface{}, dimension string) (string, error) {
rawDimValue, ok := ap[dimension]
if !ok {
return "", fmt.Errorf("expected dimension key %v not found in response", dimension)
}
dimValue, ok := rawDimValue.(string)
if !ok {
return "", fmt.Errorf("unexpected non-string value for the value for dimension %v, got type %T with a value of %v", dimension, rawDimValue, dimValue)
}
return dimValue, nil
}
// MetricsResult a metric result.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights.
type MetricsResult struct {
Value *MetricsResultInfo `json:"value,omitempty"`
}
// MetricsResultInfo a metric result data.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
type MetricsResultInfo struct {
// AdditionalProperties - Unmatched properties from the message are deserialized into this collection
AdditionalProperties map[string]interface{} `json:""`
// Start - Start time of the metric.
Start time.Time `json:"start,omitempty"`
// End - End time of the metric.
End time.Time `json:"end,omitempty"`
// Interval - The interval used to segment the metric data.
Interval *string `json:"interval,omitempty"`
// Segments - Segmented metric data (if segmented).
Segments *[]MetricsSegmentInfo `json:"segments,omitempty"`
}
// MetricsSegmentInfo is a metric segment.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
type MetricsSegmentInfo struct {
// AdditionalProperties - Unmatched properties from the message are deserialized into this collection
AdditionalProperties map[string]interface{} `json:""`
// Start - Start time of the metric segment (only when an interval was specified).
Start time.Time `json:"start,omitempty"`
// End - End time of the metric segment (only when an interval was specified).
End time.Time `json:"end,omitempty"`
// Segments - Segmented metric data (if further segmented).
Segments *[]MetricsSegmentInfo `json:"segments,omitempty"`
}
// UnmarshalJSON is the custom unmarshaler for MetricsSegmentInfo struct.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
func (mri *MetricsSegmentInfo) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
default:
if v != nil {
var additionalProperties interface{}
err = json.Unmarshal(*v, &additionalProperties)
if err != nil {
return err
}
if mri.AdditionalProperties == nil {
mri.AdditionalProperties = make(map[string]interface{})
}
mri.AdditionalProperties[k] = additionalProperties
}
case "start":
if v != nil {
var start time.Time
err = json.Unmarshal(*v, &start)
if err != nil {
return err
}
mri.Start = start
}
case "end":
if v != nil {
var end time.Time
err = json.Unmarshal(*v, &end)
if err != nil {
return err
}
mri.End = end
}
case "segments":
if v != nil {
var segments []MetricsSegmentInfo
err = json.Unmarshal(*v, &segments)
if err != nil {
return err
}
mri.Segments = &segments
}
}
}
return nil
}
// UnmarshalJSON is the custom unmarshaler for MetricsResultInfo struct.
// This is copied from azure-sdk-for-go/services/preview/appinsights/v1/insights (except time Type is changed).
func (mri *MetricsResultInfo) UnmarshalJSON(body []byte) error {
var m map[string]*json.RawMessage
err := json.Unmarshal(body, &m)
if err != nil {
return err
}
for k, v := range m {
switch k {
default:
if v != nil {
var additionalProperties interface{}
err = json.Unmarshal(*v, &additionalProperties)
if err != nil {
return err
}
if mri.AdditionalProperties == nil {
mri.AdditionalProperties = make(map[string]interface{})
}
mri.AdditionalProperties[k] = additionalProperties
}
case "start":
if v != nil {
var start time.Time
err = json.Unmarshal(*v, &start)
if err != nil {
return err
}
mri.Start = start
}
case "end":
if v != nil {
var end time.Time
err = json.Unmarshal(*v, &end)
if err != nil {
return err
}
mri.End = end
}
case "interval":
if v != nil {
var interval string
err = json.Unmarshal(*v, &interval)
if err != nil {
return err
}
mri.Interval = &interval
}
case "segments":
if v != nil {
var segments []MetricsSegmentInfo
err = json.Unmarshal(*v, &segments)
if err != nil {
return err
}
mri.Segments = &segments
}
}
}
return nil
}

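The dynamic response shape the converter walks is easiest to see on a tiny invented payload. The sketch below is written as if it lived alongside this (now removed) package: one top-level segment supplies the timestamp, and its leaf segments carry the "blob" dimension plus the value/avg aggregation in AdditionalProperties, which is exactly what dimValueFromAP and valFromLeafAP read:

package deprecated

import (
	"encoding/json"
	"fmt"
)

func exampleInsightsMetricsToFrame() error {
	// Illustrative payload, not captured from a real Application Insights API response.
	raw := []byte(`{
		"value": {
			"start": "2019-09-13T01:02:03.456Z",
			"end": "2019-09-13T02:02:03.456Z",
			"segments": [
				{
					"start": "2019-09-13T01:02:03.456Z",
					"end": "2019-09-13T02:02:03.456Z",
					"segments": [
						{"blob": "a", "value": {"avg": 1}},
						{"blob": "b", "value": {"avg": 3}}
					]
				}
			]
		}
	}`)
	var mr MetricsResult
	if err := json.Unmarshal(raw, &mr); err != nil {
		return err
	}
	frame, err := InsightsMetricsResultToFrame(mr, "value", "avg", []string{"blob"})
	if err != nil {
		return err
	}
	// One row (one top-level segment), with a StartTime field plus one
	// "value" field per blob label.
	fmt.Println(frame.Rows(), len(frame.Fields))
	return nil
}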
View File

@@ -1,192 +0,0 @@
package deprecated
import (
"encoding/json"
"os"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
)
func TestInsightsMetricsResultToFrame(t *testing.T) {
tests := []struct {
name string
testFile string
metric string
alias string
agg string
dimensions []string
expectedFrame func() *data.Frame
}{
{
name: "single series",
testFile: "applicationinsights/4-application-insights-response-metrics-no-segment.json",
metric: "value",
agg: "avg",
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2019, 9, 13, 1, 2, 3, 456789000, time.UTC),
time.Date(2019, 9, 13, 2, 2, 3, 456789000, time.UTC),
}),
data.NewField("value", nil, []*float64{
pointer.Float64(1),
pointer.Float64(2),
}),
)
return frame
},
},
{
name: "empty response",
testFile: "applicationinsights/5-application-insights-empty-response.json",
metric: "value",
agg: "avg",
expectedFrame: func() *data.Frame {
frame := data.NewFrame("", data.NewField("StartTime", nil, []time.Time{}))
return frame
},
},
{
name: "segmented series",
testFile: "applicationinsights/4-application-insights-response-metrics-segmented.json",
metric: "value",
agg: "avg",
dimensions: []string{"blob"},
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2019, 9, 13, 1, 2, 3, 456789000, time.UTC),
time.Date(2019, 9, 13, 2, 2, 3, 456789000, time.UTC),
}),
data.NewField("value", data.Labels{"blob": "a"}, []*float64{
pointer.Float64(1),
pointer.Float64(2),
}),
data.NewField("value", data.Labels{"blob": "b"}, []*float64{
pointer.Float64(3),
pointer.Float64(4),
}),
)
return frame
},
},
{
name: "multi segmented series",
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json",
metric: "traces/count",
agg: "sum",
dimensions: []string{"client/countryOrRegion", "client/city"},
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC),
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC),
}),
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{
nil,
pointer.Float64(1),
}),
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(11),
}),
data.NewField("traces/count", data.Labels{"client/city": "Chicago", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(3),
}),
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
pointer.Float64(1),
}),
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
nil,
}),
)
return frame
},
},
{
name: "segmented series with alias",
testFile: "applicationinsights/4-application-insights-response-metrics-multi-segmented.json",
metric: "traces/count",
alias: "{{ metric }}: Country,City: {{ client/countryOrRegion }},{{ client/city }}",
agg: "sum",
dimensions: []string{"client/countryOrRegion", "client/city"},
expectedFrame: func() *data.Frame {
frame := data.NewFrame("",
data.NewField("StartTime", nil, []time.Time{
time.Date(2020, 6, 25, 16, 15, 32, 14e7, time.UTC),
time.Date(2020, 6, 25, 16, 16, 0, 0, time.UTC),
}),
data.NewField("traces/count", data.Labels{"client/city": "Tokyo", "client/countryOrRegion": "Japan"}, []*float64{
nil,
pointer.Float64(1),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: Japan,Tokyo"}),
data.NewField("traces/count", data.Labels{"client/city": "", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(11),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,"}),
data.NewField("traces/count", data.Labels{"client/city": "Chicago", "client/countryOrRegion": "United States"}, []*float64{
nil,
pointer.Float64(3),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Chicago"}),
data.NewField("traces/count", data.Labels{"client/city": "Des Moines", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
pointer.Float64(1),
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Des Moines"}),
data.NewField("traces/count", data.Labels{"client/city": "Washington", "client/countryOrRegion": "United States"}, []*float64{
pointer.Float64(2),
nil,
}).SetConfig(&data.FieldConfig{DisplayName: "traces/count: Country,City: United States,Washington"}),
)
return frame
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
res := loadInsightsMetricsResponse(t, tt.testFile)
frame, err := InsightsMetricsResultToFrame(res, tt.metric, tt.agg, tt.dimensions)
require.NoError(t, err)
applyInsightsMetricAlias(frame, tt.alias)
if diff := cmp.Diff(tt.expectedFrame(), frame, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}
func loadInsightsMetricsResponse(t *testing.T, name string) MetricsResult {
t.Helper()
path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test
// nolint:gosec
f, err := os.Open(path)
require.NoError(t, err)
defer func() {
err := f.Close()
require.NoError(t, err)
}()
d := json.NewDecoder(f)
var mr MetricsResult
err = d.Decode(&mr)
require.NoError(t, err)
return mr
}

View File

@@ -1,20 +0,0 @@
package deprecated
import (
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
)
func GetAppInsightsMiddleware(url, appInsightsApiKey string) httpclient.Middleware {
if appInsightsApiKey != "" && url == AzAppInsights.URL || url == AzChinaAppInsights.URL {
// Inject API-Key for AppInsights
return httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
req.Header.Set("X-API-Key", appInsightsApiKey)
return next.RoundTrip(req)
})
})
}
return nil
}

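The returned value is the plugin SDK's httpclient.Middleware, so it can be exercised on its own by wrapping any RoundTripper. A minimal sketch with a made-up key and a local test server; in the datasource itself the middleware is attached through sdkhttpclient.Options.Middlewares, as the http_client.go hunk further down shows:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"

	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
)

func main() {
	// Echo the X-API-Key header back so the injected value is visible.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, r.Header.Get("X-API-Key"))
	}))
	defer srv.Close()

	// Same shape as GetAppInsightsMiddleware above, minus the URL check.
	mw := httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
		return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
			req.Header.Set("X-API-Key", "example-key")
			return next.RoundTrip(req)
		})
	})

	client := &http.Client{Transport: mw.CreateMiddleware(httpclient.Options{}, http.DefaultTransport)}
	resp, err := client.Get(srv.URL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // example-key
}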
View File

@@ -1,190 +0,0 @@
package deprecated
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil"
)
type InsightsAnalyticsDatasource struct {
Proxy types.ServiceProxy
}
type InsightsAnalyticsQuery struct {
RefID string
RawQuery string
InterpolatedQuery string
ResultFormat string
Params url.Values
Target string
}
func (e *InsightsAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
func (e *InsightsAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
queries, err := e.buildQueries(originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*InsightsAnalyticsQuery, error) {
iaQueries := []*InsightsAnalyticsQuery{}
for _, query := range queries {
qm := InsightsAnalyticsQuery{}
queryJSONModel := insightsAnalyticsJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Application Insights Analytics query object from JSON: %w", err)
}
qm.RawQuery = queryJSONModel.InsightsAnalytics.Query
qm.ResultFormat = queryJSONModel.InsightsAnalytics.ResultFormat
qm.RefID = query.RefID
if qm.RawQuery == "" {
return nil, fmt.Errorf("query is missing query string property")
}
qm.InterpolatedQuery, err = macros.KqlInterpolate(query, dsInfo, qm.RawQuery)
if err != nil {
return nil, err
}
qm.Params = url.Values{}
qm.Params.Add("query", qm.InterpolatedQuery)
qm.Target = qm.Params.Encode()
iaQueries = append(iaQueries, &qm)
}
return iaQueries, nil
}
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
dataResponseError := func(err error) backend.DataResponse {
dataResponse.Error = err
return dataResponse
}
req, err := e.createRequest(ctx, dsInfo, url)
if err != nil {
return dataResponseError(err)
}
req.URL.Path = path.Join(req.URL.Path, "query")
req.URL.RawQuery = query.Params.Encode()
ctx, span := tracer.Start(ctx, "application insights analytics query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
if err != nil {
azlog.Warn("failed to inject global tracer")
}
azlog.Debug("ApplicationInsights", "Request URL", req.URL.String())
res, err := client.Do(req)
if err != nil {
return dataResponseError(err)
}
body, err := ioutil.ReadAll(res.Body)
if err != nil {
return dataResponseError(err)
}
defer func() {
if err := res.Body.Close(); err != nil {
azlog.Warn("Failed to close response body", "err", err)
}
}()
if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return dataResponseError(fmt.Errorf("request failed, status: %s, body: %s", res.Status, body))
}
var logResponse loganalytics.AzureLogAnalyticsResponse
d := json.NewDecoder(bytes.NewReader(body))
d.UseNumber()
err = d.Decode(&logResponse)
if err != nil {
return dataResponseError(err)
}
t, err := logResponse.GetPrimaryResultTable()
if err != nil {
return dataResponseError(err)
}
frame, err := loganalytics.ResponseTableToFrame(t)
if err != nil {
return dataResponseError(err)
}
if query.ResultFormat == types.TimeSeries {
tsSchema := frame.TimeSeriesSchema()
if tsSchema.Type == data.TimeSeriesTypeLong {
wideFrame, err := data.LongToWide(frame, nil)
if err == nil {
frame = wideFrame
} else {
frame.AppendNotices(data.Notice{
Severity: data.NoticeSeverityWarning,
Text: "could not convert frame to time series, returning raw table: " + err.Error(),
})
}
}
}
dataResponse.Frames = data.Frames{frame}
return dataResponse
}
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
appInsightsAppID := dsInfo.Settings.AppInsightsAppId
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
azlog.Debug("Failed to create request", "error", err)
return nil, errutil.Wrap("Failed to create request", err)
}
req.URL.Path = fmt.Sprintf("/v1/apps/%s", appInsightsAppID)
return req, nil
}

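The time-series branch above leans on data.LongToWide from the plugin SDK to pivot a long-format table into one field per label combination. A toy illustration (the frame contents are invented, not real Application Insights output):

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	t0 := time.Date(2020, 6, 25, 16, 15, 0, 0, time.UTC)
	// A long frame: a time column, a numeric column, and a string column
	// whose values become labels on the per-series fields.
	long := data.NewFrame("",
		data.NewField("TimeGenerated", nil, []time.Time{t0, t0, t0.Add(time.Minute), t0.Add(time.Minute)}),
		data.NewField("count", nil, []float64{1, 2, 3, 4}),
		data.NewField("city", nil, []string{"Tokyo", "Chicago", "Tokyo", "Chicago"}),
	)
	wide, err := data.LongToWide(long, nil)
	if err != nil {
		panic(err)
	}
	// Two rows and three fields: time plus one "count" field per city label.
	fmt.Println(wide.Rows(), len(wide.Fields))
}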
View File

@@ -1,45 +0,0 @@
package deprecated
import (
"context"
"net/http"
"testing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
func TestInsightsAnalyticsCreateRequest(t *testing.T) {
ctx := context.Background()
url := "http://ds"
dsInfo := types.DatasourceInfo{
Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"},
DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "key",
},
}
tests := []struct {
name string
expectedURL string
expectedHeaders http.Header
Err require.ErrorAssertionFunc
}{
{
name: "creates a request",
expectedURL: "http://ds/v1/apps/foo",
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ds := InsightsAnalyticsDatasource{}
req, err := ds.createRequest(ctx, dsInfo, url)
tt.Err(t, err)
if req.URL.String() != tt.expectedURL {
t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
}
})
}
}

View File

@@ -1,23 +0,0 @@
package deprecated
import (
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// Azure cloud query types
const (
AppInsights = "Application Insights"
InsightsAnalytics = "Insights Analytics"
)
var AzAppInsights = types.AzRoute{
URL: "https://api.applicationinsights.io",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var AzChinaAppInsights = types.AzRoute{
URL: "https://api.applicationinsights.azure.cn",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}

View File

@@ -1,72 +0,0 @@
package deprecated
import (
"encoding/json"
"fmt"
"strings"
)
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
type insightsJSONQuery struct {
AppInsights struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimensions InsightsDimensions `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
MetricName string `json:"metricName"`
TimeGrain string `json:"timeGrain"`
} `json:"appInsights"`
Raw *bool `json:"raw"`
}
// InsightsDimensions will unmarshal from a JSON string, or an array of strings,
// into a string array. This exists to support an older query format which is updated
// when a user saves the query or it is sent from the front end, but may not be when
// alerting fetches the model.
type InsightsDimensions []string
// UnmarshalJSON fulfills the json.Unmarshaler interface type.
func (s *InsightsDimensions) UnmarshalJSON(data []byte) error {
*s = InsightsDimensions{}
if string(data) == "null" || string(data) == "" {
return nil
}
if strings.ToLower(string(data)) == `"none"` {
return nil
}
if data[0] == '[' {
var sa []string
err := json.Unmarshal(data, &sa)
if err != nil {
return err
}
dimensions := []string{}
for _, v := range sa {
if v == "none" || v == "None" {
continue
}
dimensions = append(dimensions, v)
}
*s = InsightsDimensions(dimensions)
return nil
}
var str string
err := json.Unmarshal(data, &str)
if err != nil {
return fmt.Errorf("could not parse %q as string or array: %w", string(data), err)
}
if str != "" {
*s = InsightsDimensions{str}
return nil
}
return nil
}
type insightsAnalyticsJSONQuery struct {
InsightsAnalytics struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"insightsAnalytics"`
}

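Because the dimension field accepts both the legacy string form and the newer array form, decoding the same query twice shows the normalization. A short sketch written as if it sat next to this (now removed) file; the payloads are invented:

package deprecated

import (
	"encoding/json"
	"fmt"
)

func exampleDimensionForms() {
	for _, raw := range []string{
		`{"appInsights": {"metricName": "value", "dimension": "blob"}}`,
		`{"appInsights": {"metricName": "value", "dimension": ["blob", "none"]}}`,
	} {
		var q insightsJSONQuery
		if err := json.Unmarshal([]byte(raw), &q); err != nil {
			panic(err)
		}
		// Both print [blob]: the string form is wrapped, and "none"/"None"
		// entries are dropped.
		fmt.Println(q.AppInsights.Dimensions)
	}
}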
View File

@@ -8,30 +8,12 @@ import (
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
func getMiddlewares(route types.AzRoute, model types.DatasourceInfo) ([]sdkhttpclient.Middleware, error) {
var middlewares []sdkhttpclient.Middleware
// Remove with Grafana 9
if apiKeyMiddleware := deprecated.GetAppInsightsMiddleware(route.URL, model.DecryptedSecureJSONData["appInsightsApiKey"]); apiKeyMiddleware != nil {
middlewares = append(middlewares, apiKeyMiddleware)
}
return middlewares, nil
}
func newHTTPClient(route types.AzRoute, model types.DatasourceInfo, cfg *setting.Cfg, clientProvider httpclient.Provider) (*http.Client, error) {
m, err := getMiddlewares(route, model)
if err != nil {
return nil, err
}
opts := sdkhttpclient.Options{
Headers: route.Headers,
Middlewares: m,
Headers: route.Headers,
}
// Use Azure credentials if the route has OAuth scopes configured

View File

@@ -10,62 +10,11 @@ import (
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestHttpClient_Middlewares(t *testing.T) {
tests := []struct {
name string
route types.AzRoute
model types.DatasourceInfo
expectedMiddlewares int
Err require.ErrorAssertionFunc
}{
{
name: "creates an HTTP client with a middleware due to an app key",
route: types.AzRoute{
URL: deprecated.AzAppInsights.URL,
Scopes: []string{},
},
model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{},
DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "foo",
},
},
expectedMiddlewares: 1,
Err: require.NoError,
},
{
name: "creates an HTTP client without a middleware",
route: types.AzRoute{
URL: "http://route",
Scopes: []string{},
},
model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{},
},
expectedMiddlewares: 0,
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
m, err := getMiddlewares(tt.route, tt.model)
require.NoError(t, err)
// Cannot test that the cli middleware works properly since the azcore sdk
// rejects the TLS certs (if provided)
if len(m) != tt.expectedMiddlewares {
t.Errorf("Unexpected middlewares: %v", m)
}
})
}
}
func TestHttpClient_AzureCredentials(t *testing.T) {
model := types.DatasourceInfo{
Credentials: &azcredentials.AzureManagedIdentityCredentials{},
@@ -76,7 +25,6 @@ func TestHttpClient_AzureCredentials(t *testing.T) {
t.Run("should have Azure middleware when scopes provided", func(t *testing.T) {
route := types.AzRoute{
URL: deprecated.AzAppInsights.URL,
Scopes: []string{"https://management.azure.com/.default"},
}
@@ -90,7 +38,6 @@ func TestHttpClient_AzureCredentials(t *testing.T) {
t.Run("should not have Azure middleware when scopes are not provided", func(t *testing.T) {
route := types.AzRoute{
URL: deprecated.AzAppInsights.URL,
Scopes: []string{},
}

View File

@@ -3,7 +3,6 @@ package azuremonitor
import (
"github.com/grafana/grafana-azure-sdk-go/azsettings"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
@@ -61,11 +60,9 @@ var (
// and the service to query (e.g. Azure Monitor or Azure Log Analytics)
routes = map[string]map[string]types.AzRoute{
azsettings.AzurePublic: {
azureMonitor: azManagement,
azureLogAnalytics: azLogAnalytics,
azureResourceGraph: azManagement,
deprecated.AppInsights: deprecated.AzAppInsights,
deprecated.InsightsAnalytics: deprecated.AzAppInsights,
azureMonitor: azManagement,
azureLogAnalytics: azLogAnalytics,
azureResourceGraph: azManagement,
},
azsettings.AzureUSGovernment: {
azureMonitor: azUSGovManagement,
@@ -76,11 +73,9 @@ var (
azureMonitor: azGermanyManagement,
},
azsettings.AzureChina: {
azureMonitor: azChinaManagement,
azureLogAnalytics: azChinaLogAnalytics,
azureResourceGraph: azChinaManagement,
deprecated.AppInsights: deprecated.AzChinaAppInsights,
deprecated.InsightsAnalytics: deprecated.AzChinaAppInsights,
azureMonitor: azChinaManagement,
azureLogAnalytics: azChinaLogAnalytics,
azureResourceGraph: azChinaManagement,
},
}
)