AzureMonitor: Move Application Insights and Insight Analytics to a deprecated package (#45834)

Andres Martinez Gotor 2022-03-02 06:41:07 -08:00 committed by GitHub
parent 3427ae463d
commit 700f6863f2
33 changed files with 523 additions and 387 deletions

View File

@ -0,0 +1,23 @@
package azlog
import "github.com/grafana/grafana/pkg/infra/log"
var (
azlog = log.New("tsdb.azuremonitor")
)
func Warn(msg string, args ...interface{}) {
azlog.Warn(msg, args...)
}
func Debug(msg string, args ...interface{}) {
azlog.Debug(msg, args...)
}
func Error(msg string, args ...interface{}) {
azlog.Error(msg, args...)
}
func Info(msg string, args ...interface{}) {
azlog.Info(msg, args...)
}
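The new azlog package re-exports the shared "tsdb.azuremonitor" logger so the split-out sub-packages keep logging under one name. A minimal sketch of a call site, mirroring the key/value style used by the handlers later in this commit (the package and function names here are illustrative):

package metrics // any azuremonitor sub-package; illustrative placement

import "github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"

func logRequestFailure(status, body string) {
	// Structured key/value pairs, matching the Debug/Warn calls further down in this commit.
	azlog.Debug("Request failed", "status", status, "body", body)
}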

View File

@ -8,6 +8,9 @@ import (
"strings"
"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
func getTarget(original string) (target string, err error) {
@ -63,16 +66,16 @@ func (s *httpServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *ht
return rw
}
func (s *Service) getDataSourceFromHTTPReq(req *http.Request) (datasourceInfo, error) {
func (s *Service) getDataSourceFromHTTPReq(req *http.Request) (types.DatasourceInfo, error) {
ctx := req.Context()
pluginContext := httpadapter.PluginConfigFromContext(ctx)
i, err := s.im.Get(pluginContext)
if err != nil {
return datasourceInfo{}, nil
return types.DatasourceInfo{}, nil
}
ds, ok := i.(datasourceInfo)
ds, ok := i.(types.DatasourceInfo)
if !ok {
return datasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
return types.DatasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
}
return ds, nil
}
@ -111,7 +114,7 @@ func (s *Service) handleResourceReq(subDataSource string) func(rw http.ResponseW
req.URL.Host = serviceURL.Host
req.URL.Scheme = serviceURL.Scheme
s.executors[subDataSource].resourceRequest(rw, req, service.HTTPClient)
s.executors[subDataSource].ResourceRequest(rw, req, service.HTTPClient)
}
}
@ -120,8 +123,9 @@ func (s *Service) handleResourceReq(subDataSource string) func(rw http.ResponseW
func (s *Service) newResourceMux() *http.ServeMux {
mux := http.NewServeMux()
mux.HandleFunc("/azuremonitor/", s.handleResourceReq(azureMonitor))
mux.HandleFunc("/appinsights/", s.handleResourceReq(appInsights))
mux.HandleFunc("/loganalytics/", s.handleResourceReq(azureLogAnalytics))
mux.HandleFunc("/resourcegraph/", s.handleResourceReq(azureResourceGraph))
// Remove with Grafana 9
mux.HandleFunc("/appinsights/", s.handleResourceReq(deprecated.AppInsights))
return mux
}
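Because each registered pattern ends with a trailing slash, Go's http.ServeMux does subtree (prefix) matching, so any sub-path such as /appinsights/metrics/requests still reaches the matching handler until the deprecated route is dropped. A self-contained sketch of just that routing behavior (placeholder handlers, not the proxy code above):

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/loganalytics/", func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, "log analytics") })
	// Kept for backwards compatibility; remove with Grafana 9.
	mux.HandleFunc("/appinsights/", func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, "app insights") })

	// Subtree match: the full sub-path is routed to the /appinsights/ handler.
	req := httptest.NewRequest(http.MethodGet, "/appinsights/metrics/requests", nil)
	rec := httptest.NewRecorder()
	mux.ServeHTTP(rec, req)
	fmt.Println(rec.Body.String()) // app insights
}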

View File

@ -7,6 +7,8 @@ import (
"testing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/metrics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
@ -95,7 +97,7 @@ func Test_handleResourceReq(t *testing.T) {
proxy := &fakeProxy{}
s := Service{
im: &fakeInstance{
services: map[string]datasourceService{
services: map[string]types.DatasourceService{
azureMonitor: {
URL: routes[setting.AzurePublic][azureMonitor].URL,
HTTPClient: &http.Client{},
@ -103,8 +105,8 @@ func Test_handleResourceReq(t *testing.T) {
},
},
executors: map[string]azDatasourceExecutor{
azureMonitor: &AzureMonitorDatasource{
proxy: proxy,
azureMonitor: &metrics.AzureMonitorDatasource{
Proxy: proxy,
},
},
}

View File

@ -5,39 +5,38 @@ import (
"encoding/json"
"fmt"
"net/http"
"regexp"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
)
const (
timeSeries = "time_series"
)
var (
azlog = log.New("tsdb.azuremonitor")
legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/metrics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
func ProvideService(cfg *setting.Cfg, httpClientProvider *httpclient.Provider, tracer tracing.Tracer) *Service {
proxy := &httpServiceProxy{}
executors := map[string]azDatasourceExecutor{
azureMonitor: &AzureMonitorDatasource{proxy: proxy},
appInsights: &ApplicationInsightsDatasource{proxy: proxy},
azureLogAnalytics: &AzureLogAnalyticsDatasource{proxy: proxy},
insightsAnalytics: &InsightsAnalyticsDatasource{proxy: proxy},
azureResourceGraph: &AzureResourceGraphDatasource{proxy: proxy},
azureMonitor: &metrics.AzureMonitorDatasource{Proxy: proxy},
azureLogAnalytics: &loganalytics.AzureLogAnalyticsDatasource{Proxy: proxy},
azureResourceGraph: &resourcegraph.AzureResourceGraphDatasource{Proxy: proxy},
}
// Insights Analytics and Application Insights were deprecated in Grafana 8.x and
// will be finally removed with Grafana 9
if setting.BuildVersion != "" && semver.MustParse(setting.BuildVersion).Compare(semver.MustParse("9.0.0-beta1")) < 0 {
executors[deprecated.InsightsAnalytics] = &deprecated.InsightsAnalyticsDatasource{Proxy: proxy}
executors[deprecated.AppInsights] = &deprecated.ApplicationInsightsDatasource{Proxy: proxy}
}
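A standalone check of how that gate behaves, assuming Masterminds/semver's standard ordering, in which a pre-release sorts before its release, so a "9.0.0-beta1" build already drops the deprecated executors:

package main

import (
	"fmt"

	"github.com/Masterminds/semver"
)

// includeDeprecated mirrors the registration condition above.
func includeDeprecated(buildVersion string) bool {
	return buildVersion != "" &&
		semver.MustParse(buildVersion).Compare(semver.MustParse("9.0.0-beta1")) < 0
}

func main() {
	for _, v := range []string{"8.5.0", "9.0.0-beta1", "9.0.0"} {
		fmt.Println(v, includeDeprecated(v)) // 8.5.0 true, 9.0.0-beta1 false, 9.0.0 false
	}
}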
im := datasource.NewInstanceManager(NewInstanceSettings(cfg, *httpClientProvider, executors))
s := &Service{
@ -60,10 +59,6 @@ func (s *Service) CallResource(ctx context.Context, req *backend.CallResourceReq
return s.resourceHandler.CallResource(ctx, req, sender)
}
type serviceProxy interface {
Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter
}
type Service struct {
im instancemgmt.InstanceManager
executors map[string]azDatasourceExecutor
@ -73,37 +68,13 @@ type Service struct {
tracer tracing.Tracer
}
type azureMonitorSettings struct {
SubscriptionId string `json:"subscriptionId"`
LogAnalyticsDefaultWorkspace string `json:"logAnalyticsDefaultWorkspace"`
AppInsightsAppId string `json:"appInsightsAppId"`
}
type datasourceInfo struct {
Cloud string
Credentials azcredentials.AzureCredentials
Settings azureMonitorSettings
Routes map[string]azRoute
Services map[string]datasourceService
JSONData map[string]interface{}
DecryptedSecureJSONData map[string]string
DatasourceID int64
OrgID int64
}
type datasourceService struct {
URL string
HTTPClient *http.Client
}
func getDatasourceService(cfg *setting.Cfg, clientProvider httpclient.Provider, dsInfo datasourceInfo, routeName string) (datasourceService, error) {
func getDatasourceService(cfg *setting.Cfg, clientProvider httpclient.Provider, dsInfo types.DatasourceInfo, routeName string) (types.DatasourceService, error) {
route := dsInfo.Routes[routeName]
client, err := newHTTPClient(route, dsInfo, cfg, clientProvider)
if err != nil {
return datasourceService{}, err
return types.DatasourceService{}, err
}
return datasourceService{
return types.DatasourceService{
URL: dsInfo.Routes[routeName].URL,
HTTPClient: client,
}, nil
@ -122,7 +93,7 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
return nil, fmt.Errorf("error reading settings: %w", err)
}
azMonitorSettings := azureMonitorSettings{}
azMonitorSettings := types.AzureMonitorSettings{}
err = json.Unmarshal(settings.JSONData, &azMonitorSettings)
if err != nil {
return nil, fmt.Errorf("error reading settings: %w", err)
@ -138,7 +109,7 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
return nil, fmt.Errorf("error getting credentials: %w", err)
}
model := datasourceInfo{
model := types.DatasourceInfo{
Cloud: cloud,
Credentials: credentials,
Settings: azMonitorSettings,
@ -146,7 +117,7 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
DecryptedSecureJSONData: settings.DecryptedSecureJSONData,
DatasourceID: settings.ID,
Routes: routes[cloud],
Services: map[string]datasourceService{},
Services: map[string]types.DatasourceService{},
}
for routeName := range executors {
@ -162,18 +133,18 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
}
type azDatasourceExecutor interface {
executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error)
resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client)
ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error)
ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client)
}
func (s *Service) getDataSourceFromPluginReq(req *backend.QueryDataRequest) (datasourceInfo, error) {
func (s *Service) getDataSourceFromPluginReq(req *backend.QueryDataRequest) (types.DatasourceInfo, error) {
i, err := s.im.Get(req.PluginContext)
if err != nil {
return datasourceInfo{}, err
return types.DatasourceInfo{}, err
}
dsInfo, ok := i.(datasourceInfo)
dsInfo, ok := i.(types.DatasourceInfo)
if !ok {
return datasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
return types.DatasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
}
dsInfo.OrgID = req.PluginContext.OrgID
return dsInfo, nil
@ -194,7 +165,7 @@ func (s *Service) newQueryMux() *datasource.QueryTypeMux {
if !ok {
return nil, fmt.Errorf("missing service for %s", dst)
}
return executor.executeTimeSeriesQuery(ctx, req.Queries, dsInfo, service.HTTPClient, service.URL, s.tracer)
return executor.ExecuteTimeSeriesQuery(ctx, req.Queries, dsInfo, service.HTTPClient, service.URL, s.tracer)
})
}
return mux

View File

@ -12,14 +12,45 @@ import (
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProvideService(t *testing.T) {
t.Run("it should skip insight analytics and app insights with Grafana 9", func(t *testing.T) {
currentV := setting.BuildVersion
t.Cleanup(func() {
setting.BuildVersion = currentV
})
versions := []struct {
version string
shouldIncludeInsights bool
}{
{"8.5.0", true},
{"9.0.0-beta1", false},
{"9.0.0", false},
}
for _, v := range versions {
setting.BuildVersion = v.version
s := ProvideService(setting.NewCfg(), httpclient.NewProvider(), nil)
if v.shouldIncludeInsights {
assert.NotNil(t, s.executors[deprecated.InsightsAnalytics])
assert.NotNil(t, s.executors[deprecated.AppInsights])
} else {
assert.Nil(t, s.executors[deprecated.InsightsAnalytics])
assert.Nil(t, s.executors[deprecated.AppInsights])
}
}
})
}
func TestNewInstanceSettings(t *testing.T) {
tests := []struct {
name string
settings backend.DataSourceInstanceSettings
expectedModel datasourceInfo
expectedModel types.DatasourceInfo
Err require.ErrorAssertionFunc
}{
{
@ -29,15 +60,15 @@ func TestNewInstanceSettings(t *testing.T) {
DecryptedSecureJSONData: map[string]string{"key": "value"},
ID: 40,
},
expectedModel: datasourceInfo{
expectedModel: types.DatasourceInfo{
Cloud: setting.AzurePublic,
Credentials: &azcredentials.AzureManagedIdentityCredentials{},
Settings: azureMonitorSettings{},
Settings: types.AzureMonitorSettings{},
Routes: routes[setting.AzurePublic],
JSONData: map[string]interface{}{"azureAuthType": "msi"},
DatasourceID: 40,
DecryptedSecureJSONData: map[string]string{"key": "value"},
Services: map[string]datasourceService{},
Services: map[string]types.DatasourceService{},
},
Err: require.NoError,
},
@ -62,12 +93,12 @@ func TestNewInstanceSettings(t *testing.T) {
}
type fakeInstance struct {
routes map[string]azRoute
services map[string]datasourceService
routes map[string]types.AzRoute
services map[string]types.DatasourceService
}
func (f *fakeInstance) Get(pluginContext backend.PluginContext) (instancemgmt.Instance, error) {
return datasourceInfo{
return types.DatasourceInfo{
Routes: f.routes,
Services: f.services,
}, nil
@ -83,10 +114,10 @@ type fakeExecutor struct {
expectedURL string
}
func (f *fakeExecutor) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
func (f *fakeExecutor) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
}
func (f *fakeExecutor) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client,
func (f *fakeExecutor) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
if client == nil {
f.t.Errorf("The HTTP client for %s is missing", f.queryType)
@ -124,7 +155,7 @@ func Test_newMux(t *testing.T) {
s := &Service{
im: &fakeInstance{
routes: routes[azureMonitorPublic],
services: map[string]datasourceService{
services: map[string]types.DatasourceService{
tt.queryType: {
URL: routes[azureMonitorPublic][tt.queryType].URL,
HTTPClient: &http.Client{},

View File

@ -1,4 +1,4 @@
package azuremonitor
package deprecated
import (
"context"
@ -15,6 +15,9 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp"
@ -22,7 +25,7 @@ import (
// ApplicationInsightsDatasource calls the application insights query API.
type ApplicationInsightsDatasource struct {
proxy serviceProxy
Proxy types.ServiceProxy
}
// ApplicationInsightsQuery is the model that holds the information
@ -44,12 +47,12 @@ type ApplicationInsightsQuery struct {
aggregation string
}
func (e *ApplicationInsightsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli)
func (e *ApplicationInsightsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client,
func (e *ApplicationInsightsDatasource) ExecuteTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
@ -93,7 +96,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery
// Previous versions of the query model don't specify a time grain, so we
// need to fallback to a default value
if timeGrain == "auto" || timeGrain == "" {
timeGrain, err = setAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
@ -130,7 +133,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery
return applicationInsightsQueries, nil
}
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo datasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (
backend.DataResponse, error) {
dataResponse := backend.DataResponse{}
@ -194,7 +197,7 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
return dataResponse, nil
}
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
appInsightsAppID := dsInfo.Settings.AppInsightsAppId
req, err := http.NewRequest(http.MethodGet, url, nil)
@ -221,7 +224,7 @@ func formatApplicationInsightsLegendKey(alias string, metricName string, labels
}
keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

View File

@ -1,4 +1,4 @@
package azuremonitor
package deprecated
import (
"context"
@ -7,6 +7,7 @@ import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
@ -204,8 +205,8 @@ func TestInsightsDimensionsUnmarshalJSON(t *testing.T) {
func TestAppInsightsCreateRequest(t *testing.T) {
ctx := context.Background()
url := "http://ds"
dsInfo := datasourceInfo{
Settings: azureMonitorSettings{AppInsightsAppId: "foo"},
dsInfo := types.DatasourceInfo{
Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"},
DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "key",
},

View File

@ -1,4 +1,4 @@
package azuremonitor
package deprecated
import (
"encoding/json"

View File

@ -1,4 +1,4 @@
package azuremonitor
package deprecated
import (
"encoding/json"
@ -173,7 +173,7 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
func loadInsightsMetricsResponse(t *testing.T, name string) MetricsResult {
t.Helper()
path := filepath.Join("testdata", name)
path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test
// nolint:gosec
f, err := os.Open(path)

View File

@ -0,0 +1,20 @@
package deprecated
import (
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
)
func GetAppInsightsMiddleware(url, appInsightsApiKey string) httpclient.Middleware {
if appInsightsApiKey != "" && (url == AzAppInsights.URL || url == AzChinaAppInsights.URL) {
// Inject API-Key for AppInsights
return httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
req.Header.Set("X-API-Key", appInsightsApiKey)
return next.RoundTrip(req)
})
})
}
return nil
}
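Illustrative expectations for the helper, based only on the routes defined in this package; this sketch is not part of the commit:

package deprecated_test

import (
	"testing"

	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
)

func TestGetAppInsightsMiddlewareSketch(t *testing.T) {
	if deprecated.GetAppInsightsMiddleware(deprecated.AzAppInsights.URL, "secret") == nil {
		t.Error("expected a middleware for an App Insights route with an API key")
	}
	if deprecated.GetAppInsightsMiddleware("https://management.azure.com", "secret") != nil {
		t.Error("expected no middleware for a non App Insights route")
	}
	if deprecated.GetAppInsightsMiddleware(deprecated.AzChinaAppInsights.URL, "") != nil {
		t.Error("expected no middleware when the API key is empty")
	}
}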

View File

@ -1,4 +1,4 @@
package azuremonitor
package deprecated
import (
"bytes"
@ -13,13 +13,17 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp"
)
type InsightsAnalyticsDatasource struct {
proxy serviceProxy
Proxy types.ServiceProxy
}
type InsightsAnalyticsQuery struct {
@ -34,12 +38,12 @@ type InsightsAnalyticsQuery struct {
Target string
}
func (e *InsightsAnalyticsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli)
func (e *InsightsAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client,
func (e *InsightsAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
@ -55,7 +59,7 @@ func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
return result, nil
}
func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*InsightsAnalyticsQuery, error) {
func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*InsightsAnalyticsQuery, error) {
iaQueries := []*InsightsAnalyticsQuery{}
for _, query := range queries {
@ -74,7 +78,7 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
return nil, fmt.Errorf("query is missing query string property")
}
qm.InterpolatedQuery, err = KqlInterpolate(query, dsInfo, qm.RawQuery)
qm.InterpolatedQuery, err = macros.KqlInterpolate(query, dsInfo, qm.RawQuery)
if err != nil {
return nil, err
}
@ -88,7 +92,7 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
return iaQueries, nil
}
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo datasourceInfo, client *http.Client,
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
@ -136,7 +140,7 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return dataResponseError(fmt.Errorf("request failed, status: %s, body: %s", res.Status, body))
}
var logResponse AzureLogAnalyticsResponse
var logResponse loganalytics.AzureLogAnalyticsResponse
d := json.NewDecoder(bytes.NewReader(body))
d.UseNumber()
err = d.Decode(&logResponse)
@ -149,12 +153,12 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
return dataResponseError(err)
}
frame, err := ResponseTableToFrame(t)
frame, err := loganalytics.ResponseTableToFrame(t)
if err != nil {
return dataResponseError(err)
}
if query.ResultFormat == timeSeries {
if query.ResultFormat == types.TimeSeries {
tsSchema := frame.TimeSeriesSchema()
if tsSchema.Type == data.TimeSeriesTypeLong {
wideFrame, err := data.LongToWide(frame, nil)
@ -173,7 +177,7 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
return dataResponse
}
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
appInsightsAppID := dsInfo.Settings.AppInsightsAppId
req, err := http.NewRequest(http.MethodGet, url, nil)

View File

@ -1,18 +1,19 @@
package azuremonitor
package deprecated
import (
"context"
"net/http"
"testing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
func TestInsightsAnalyticsCreateRequest(t *testing.T) {
ctx := context.Background()
url := "http://ds"
dsInfo := datasourceInfo{
Settings: azureMonitorSettings{AppInsightsAppId: "foo"},
dsInfo := types.DatasourceInfo{
Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"},
DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "key",
},

View File

@ -0,0 +1,23 @@
package deprecated
import (
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// Azure cloud query types
const (
AppInsights = "Application Insights"
InsightsAnalytics = "Insights Analytics"
)
var AzAppInsights = types.AzRoute{
URL: "https://api.applicationinsights.io",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var AzChinaAppInsights = types.AzRoute{
URL: "https://api.applicationinsights.azure.cn",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}

View File

@ -0,0 +1,72 @@
package deprecated
import (
"encoding/json"
"fmt"
"strings"
)
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
type insightsJSONQuery struct {
AppInsights struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimensions InsightsDimensions `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
MetricName string `json:"metricName"`
TimeGrain string `json:"timeGrain"`
} `json:"appInsights"`
Raw *bool `json:"raw"`
}
// InsightsDimensions will unmarshal from a JSON string, or an array of strings,
// into a string array. This exists to support an older query format which is updated
// when a user saves the query or it is sent from the front end, but may not be when
// alerting fetches the model.
type InsightsDimensions []string
// UnmarshalJSON fulfills the json.Unmarshaler interface type.
func (s *InsightsDimensions) UnmarshalJSON(data []byte) error {
*s = InsightsDimensions{}
if string(data) == "null" || string(data) == "" {
return nil
}
if strings.ToLower(string(data)) == `"none"` {
return nil
}
if data[0] == '[' {
var sa []string
err := json.Unmarshal(data, &sa)
if err != nil {
return err
}
dimensions := []string{}
for _, v := range sa {
if v == "none" || v == "None" {
continue
}
dimensions = append(dimensions, v)
}
*s = InsightsDimensions(dimensions)
return nil
}
var str string
err := json.Unmarshal(data, &str)
if err != nil {
return fmt.Errorf("could not parse %q as string or array: %w", string(data), err)
}
if str != "" {
*s = InsightsDimensions{str}
return nil
}
return nil
}
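A few concrete inputs and what they decode to under these rules; the dimension names are arbitrary examples:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
)

func main() {
	for _, raw := range []string{`"client/city"`, `["client/city","client/os"]`, `"none"`, `null`} {
		var dims deprecated.InsightsDimensions
		if err := json.Unmarshal([]byte(raw), &dims); err != nil {
			fmt.Println(raw, "error:", err)
			continue
		}
		fmt.Printf("%s -> %v\n", raw, dims)
	}
	// "client/city"                -> [client/city]
	// ["client/city","client/os"] -> [client/city client/os]
	// "none"                      -> []  (treated as no dimensions)
	// null                        -> []
}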
type insightsAnalyticsJSONQuery struct {
InsightsAnalytics struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"insightsAnalytics"`
}

View File

@ -6,9 +6,11 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/aztokenprovider"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
func getMiddlewares(route azRoute, model datasourceInfo, cfg *setting.Cfg) ([]httpclient.Middleware, error) {
func getMiddlewares(route types.AzRoute, model types.DatasourceInfo, cfg *setting.Cfg) ([]httpclient.Middleware, error) {
middlewares := []httpclient.Middleware{}
if len(route.Scopes) > 0 {
@ -19,21 +21,15 @@ func getMiddlewares(route azRoute, model datasourceInfo, cfg *setting.Cfg) ([]ht
middlewares = append(middlewares, aztokenprovider.AuthMiddleware(tokenProvider, route.Scopes))
}
if _, ok := model.DecryptedSecureJSONData["appInsightsApiKey"]; ok && (route.URL == azAppInsights.URL || route.URL == azChinaAppInsights.URL) {
// Inject API-Key for AppInsights
apiKeyMiddleware := httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
req.Header.Set("X-API-Key", model.DecryptedSecureJSONData["appInsightsApiKey"])
return next.RoundTrip(req)
})
})
// Remove with Grafana 9
if apiKeyMiddleware := deprecated.GetAppInsightsMiddleware(route.URL, model.DecryptedSecureJSONData["appInsightsApiKey"]); apiKeyMiddleware != nil {
middlewares = append(middlewares, apiKeyMiddleware)
}
return middlewares, nil
}
func newHTTPClient(route azRoute, model datasourceInfo, cfg *setting.Cfg, clientProvider httpclient.Provider) (*http.Client, error) {
func newHTTPClient(route types.AzRoute, model types.DatasourceInfo, cfg *setting.Cfg, clientProvider httpclient.Provider) (*http.Client, error) {
m, err := getMiddlewares(route, model, cfg)
if err != nil {
return nil, err

View File

@ -5,6 +5,8 @@ import (
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
@ -12,18 +14,18 @@ func Test_httpCliProvider(t *testing.T) {
cfg := &setting.Cfg{}
tests := []struct {
name string
route azRoute
model datasourceInfo
route types.AzRoute
model types.DatasourceInfo
expectedMiddlewares int
Err require.ErrorAssertionFunc
}{
{
name: "creates an HTTP client with a middleware due to the scope",
route: azRoute{
route: types.AzRoute{
URL: "http://route",
Scopes: []string{"http://route/.default"},
},
model: datasourceInfo{
model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{},
},
expectedMiddlewares: 1,
@ -31,11 +33,11 @@ func Test_httpCliProvider(t *testing.T) {
},
{
name: "creates an HTTP client with a middleware due to an app key",
route: azRoute{
URL: azAppInsights.URL,
route: types.AzRoute{
URL: deprecated.AzAppInsights.URL,
Scopes: []string{},
},
model: datasourceInfo{
model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{},
DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "foo",
@ -46,11 +48,11 @@ func Test_httpCliProvider(t *testing.T) {
},
{
name: "creates an HTTP client without a middleware",
route: azRoute{
route: types.AzRoute{
URL: "http://route",
Scopes: []string{},
},
model: datasourceInfo{
model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{},
},
expectedMiddlewares: 0,

View File

@ -1,4 +1,4 @@
package azuremonitor
package loganalytics
import (
"bytes"
@ -17,6 +17,9 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp"
@ -24,7 +27,7 @@ import (
// AzureLogAnalyticsDatasource calls the Azure Log Analytics APIs
type AzureLogAnalyticsDatasource struct {
proxy serviceProxy
Proxy types.ServiceProxy
}
// AzureLogAnalyticsQuery is the query request that is built from the saved values for
@ -39,15 +42,15 @@ type AzureLogAnalyticsQuery struct {
TimeRange backend.TimeRange
}
func (e *AzureLogAnalyticsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli)
func (e *AzureLogAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client,
func (e *AzureLogAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
@ -63,7 +66,7 @@ func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
return result, nil
}
func getApiURL(queryJSONModel logJSONQuery) string {
func getApiURL(queryJSONModel types.LogJSONQuery) string {
// Legacy queries only specify a Workspace GUID, so we need to use the old workspace-centric
// API URL for them, while newer queries that specify a resource URI should use the resource-centric API.
// However, legacy workspace queries using a `workspaces()` template variable will be resolved
@ -86,11 +89,11 @@ func getApiURL(queryJSONModel logJSONQuery) string {
}
}
func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureLogAnalyticsQuery, error) {
func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureLogAnalyticsQuery, error) {
azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
for _, query := range queries {
queryJSONModel := logJSONQuery{}
queryJSONModel := types.LogJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
@ -101,13 +104,13 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
resultFormat := azureLogAnalyticsTarget.ResultFormat
if resultFormat == "" {
resultFormat = timeSeries
resultFormat = types.TimeSeries
}
apiURL := getApiURL(queryJSONModel)
params := url.Values{}
rawQuery, err := KqlInterpolate(query, dsInfo, azureLogAnalyticsTarget.Query, "TimeGenerated")
rawQuery, err := macros.KqlInterpolate(query, dsInfo, azureLogAnalyticsTarget.Query, "TimeGenerated")
if err != nil {
return nil, err
}
@ -127,7 +130,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
return azureLogAnalyticsQueries, nil
}
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo datasourceInfo, client *http.Client,
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
@ -204,7 +207,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
azlog.Warn("failed to add custom metadata to azure log analytics response", err)
}
if query.ResultFormat == timeSeries {
if query.ResultFormat == types.TimeSeries {
tsSchema := frame.TimeSeriesSchema()
if tsSchema.Type == data.TimeSeriesTypeLong {
wideFrame, err := data.LongToWide(frame, nil)
@ -220,7 +223,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
return dataResponse
}
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil {
azlog.Debug("Failed to create request", "error", err)
@ -232,9 +235,14 @@ func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo
return req, nil
}
// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
Tables []types.AzureResponseTable `json:"tables"`
}
// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
// error if there is no table by that name.
func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*AzureResponseTable, error) {
func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*types.AzureResponseTable, error) {
for _, t := range ar.Tables {
if t.Name == "PrimaryResult" {
return &t, nil

View File

@ -1,4 +1,4 @@
package azuremonitor
package loganalytics
import (
"context"
@ -12,6 +12,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
@ -37,7 +38,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
RefID: "A",
TimeRange: timeRange,
},
@ -45,7 +46,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{
RefID: "A",
ResultFormat: timeSeries,
ResultFormat: types.TimeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
@ -54,7 +55,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer"}},
Target: "query=query%3DPerf+%7C+where+%5B%27TimeGenerated%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27TimeGenerated%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+where+%5B%27Computer%27%5D+in+%28%27comp1%27%2C%27comp2%27%29+%7C+summarize+avg%28CounterValue%29+by+bin%28TimeGenerated%2C+34000ms%29%2C+Computer",
TimeRange: timeRange,
@ -74,14 +75,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
RefID: "A",
},
},
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{
RefID: "A",
ResultFormat: timeSeries,
ResultFormat: types.TimeSeries,
URL: "v1/workspaces/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query",
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
@ -90,7 +91,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
@ -109,14 +110,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
RefID: "A",
},
},
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{
RefID: "A",
ResultFormat: timeSeries,
ResultFormat: types.TimeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
@ -125,7 +126,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
@ -144,14 +145,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
RefID: "A",
},
},
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{
RefID: "A",
ResultFormat: timeSeries,
ResultFormat: types.TimeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics",
@ -160,7 +161,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf",
"resultFormat": "%s"
}
}`, timeSeries)),
}`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf",
},
@ -171,7 +172,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
queries, err := datasource.buildQueries(tt.queryModel, datasourceInfo{})
queries, err := datasource.buildQueries(tt.queryModel, types.DatasourceInfo{})
tt.Err(t, err)
if diff := cmp.Diff(tt.azureLogAnalyticsQueries[0], queries[0]); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
@ -183,7 +184,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
func TestLogAnalyticsCreateRequest(t *testing.T) {
ctx := context.Background()
url := "http://ds"
dsInfo := datasourceInfo{}
dsInfo := types.DatasourceInfo{}
tests := []struct {
name string
@ -216,9 +217,9 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
func Test_executeQueryErrorWithDifferentLogAnalyticsCreds(t *testing.T) {
ds := AzureLogAnalyticsDatasource{}
dsInfo := datasourceInfo{
Services: map[string]datasourceService{
azureLogAnalytics: {URL: "http://ds"},
dsInfo := types.DatasourceInfo{
Services: map[string]types.DatasourceService{
"Azure Log Analytics": {URL: "http://ds"},
},
JSONData: map[string]interface{}{
"azureLogAnalyticsSameAs": false,
@ -231,7 +232,7 @@ func Test_executeQueryErrorWithDifferentLogAnalyticsCreds(t *testing.T) {
}
tracer, err := tracing.InitializeTracerForTest()
require.NoError(t, err)
res := ds.executeQuery(ctx, query, dsInfo, &http.Client{}, dsInfo.Services[azureLogAnalytics].URL, tracer)
res := ds.executeQuery(ctx, query, dsInfo, &http.Client{}, dsInfo.Services["Azure Log Analytics"].URL, tracer)
if res.Error == nil {
t.Fatal("expecting an error")
}

View File

@ -1,4 +1,4 @@
package azuremonitor
package loganalytics
import (
"encoding/json"
@ -8,10 +8,11 @@ import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// ResponseTableToFrame converts an AzureResponseTable to a data.Frame.
func ResponseTableToFrame(table *AzureResponseTable) (*data.Frame, error) {
func ResponseTableToFrame(table *types.AzureResponseTable) (*data.Frame, error) {
converterFrame, err := converterFrameForTable(table)
if err != nil {
return nil, err
@ -27,7 +28,7 @@ func ResponseTableToFrame(table *AzureResponseTable) (*data.Frame, error) {
return converterFrame.Frame, nil
}
func converterFrameForTable(t *AzureResponseTable) (*data.FrameInputConverter, error) {
func converterFrameForTable(t *types.AzureResponseTable) (*data.FrameInputConverter, error) {
converters := []data.FieldConverter{}
colNames := make([]string, len(t.Columns))
colTypes := make([]string, len(t.Columns)) // for metadata

View File

@ -1,4 +1,4 @@
package azuremonitor
package loganalytics
import (
"encoding/json"
@ -156,7 +156,7 @@ func TestLogTableToFrame(t *testing.T) {
func loadLogAnalyticsTestFileWithNumber(t *testing.T, name string) AzureLogAnalyticsResponse {
t.Helper()
path := filepath.Join("testdata", name)
path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test
// nolint:gosec
f, err := os.Open(path)

View File

@ -1,4 +1,4 @@
package azuremonitor
package macros
import (
"fmt"
@ -9,6 +9,8 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/tsdb/legacydata/interval"
)
@ -31,7 +33,7 @@ type kqlMacroEngine struct {
// - $__escapeMulti('\\vm\eth0\Total','\\vm\eth2\Total') -> @'\\vm\eth0\Total',@'\\vm\eth2\Total'
// KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries
func KqlInterpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string, defaultTimeField ...string) (string, error) {
func KqlInterpolate(query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField ...string) (string, error) {
engine := kqlMacroEngine{}
defaultTimeFieldForAllDatasources := "timestamp"
@ -41,7 +43,7 @@ func KqlInterpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string,
return engine.Interpolate(query, dsInfo, kql, defaultTimeFieldForAllDatasources)
}
func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string, defaultTimeField string) (string, error) {
func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField string) (string, error) {
m.timeRange = query.TimeRange
m.query = query
rExp, _ := regexp.Compile(sExpr)
@ -86,7 +88,7 @@ func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo datasourceI
return kql, nil
}
func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string, dsInfo datasourceInfo) (string, error) {
func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string, dsInfo types.DatasourceInfo) (string, error) {
switch name {
case "timeFilter":
timeColumn := defaultTimeField
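For a concrete sense of what KqlInterpolate produces, here is the query exercised by the Log Analytics tests later in this commit, before and after macro expansion (time range 2018-03-15T13:00:00Z to 2018-03-15T13:34:00Z; the interval resolves to 34000ms):

Stored query (with macros):
    query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer

Interpolated KQL:
    query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer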

View File

@ -1,4 +1,4 @@
package azuremonitor
package macros
import (
"testing"
@ -7,6 +7,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
)
@ -125,7 +126,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
defaultTimeField := "TimeGenerated"
rawQuery, err := KqlInterpolate(tt.query, datasourceInfo{}, tt.kql, defaultTimeField)
rawQuery, err := KqlInterpolate(tt.query, types.DatasourceInfo{}, tt.kql, defaultTimeField)
tt.Err(t, err)
if diff := cmp.Diff(tt.expected, rawQuery, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)

View File

@ -1,4 +1,4 @@
package azuremonitor
package metrics
import (
"context"
@ -16,6 +16,10 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp"
@ -23,28 +27,25 @@ import (
// AzureMonitorDatasource calls the Azure Monitor API - one of the four APIs supported
type AzureMonitorDatasource struct {
proxy serviceProxy
Proxy types.ServiceProxy
}
var (
// 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
)
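SetAutoTimeGrain now lives in the azTime package and its body is not part of this diff. Purely as an illustrative sketch of a closest-allowed-interval rule over the defaults above (hypothetical helper names, not the code moved by this commit):

package aztime // hypothetical placement, for illustration only

// pickClosestIntervalMS is a stand-in for the closest-match logic the tests call
// FindClosestAllowIntervalMS; it returns the allowed interval nearest the query interval.
func pickClosestIntervalMS(queryIntervalMS int64, allowedMS []int64) int64 {
	closest := allowedMS[0]
	for _, candidate := range allowedMS[1:] {
		if abs64(candidate-queryIntervalMS) < abs64(closest-queryIntervalMS) {
			closest = candidate
		}
	}
	return closest
}

func abs64(v int64) int64 {
	if v < 0 {
		return -v
	}
	return v
}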
const azureMonitorAPIVersion = "2018-01-01"
func (e *AzureMonitorDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli)
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client,
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
@ -60,12 +61,12 @@ func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, ori
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureMonitorQuery, error) {
azureMonitorQueries := []*AzureMonitorQuery{}
func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := azureMonitorJSONQuery{}
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
@ -93,7 +94,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = setAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
@ -135,7 +136,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
azlog.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
UrlComponents: urlComponents,
Target: target,
@ -149,7 +150,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, dsInfo datasourceInfo, cli *http.Client,
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
@ -191,7 +192,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
return dataResponse
}
azurePortalUrl, err := getAzurePortalUrl(dsInfo.Cloud)
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
@ -206,7 +207,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) {
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil {
azlog.Debug("Failed to create request", "error", err)
@ -218,28 +219,28 @@ func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datas
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMonitorResponse, error) {
func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (types.AzureMonitorResponse, error) {
body, err := ioutil.ReadAll(res.Body)
if err != nil {
return AzureMonitorResponse{}, err
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data AzureMonitorResponse
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
azlog.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return AzureMonitorResponse{}, err
return types.AzureMonitorResponse{}, err
}
return data, nil
}
func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
@ -303,7 +304,7 @@ func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *
frame.SetRow(i, point.TimeStamp, value)
}
frameWithLink := addConfigLinks(*frame, queryUrl)
frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink)
}
@ -311,7 +312,7 @@ func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *
}
// Gets the deep link for the given query
func getQueryUrl(query *AzureMonitorQuery, azurePortalUrl string) (string, error) {
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl string) (string, error) {
aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation")
if aggregation != "" {
@ -343,7 +344,7 @@ func getQueryUrl(query *AzureMonitorQuery, azurePortalUrl string) (string, error
chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{
map[string]interface{}{
"metrics": []metricChartDefinition{
"metrics": []types.MetricChartDefinition{
{
ResourceMetadata: map[string]string{
"id": id,
@ -351,7 +352,7 @@ func getQueryUrl(query *AzureMonitorQuery, azurePortalUrl string) (string, error
Name: query.Params.Get("metricnames"),
AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: metricVisualization{
MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: query.UrlComponents["resourceName"],
},
@ -387,7 +388,7 @@ func formatAzureMonitorLegendKey(alias string, resourceName string, metricName s
}
keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
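The alias handling above leans on the \{\{\s*(.+?)\s*\}\} pattern (legendKeyFormat before this commit, types.LegendKeyFormat after it); a small standalone illustration of the match-and-strip step shown in the snippet:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	legendKeyFormat := regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
	alias := "custom {{resourcegroup}} {{ metric }}"
	out := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
		// Same strip-and-normalize steps as in the snippet above; the real function
		// then looks up the label value for the extracted name.
		name := strings.Replace(string(in), "{{", "", 1)
		name = strings.Replace(name, "}}", "", 1)
		return []byte(strings.ToLower(strings.TrimSpace(name)))
	})
	fmt.Println(string(out)) // custom resourcegroup metric
}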

View File

@ -1,4 +1,4 @@
package azuremonitor
package metrics
import (
"context"
@ -16,14 +16,16 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require"
ptr "github.com/xorcare/pointer"
)
func TestAzureMonitorBuildQueries(t *testing.T) {
datasource := &AzureMonitorDatasource{}
dsInfo := datasourceInfo{
Settings: azureMonitorSettings{
dsInfo := types.DatasourceInfo{
Settings: types.AzureMonitorSettings{
SubscriptionId: "default-subscription",
},
}
@ -96,7 +98,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
name: "has dimensionFilter*s* property with one dimension",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []azureMonitorDimensionFilter{{"blob", "eq", "*"}},
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: "*"}},
"top": "30",
},
queryInterval: duration,
@ -107,7 +109,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
name: "has dimensionFilter*s* property with two dimensions",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []azureMonitorDimensionFilter{{"blob", "eq", "*"}, {"tier", "eq", "*"}},
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: "*"}, {Dimension: "tier", Operator: "eq", Filter: "*"}},
"top": "30",
},
queryInterval: duration,
@ -149,7 +151,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
},
}
azureMonitorQuery := &AzureMonitorQuery{
azureMonitorQuery := &types.AzureMonitorQuery{
URL: "12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
UrlComponents: map[string]string{
"metricDefinition": "Microsoft.Compute/virtualMachines",
@ -168,7 +170,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
queries, err := datasource.buildQueries(tsdbQuery, dsInfo)
require.NoError(t, err)
if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(AzureMonitorQuery{}, "Params")); diff != "" {
if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, "Params")); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
@ -219,14 +221,14 @@ func TestAzureMonitorParseResponse(t *testing.T) {
tests := []struct {
name string
responseFile string
mockQuery *AzureMonitorQuery
mockQuery *types.AzureMonitorQuery
expectedFrames data.Frames
queryIntervalMS int64
}{
{
name: "average aggregate time series response",
responseFile: "1-azure-monitor-response-avg.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{
"resourceName": "grafana",
},
@ -247,7 +249,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "total aggregate time series response",
responseFile: "2-azure-monitor-response-total.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{
"resourceName": "grafana",
},
@ -268,7 +270,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "maximum aggregate time series response",
responseFile: "3-azure-monitor-response-maximum.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{
"resourceName": "grafana",
},
@ -289,7 +291,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "minimum aggregate time series response",
responseFile: "4-azure-monitor-response-minimum.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{
"resourceName": "grafana",
},
@ -310,7 +312,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "count aggregate time series response",
responseFile: "5-azure-monitor-response-count.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{
"resourceName": "grafana",
},
@ -331,7 +333,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "single dimension time series response",
responseFile: "6-azure-monitor-response-single-dimension.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{
"resourceName": "grafana",
},
@ -365,7 +367,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "with alias patterns in the query",
responseFile: "2-azure-monitor-response-total.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
UrlComponents: map[string]string{
"resourceName": "grafana",
@ -387,7 +389,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "single dimension with alias",
responseFile: "6-azure-monitor-response-single-dimension.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
Alias: "{{dimensionname}}={{DimensionValue}}",
UrlComponents: map[string]string{
"resourceName": "grafana",
@ -424,7 +426,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "multiple dimension time series response with label alias",
responseFile: "7-azure-monitor-response-multi-dimension.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{Tier}}}",
UrlComponents: map[string]string{
"resourceName": "grafana",
@ -462,7 +464,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{
name: "unspecified unit with alias should not panic",
responseFile: "8-azure-monitor-response-unspecified-unit.json",
mockQuery: &AzureMonitorQuery{
mockQuery: &types.AzureMonitorQuery{
Alias: "custom",
UrlComponents: map[string]string{
"resourceName": "grafana",
@ -540,22 +542,22 @@ func TestFindClosestAllowIntervalMS(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
interval := findClosestAllowedIntervalMS(tt.inputInterval, tt.allowedTimeGrains)
interval := azTime.FindClosestAllowedIntervalMS(tt.inputInterval, tt.allowedTimeGrains)
require.Equal(t, tt.expectedInterval, interval)
})
}
}
func loadTestFile(t *testing.T, name string) AzureMonitorResponse {
func loadTestFile(t *testing.T, name string) types.AzureMonitorResponse {
t.Helper()
path := filepath.Join("testdata", name)
path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test
// nolint:gosec
jsonBody, err := ioutil.ReadFile(path)
require.NoError(t, err)
var azData AzureMonitorResponse
var azData types.AzureMonitorResponse
err = json.Unmarshal(jsonBody, &azData)
require.NoError(t, err)
return azData
@ -563,7 +565,7 @@ func loadTestFile(t *testing.T, name string) AzureMonitorResponse {
func TestAzureMonitorCreateRequest(t *testing.T) {
ctx := context.Background()
dsInfo := datasourceInfo{}
dsInfo := types.DatasourceInfo{}
url := "http://ds/"
tests := []struct {

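The dimensionFilters test cases above exercise the shared frontend model that now lives in the types package. A minimal sketch of how such a query body decodes and how a single filter renders, with field names taken from the AzureMonitorJSONQuery and AzureMonitorDimensionFilter definitions later in this change (the sample JSON itself is illustrative):

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down copy of types.AzureMonitorDimensionFilter.
type dimensionFilter struct {
	Dimension string `json:"dimension"`
	Operator  string `json:"operator"`
	Filter    string `json:"filter"`
}

func (f dimensionFilter) String() string {
	filter := "*"
	if f.Filter != "" {
		filter = f.Filter
	}
	return fmt.Sprintf("%v %v '%v'", f.Dimension, f.Operator, filter)
}

func main() {
	// Illustrative frontend payload; only the fields used here are shown.
	body := []byte(`{"azureMonitor": {"timeGrain": "PT1M", "dimensionFilters": [{"dimension": "blob", "operator": "eq", "filter": "*"}], "top": "30"}}`)

	var q struct {
		AzureMonitor struct {
			TimeGrain        string            `json:"timeGrain"`
			DimensionFilters []dimensionFilter `json:"dimensionFilters"`
			Top              string            `json:"top"`
		} `json:"azureMonitor"`
	}
	if err := json.Unmarshal(body, &q); err != nil {
		panic(err)
	}
	fmt.Println(q.AzureMonitor.DimensionFilters[0]) // blob eq '*'
}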
View File

@ -1,4 +1,4 @@
package azuremonitor
package metrics
import (
"fmt"

View File

@ -1,4 +1,4 @@
package azuremonitor
package metrics
import (
"testing"

View File

@ -1,4 +1,4 @@
package azuremonitor
package resourcegraph
import (
"bytes"
@ -17,14 +17,23 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp"
)
// AzureResourceGraphResponse is the json response object from the Azure Resource Graph Analytics API.
type AzureResourceGraphResponse struct {
Data types.AzureResponseTable `json:"data"`
}
// AzureResourceGraphDatasource calls the Azure Resource Graph API's
type AzureResourceGraphDatasource struct {
proxy serviceProxy
Proxy types.ServiceProxy
}
// AzureResourceGraphQuery is the query request that is built from the saved values for
@ -41,15 +50,15 @@ type AzureResourceGraphQuery struct {
const argAPIVersion = "2021-06-01-preview"
const argQueryProviderName = "/providers/Microsoft.ResourceGraph/resources"
func (e *AzureResourceGraphDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli)
func (e *AzureResourceGraphDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. builds the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client,
func (e *AzureResourceGraphDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := &backend.QueryDataResponse{
Responses: map[string]backend.DataResponse{},
@ -67,7 +76,14 @@ func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Contex
return result, nil
}
func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureResourceGraphQuery, error) {
type argJSONQuery struct {
AzureResourceGraph struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"azureResourceGraph"`
}
func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureResourceGraphQuery, error) {
var azureResourceGraphQueries []*AzureResourceGraphQuery
for _, query := range queries {
@ -85,7 +101,7 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
resultFormat = "table"
}
interpolatedQuery, err := KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query)
interpolatedQuery, err := macros.KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query)
if err != nil {
return nil, err
@ -103,7 +119,7 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
return azureResourceGraphQueries, nil
}
func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo datasourceInfo, client *http.Client,
func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo types.DatasourceInfo, client *http.Client,
dsURL string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
@ -173,18 +189,18 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
return dataResponseErrorWithExecuted(err)
}
frame, err := ResponseTableToFrame(&argResponse.Data)
frame, err := loganalytics.ResponseTableToFrame(&argResponse.Data)
if err != nil {
return dataResponseErrorWithExecuted(err)
}
azurePortalUrl, err := getAzurePortalUrl(dsInfo.Cloud)
azurePortalUrl, err := GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
return dataResponseErrorWithExecuted(err)
}
url := azurePortalUrl + "/#blade/HubsExtension/ArgQueryBlade/query/" + url.PathEscape(query.InterpolatedQuery)
frameWithLink := addConfigLinks(*frame, url)
frameWithLink := AddConfigLinks(*frame, url)
if frameWithLink.Meta == nil {
frameWithLink.Meta = &data.FrameMeta{}
}
@ -194,7 +210,7 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
return dataResponse
}
func addConfigLinks(frame data.Frame, dl string) data.Frame {
func AddConfigLinks(frame data.Frame, dl string) data.Frame {
for i := range frame.Fields {
if frame.Fields[i].Config == nil {
frame.Fields[i].Config = &data.FieldConfig{}
@ -209,7 +225,7 @@ func addConfigLinks(frame data.Frame, dl string) data.Frame {
return frame
}
func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, reqBody []byte, url string) (*http.Request, error) {
func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, reqBody []byte, url string) (*http.Request, error) {
req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(reqBody))
if err != nil {
azlog.Debug("Failed to create request", "error", err)
@ -250,7 +266,7 @@ func (e *AzureResourceGraphDatasource) unmarshalResponse(res *http.Response) (Az
return data, nil
}
func getAzurePortalUrl(azureCloud string) (string, error) {
func GetAzurePortalUrl(azureCloud string) (string, error) {
switch azureCloud {
case setting.AzurePublic:
return "https://portal.azure.com", nil

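After the frame conversion, the executed KQL is surfaced as a deep link into the Azure portal's Resource Graph blade. A minimal sketch of that link construction, with a portalURL helper standing in for GetAzurePortalUrl (only the public-cloud URL appears in this hunk; the other cloud names and URLs are assumptions for illustration):

package main

import (
	"fmt"
	"net/url"
)

// Stand-ins for the setting.Azure* cloud names used by the datasource.
const (
	azurePublic = "AzureCloud"
	azureChina  = "AzureChinaCloud"
	azureUSGov  = "AzureUSGovernment"
)

// portalURL is a sketch of GetAzurePortalUrl; only the public-cloud value is
// confirmed by the diff above, the rest are assumed.
func portalURL(cloud string) (string, error) {
	switch cloud {
	case azurePublic:
		return "https://portal.azure.com", nil
	case azureChina:
		return "https://portal.azure.cn", nil
	case azureUSGov:
		return "https://portal.azure.us", nil
	default:
		return "", fmt.Errorf("the cloud %q is not supported", cloud)
	}
}

func main() {
	base, err := portalURL(azurePublic)
	if err != nil {
		panic(err)
	}
	kql := `Resources | project name, type | limit 5`
	// Same shape as the link built in executeQuery above.
	deepLink := base + "/#blade/HubsExtension/ArgQueryBlade/query/" + url.PathEscape(kql)
	fmt.Println(deepLink)
}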
View File

@ -1,4 +1,4 @@
package azuremonitor
package resourcegraph
import (
"context"
@ -14,6 +14,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -68,7 +69,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
queries, err := datasource.buildQueries(tt.queryModel, datasourceInfo{})
queries, err := datasource.buildQueries(tt.queryModel, types.DatasourceInfo{})
tt.Err(t, err)
if diff := cmp.Diff(tt.azureResourceGraphQueries, queries, cmpopts.IgnoreUnexported(simplejson.Json{})); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
@ -80,7 +81,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
func TestAzureResourceGraphCreateRequest(t *testing.T) {
ctx := context.Background()
url := "http://ds"
dsInfo := datasourceInfo{}
dsInfo := types.DatasourceInfo{}
tests := []struct {
name string
@ -120,7 +121,7 @@ func TestAddConfigData(t *testing.T) {
frame := data.Frame{
Fields: []*data.Field{&field},
}
frameWithLink := addConfigLinks(frame, "http://ds")
frameWithLink := AddConfigLinks(frame, "http://ds")
expectedFrameWithLink := data.Frame{
Fields: []*data.Field{
{
@ -145,7 +146,7 @@ func TestGetAzurePortalUrl(t *testing.T) {
}
for _, cloud := range clouds {
azurePortalUrl, err := getAzurePortalUrl(cloud)
azurePortalUrl, err := GetAzurePortalUrl(cloud)
if err != nil {
t.Errorf("The cloud not supported")
}

View File

@ -1,71 +1,55 @@
package azuremonitor
import "github.com/grafana/grafana/pkg/setting"
import (
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// Azure cloud query types
const (
azureMonitor = "Azure Monitor"
appInsights = "Application Insights"
azureLogAnalytics = "Azure Log Analytics"
insightsAnalytics = "Insights Analytics"
azureResourceGraph = "Azure Resource Graph"
)
type azRoute struct {
URL string
Scopes []string
Headers map[string]string
}
var azManagement = azRoute{
var azManagement = types.AzRoute{
URL: "https://management.azure.com",
Scopes: []string{"https://management.azure.com/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azUSGovManagement = azRoute{
var azUSGovManagement = types.AzRoute{
URL: "https://management.usgovcloudapi.net",
Scopes: []string{"https://management.usgovcloudapi.net/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azGermanyManagement = azRoute{
var azGermanyManagement = types.AzRoute{
URL: "https://management.microsoftazure.de",
Scopes: []string{"https://management.microsoftazure.de/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azChinaManagement = azRoute{
var azChinaManagement = types.AzRoute{
URL: "https://management.chinacloudapi.cn",
Scopes: []string{"https://management.chinacloudapi.cn/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azAppInsights = azRoute{
URL: "https://api.applicationinsights.io",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azChinaAppInsights = azRoute{
URL: "https://api.applicationinsights.azure.cn",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azLogAnalytics = azRoute{
var azLogAnalytics = types.AzRoute{
URL: "https://api.loganalytics.io",
Scopes: []string{"https://api.loganalytics.io/.default"},
Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"},
}
var azChinaLogAnalytics = azRoute{
var azChinaLogAnalytics = types.AzRoute{
URL: "https://api.loganalytics.azure.cn",
Scopes: []string{"https://api.loganalytics.azure.cn/.default"},
Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"},
}
var azUSGovLogAnalytics = azRoute{
var azUSGovLogAnalytics = types.AzRoute{
URL: "https://api.loganalytics.us",
Scopes: []string{"https://api.loganalytics.us/.default"},
Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"},
@ -74,13 +58,13 @@ var azUSGovLogAnalytics = azRoute{
var (
// The different Azure routes are identified by its cloud (e.g. public or gov)
// and the service to query (e.g. Azure Monitor or Azure Log Analytics)
routes = map[string]map[string]azRoute{
routes = map[string]map[string]types.AzRoute{
setting.AzurePublic: {
azureMonitor: azManagement,
azureLogAnalytics: azLogAnalytics,
azureResourceGraph: azManagement,
appInsights: azAppInsights,
insightsAnalytics: azAppInsights,
azureMonitor: azManagement,
azureLogAnalytics: azLogAnalytics,
azureResourceGraph: azManagement,
deprecated.AppInsights: deprecated.AzAppInsights,
deprecated.InsightsAnalytics: deprecated.AzAppInsights,
},
setting.AzureUSGovernment: {
azureMonitor: azUSGovManagement,
@ -91,11 +75,11 @@ var (
azureMonitor: azGermanyManagement,
},
setting.AzureChina: {
azureMonitor: azChinaManagement,
azureLogAnalytics: azChinaLogAnalytics,
azureResourceGraph: azChinaManagement,
appInsights: azChinaAppInsights,
insightsAnalytics: azChinaAppInsights,
azureMonitor: azChinaManagement,
azureLogAnalytics: azChinaLogAnalytics,
azureResourceGraph: azChinaManagement,
deprecated.AppInsights: deprecated.AzChinaAppInsights,
deprecated.InsightsAnalytics: deprecated.AzChinaAppInsights,
},
}
)
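The route table is keyed first by cloud and then by query type, so resolving the service URL, scopes and headers for a request is a two-level map lookup. A minimal sketch of that lookup (the lookupRoute helper and the plain-string cloud names standing in for the setting.Azure* constants are illustrative; the URLs and headers match the entries above):

package main

import "fmt"

// Trimmed-down copy of types.AzRoute.
type azRoute struct {
	URL     string
	Scopes  []string
	Headers map[string]string
}

// routes mirrors the cloud -> query type -> route table above, reduced to two entries.
var routes = map[string]map[string]azRoute{
	"AzureCloud": {
		"Azure Monitor": {
			URL:     "https://management.azure.com",
			Scopes:  []string{"https://management.azure.com/.default"},
			Headers: map[string]string{"x-ms-app": "Grafana"},
		},
	},
	"AzureChinaCloud": {
		"Azure Monitor": {
			URL:     "https://management.chinacloudapi.cn",
			Scopes:  []string{"https://management.chinacloudapi.cn/.default"},
			Headers: map[string]string{"x-ms-app": "Grafana"},
		},
	},
}

// lookupRoute is an illustrative helper, not part of the change itself.
func lookupRoute(cloud, queryType string) (azRoute, error) {
	r, ok := routes[cloud][queryType]
	if !ok {
		return azRoute{}, fmt.Errorf("no route for cloud %q and query type %q", cloud, queryType)
	}
	return r, nil
}

func main() {
	r, err := lookupRoute("AzureCloud", "Azure Monitor")
	if err != nil {
		panic(err)
	}
	fmt.Println(r.URL) // https://management.azure.com
}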

View File

@ -1,10 +1,15 @@
package azuremonitor
package time
// setAutoTimeGrain tries to find the closest interval to the query's intervalMs value
var (
// 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
)
// SetAutoTimeGrain tries to find the closest interval to the query's intervalMs value
// if the metric has a limited set of possible intervals/time grains then use those
// instead of the default list of intervals
func setAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) {
autoInterval := findClosestAllowedIntervalMS(intervalMs, timeGrains)
func SetAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) {
autoInterval := FindClosestAllowedIntervalMS(intervalMs, timeGrains)
tg := &TimeGrain{}
autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval)
if err != nil {
@ -14,12 +19,12 @@ func setAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) {
return autoTimeGrain, nil
}
// findClosestAllowedIntervalMs is used for the auto time grain setting.
// FindClosestAllowedIntervalMS is used for the auto time grain setting.
// It finds the closest time grain from the list of allowed time grains for Azure Monitor
// using the Grafana interval in milliseconds
// Some metrics only allow a limited list of time grains. The allowedTimeGrains parameter
// allows overriding the default list of allowed time grains.
func findClosestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 {
func FindClosestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 {
allowedIntervals := defaultAllowedIntervalsMS
if len(allowedTimeGrains) > 0 {

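The body of FindClosestAllowedIntervalMS is truncated in this hunk; a minimal sketch of the selection rule it implements, under the assumption that it returns the smallest allowed grain that is at least the requested interval and falls back to the largest grain when the interval exceeds them all:

package main

import "fmt"

// 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds (same defaults as above).
var defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}

// closestAllowedIntervalMS is a sketch of the selection rule, not the exact
// implementation from the time package.
func closestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 {
	allowed := defaultAllowedIntervalsMS
	if len(allowedTimeGrains) > 0 {
		allowed = allowedTimeGrains
	}
	closest := allowed[len(allowed)-1]
	for _, grain := range allowed {
		if grain >= intervalMs {
			closest = grain
			break
		}
	}
	return closest
}

func main() {
	fmt.Println(closestAllowedIntervalMS(250000, nil))                     // 300000 (5m)
	fmt.Println(closestAllowedIntervalMS(250000, []int64{60000, 3600000})) // 3600000 (limited grains)
	fmt.Println(closestAllowedIntervalMS(172800000, nil))                  // 86400000 (caps at 1d)
}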
View File

@ -1,4 +1,4 @@
package azuremonitor
package time
import (
"fmt"

View File

@ -1,4 +1,4 @@
package azuremonitor
package time
import (
"testing"

View File

@ -1,15 +1,54 @@
package azuremonitor
package types
import (
"encoding/json"
"fmt"
"net/http"
"net/url"
"strings"
"regexp"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
)
const (
TimeSeries = "time_series"
)
var (
LegendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
)
type AzRoute struct {
URL string
Scopes []string
Headers map[string]string
}
type AzureMonitorSettings struct {
SubscriptionId string `json:"subscriptionId"`
LogAnalyticsDefaultWorkspace string `json:"logAnalyticsDefaultWorkspace"`
AppInsightsAppId string `json:"appInsightsAppId"`
}
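With the settings model exported, the datasource jsonData can be decoded straight into it. A minimal sketch, using an illustrative payload containing only the fields defined above:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down copy of types.AzureMonitorSettings.
type azureMonitorSettings struct {
	SubscriptionId               string `json:"subscriptionId"`
	LogAnalyticsDefaultWorkspace string `json:"logAnalyticsDefaultWorkspace"`
	AppInsightsAppId             string `json:"appInsightsAppId"`
}

func main() {
	// Illustrative jsonData blob as saved on the datasource.
	raw := []byte(`{"subscriptionId": "default-subscription", "logAnalyticsDefaultWorkspace": "workspace-1"}`)

	var settings azureMonitorSettings
	if err := json.Unmarshal(raw, &settings); err != nil {
		panic(err)
	}
	fmt.Println(settings.SubscriptionId) // default-subscription
}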
type DatasourceService struct {
URL string
HTTPClient *http.Client
}
type DatasourceInfo struct {
Cloud string
Credentials azcredentials.AzureCredentials
Settings AzureMonitorSettings
Routes map[string]AzRoute
Services map[string]DatasourceService
JSONData map[string]interface{}
DecryptedSecureJSONData map[string]string
DatasourceID int64
OrgID int64
}
// AzureMonitorQuery is the query for all the services as they have similar queries
// with a url, a querystring and an alias field
type AzureMonitorQuery struct {
@ -57,16 +96,6 @@ type AzureMonitorResponse struct {
Resourceregion string `json:"resourceregion"`
}
// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
Tables []AzureResponseTable `json:"tables"`
}
// AzureResourceGraphResponse is the json response object from the Azure Resource Graph Analytics API.
type AzureResourceGraphResponse struct {
Data AzureResponseTable `json:"data"`
}
// AzureResponseTable is the table format for Azure responses
type AzureResponseTable struct {
Name string `json:"name"`
@ -77,8 +106,8 @@ type AzureResponseTable struct {
Rows [][]interface{} `json:"rows"`
}
// azureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
type azureMonitorJSONQuery struct {
// AzureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
type AzureMonitorJSONQuery struct {
AzureMonitor struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
@ -94,20 +123,20 @@ type azureMonitorJSONQuery struct {
TimeGrain string `json:"timeGrain"`
Top string `json:"top"`
DimensionFilters []azureMonitorDimensionFilter `json:"dimensionFilters"` // new model
DimensionFilters []AzureMonitorDimensionFilter `json:"dimensionFilters"` // new model
} `json:"azureMonitor"`
Subscription string `json:"subscription"`
}
// azureMonitorDimensionFilter is the model for the frontend sent for azureMonitor metric
// AzureMonitorDimensionFilter is the model for the frontend sent for azureMonitor metric
// queries like "BlobType", "eq", "*"
type azureMonitorDimensionFilter struct {
type AzureMonitorDimensionFilter struct {
Dimension string `json:"dimension"`
Operator string `json:"operator"`
Filter string `json:"filter"`
}
func (a azureMonitorDimensionFilter) String() string {
func (a AzureMonitorDimensionFilter) String() string {
filter := "*"
if a.Filter != "" {
filter = a.Filter
@ -115,29 +144,8 @@ func (a azureMonitorDimensionFilter) String() string {
return fmt.Sprintf("%v %v '%v'", a.Dimension, a.Operator, filter)
}
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
type insightsJSONQuery struct {
AppInsights struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimensions InsightsDimensions `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
MetricName string `json:"metricName"`
TimeGrain string `json:"timeGrain"`
} `json:"appInsights"`
Raw *bool `json:"raw"`
}
type insightsAnalyticsJSONQuery struct {
InsightsAnalytics struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"insightsAnalytics"`
}
// logJSONQuery is the frontend JSON query model for an Azure Log Analytics query.
type logJSONQuery struct {
// LogJSONQuery is the frontend JSON query model for an Azure Log Analytics query.
type LogJSONQuery struct {
AzureLogAnalytics struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
@ -148,69 +156,22 @@ type logJSONQuery struct {
} `json:"azureLogAnalytics"`
}
type argJSONQuery struct {
AzureResourceGraph struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"azureResourceGraph"`
}
// metricChartDefinition is the JSON model for a metrics chart definition
type metricChartDefinition struct {
// MetricChartDefinition is the JSON model for a metrics chart definition
type MetricChartDefinition struct {
ResourceMetadata map[string]string `json:"resourceMetadata"`
Name string `json:"name"`
AggregationType int `json:"aggregationType"`
Namespace string `json:"namespace"`
MetricVisualization metricVisualization `json:"metricVisualization"`
MetricVisualization MetricVisualization `json:"metricVisualization"`
}
// metricVisualization is the JSON model for the visualization field of a
// MetricVisualization is the JSON model for the visualization field of a
// metricChartDefinition
type metricVisualization struct {
type MetricVisualization struct {
DisplayName string `json:"displayName"`
ResourceDisplayName string `json:"resourceDisplayName"`
}
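These two chart models are what getQueryUrl serializes into the Azure portal metrics deep link shown earlier in this change. A minimal sketch of that serialization (the resource id, names, and aggregation code are illustrative):

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down copies of types.MetricChartDefinition and types.MetricVisualization.
type metricVisualization struct {
	DisplayName         string `json:"displayName"`
	ResourceDisplayName string `json:"resourceDisplayName"`
}

type metricChartDefinition struct {
	ResourceMetadata    map[string]string   `json:"resourceMetadata"`
	Name                string              `json:"name"`
	AggregationType     int                 `json:"aggregationType"`
	Namespace           string              `json:"namespace"`
	MetricVisualization metricVisualization `json:"metricVisualization"`
}

func main() {
	charts := []metricChartDefinition{{
		ResourceMetadata: map[string]string{"id": "/subscriptions/xxx/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana"},
		Name:             "Percentage CPU",
		AggregationType:  4, // illustrative aggregation code
		Namespace:        "microsoft.compute/virtualmachines",
		MetricVisualization: metricVisualization{
			DisplayName:         "Percentage CPU",
			ResourceDisplayName: "grafana",
		},
	}}
	body, err := json.Marshal(charts)
	if err != nil {
		panic(err)
	}
	// This JSON ends up URL-encoded inside the portal metrics blade link.
	fmt.Println(string(body))
}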
// InsightsDimensions will unmarshal from a JSON string, or an array of strings,
// into a string array. This exists to support an older query format which is updated
// when a user saves the query or it is sent from the front end, but may not be when
// alerting fetches the model.
type InsightsDimensions []string
// UnmarshalJSON fulfills the json.Unmarshaler interface type.
func (s *InsightsDimensions) UnmarshalJSON(data []byte) error {
*s = InsightsDimensions{}
if string(data) == "null" || string(data) == "" {
return nil
}
if strings.ToLower(string(data)) == `"none"` {
return nil
}
if data[0] == '[' {
var sa []string
err := json.Unmarshal(data, &sa)
if err != nil {
return err
}
dimensions := []string{}
for _, v := range sa {
if v == "none" || v == "None" {
continue
}
dimensions = append(dimensions, v)
}
*s = InsightsDimensions(dimensions)
return nil
}
var str string
err := json.Unmarshal(data, &str)
if err != nil {
return fmt.Errorf("could not parse %q as string or array: %w", string(data), err)
}
if str != "" {
*s = InsightsDimensions{str}
return nil
}
return nil
}
type ServiceProxy interface {
Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter
}
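Exporting ServiceProxy lets the resource graph datasource (and its sibling sub-datasources) take a proxy implementation from the outside, which also makes the resource handlers easy to stub in tests. A minimal sketch of a test double, assuming nothing beyond the interface defined above:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// fakeServiceProxy records that it was called and echoes a fixed body,
// instead of forwarding the request to Azure.
type fakeServiceProxy struct {
	called bool
}

func (f *fakeServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter {
	f.called = true
	_, _ = rw.Write([]byte(`{"value": []}`))
	return rw
}

func main() {
	proxy := &fakeServiceProxy{}
	rec := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/azuremonitor/subscriptions", nil)

	proxy.Do(rec, req, &http.Client{})
	fmt.Println(proxy.called, rec.Body.String()) // true {"value": []}
}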