AzureMonitor: Add errorsource (#92094)

* Add errorsource

* Migrate metric queries to be built individually

* Migrate logs queries to be built individually

* Migrate resource graph queries to be built individually
Andreas Christou 2024-09-09 10:29:35 +01:00 committed by GitHub
parent 8eb7e55f8f
commit d61530941a
9 changed files with 246 additions and 242 deletions
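The pattern introduced across these files, sketched below with illustrative names (exampleDatasource, buildQuery, and executeQuery here are stand-ins, not the exact Grafana code): each query is built and executed individually, and any failure is recorded against that query's RefID together with its error source via the SDK's experimental errorsource helpers, instead of failing the whole request.

package example

import (
	"context"
	"fmt"
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
)

type exampleDatasource struct{}

// buildQuery stands in for the per-query builders below; a validation problem
// caused by the query itself is wrapped as a downstream error.
func (d *exampleDatasource) buildQuery(_ context.Context, q backend.DataQuery) (*backend.DataQuery, error) {
	if len(q.JSON) == 0 {
		return nil, errorsource.DownstreamError(fmt.Errorf("query %s has no body", q.RefID), false)
	}
	return &q, nil
}

// executeQuery stands in for the HTTP execution paths below; a non-2xx status
// is attributed to plugin or downstream based on the status code.
func (d *exampleDatasource) executeQuery(_ context.Context, _ *backend.DataQuery) (*backend.DataResponse, error) {
	status := http.StatusBadGateway // pretend the upstream API failed
	if status/100 != 2 {
		return nil, errorsource.SourceError(backend.ErrorSourceFromHTTPStatus(status), fmt.Errorf("request failed, status: %d", status), false)
	}
	return &backend.DataResponse{}, nil
}

// QueryData builds and executes each query individually; a failure only marks
// that query's response (keyed by RefID) and the loop carries on.
func (d *exampleDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	result := backend.NewQueryDataResponse()
	for _, q := range req.Queries {
		built, err := d.buildQuery(ctx, q)
		if err != nil {
			errorsource.AddErrorToResponse(q.RefID, result, err)
			continue
		}
		res, err := d.executeQuery(ctx, built)
		if err != nil {
			errorsource.AddErrorToResponse(q.RefID, result, err)
			continue
		}
		result.Responses[q.RefID] = *res
	}
	return result, nil
}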

View File

@@ -6,6 +6,7 @@ import (
 	"context"
 	"encoding/base64"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
 	"net/http"
@@ -18,6 +19,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
 
 	"go.opentelemetry.io/otel/attribute"
 	"go.opentelemetry.io/otel/trace"
@@ -136,15 +138,16 @@ func (e *AzureLogAnalyticsDatasource) GetBasicLogsUsage(ctx context.Context, url
 // 3. parses the responses for each query into data frames
 func (e *AzureLogAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, fromAlert bool) (*backend.QueryDataResponse, error) {
 	result := backend.NewQueryDataResponse()
-	queries, err := e.buildQueries(ctx, originalQueries, dsInfo, fromAlert)
-	if err != nil {
-		return nil, err
-	}
-
-	for _, query := range queries {
-		res, err := e.executeQuery(ctx, query, dsInfo, client, url)
+	for _, query := range originalQueries {
+		logsQuery, err := e.buildQuery(ctx, query, dsInfo, fromAlert)
 		if err != nil {
-			result.Responses[query.RefID] = backend.DataResponse{Error: err}
+			errorsource.AddErrorToResponse(query.RefID, result, err)
+			continue
+		}
+		res, err := e.executeQuery(ctx, logsQuery, dsInfo, client, url)
+		if err != nil {
+			errorsource.AddErrorToResponse(query.RefID, result, err)
 			continue
 		}
 		result.Responses[query.RefID] = *res
@@ -179,6 +182,7 @@ func buildLogAnalyticsQuery(query backend.DataQuery, dsInfo types.DatasourceInfo
 	if basicLogsQueryFlag {
 		if meetsBasicLogsCriteria, meetsBasicLogsCriteriaErr := meetsBasicLogsCriteria(resources, fromAlert); meetsBasicLogsCriteriaErr != nil {
+			// This error is a downstream error
 			return nil, meetsBasicLogsCriteriaErr
 		} else {
 			basicLogsQuery = meetsBasicLogsCriteria
@@ -224,20 +228,23 @@ func buildLogAnalyticsQuery(query backend.DataQuery, dsInfo types.DatasourceInfo
 	}, nil
 }
 
-func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, queries []backend.DataQuery, dsInfo types.DatasourceInfo, fromAlert bool) ([]*AzureLogAnalyticsQuery, error) {
-	azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
+func (e *AzureLogAnalyticsDatasource) buildQuery(ctx context.Context, query backend.DataQuery, dsInfo types.DatasourceInfo, fromAlert bool) (*AzureLogAnalyticsQuery, error) {
+	var azureLogAnalyticsQuery *AzureLogAnalyticsQuery
 	appInsightsRegExp, err := regexp.Compile("(?i)providers/microsoft.insights/components")
 	if err != nil {
 		return nil, fmt.Errorf("failed to compile Application Insights regex")
 	}
-	for _, query := range queries {
 	if query.QueryType == string(dataquery.AzureQueryTypeAzureLogAnalytics) {
-		azureLogAnalyticsQuery, err := buildLogAnalyticsQuery(query, dsInfo, appInsightsRegExp, fromAlert)
+		azureLogAnalyticsQuery, err = buildLogAnalyticsQuery(query, dsInfo, appInsightsRegExp, fromAlert)
 		if err != nil {
-			return nil, fmt.Errorf("failed to build azure log analytics query: %w", err)
+			errorMessage := fmt.Errorf("failed to build azure log analytics query: %w", err)
+			var sourceError errorsource.Error
+			if errors.As(err, &sourceError) {
+				return nil, errorsource.SourceError(sourceError.Source(), errorMessage, false)
+			}
+			return nil, errorMessage
 		}
-		azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, azureLogAnalyticsQuery)
 	}
 
 	if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) || query.QueryType == string(dataquery.AzureQueryTypeTraceql) {
@@ -245,24 +252,28 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(ctx context.Context, queries
 			cfg := backend.GrafanaConfigFromContext(ctx)
 			hasPromExemplarsToggle := cfg.FeatureToggles().IsEnabled("azureMonitorPrometheusExemplars")
 			if !hasPromExemplarsToggle {
-				return nil, fmt.Errorf("query type unsupported as azureMonitorPrometheusExemplars feature toggle is not enabled")
+				return nil, errorsource.DownstreamError(fmt.Errorf("query type unsupported as azureMonitorPrometheusExemplars feature toggle is not enabled"), false)
 			}
 		}
 		azureAppInsightsQuery, err := buildAppInsightsQuery(ctx, query, dsInfo, appInsightsRegExp, e.Logger)
 		if err != nil {
-			return nil, fmt.Errorf("failed to build azure application insights query: %w", err)
+			errorMessage := fmt.Errorf("failed to build azure application insights query: %w", err)
+			var sourceError errorsource.Error
+			if errors.As(err, &sourceError) {
+				return nil, errorsource.SourceError(sourceError.Source(), errorMessage, false)
+			}
+			return nil, errorMessage
 		}
-		azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, azureAppInsightsQuery)
+		azureLogAnalyticsQuery = azureAppInsightsQuery
 	}
-	}
 
-	return azureLogAnalyticsQueries, nil
+	return azureLogAnalyticsQuery, nil
 }
 
 func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string) (*backend.DataResponse, error) {
 	// If azureLogAnalyticsSameAs is defined and set to false, return an error
 	if sameAs, ok := dsInfo.JSONData["azureLogAnalyticsSameAs"]; ok && !sameAs.(bool) {
-		return nil, fmt.Errorf("credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials")
+		return nil, errorsource.DownstreamError(fmt.Errorf("credentials for Log Analytics are no longer supported. Go to the data source configuration to update Azure Monitor credentials"), false)
 	}
 
 	queryJSONModel := dataquery.AzureMonitorQuery{}
@@ -273,7 +284,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
 	if query.QueryType == dataquery.AzureQueryTypeAzureTraces {
 		if query.ResultFormat == dataquery.ResultFormatTrace && query.Query == "" {
-			return nil, fmt.Errorf("cannot visualise trace events using the trace visualiser")
+			return nil, errorsource.DownstreamError(fmt.Errorf("cannot visualise trace events using the trace visualiser"), false)
 		}
 	}
@@ -294,7 +305,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
 	res, err := client.Do(req)
 	if err != nil {
-		return nil, err
+		return nil, errorsource.DownstreamError(err, false)
 	}
 	defer func() {
@@ -611,7 +622,7 @@ func getCorrelationWorkspaces(ctx context.Context, baseResource string, resource
 	}()
 	if res.StatusCode/100 != 2 {
-		return AzureCorrelationAPIResponse{}, fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body))
+		return AzureCorrelationAPIResponse{}, errorsource.SourceError(backend.ErrorSourceFromHTTPStatus(res.StatusCode), fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body)), false)
 	}
 	var data AzureCorrelationAPIResponse
 	d := json.NewDecoder(bytes.NewReader(body))
@@ -675,7 +686,7 @@ func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (Azu
 	}()
 	if res.StatusCode/100 != 2 {
-		return AzureLogAnalyticsResponse{}, fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body))
+		return AzureLogAnalyticsResponse{}, errorsource.SourceError(backend.ErrorSourceFromHTTPStatus(res.StatusCode), fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body)), false)
 	}
 	var data AzureLogAnalyticsResponse
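A note on the errors.As blocks added to buildQuery above: when extra context is wrapped around an error that may already carry a source, the original source is re-attached so the downstream/plugin classification survives the wrapping. A minimal standalone sketch of that pattern (wrapPreservingSource is a hypothetical helper, not part of this commit):

package example

import (
	"errors"
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
)

// wrapPreservingSource adds context to err while keeping any error source that
// was already attached to it.
func wrapPreservingSource(err error) error {
	errorMessage := fmt.Errorf("failed to build query: %w", err)
	var sourceError errorsource.Error
	if errors.As(err, &sourceError) {
		// Re-wrap with the same source so the classification is not lost.
		return errorsource.SourceError(sourceError.Source(), errorMessage, false)
	}
	return errorMessage
}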

View File

@@ -741,7 +741,7 @@ func Test_exemplarsFeatureToggle(t *testing.T) {
 			QueryType: string(dataquery.AzureQueryTypeTraceql),
 		}
 
-		_, err := ds.buildQueries(ctx, []backend.DataQuery{query}, dsInfo, false)
+		_, err := ds.buildQuery(ctx, query, dsInfo, false)
 		require.NoError(t, err)
 	})
@@ -761,7 +761,7 @@ func Test_exemplarsFeatureToggle(t *testing.T) {
 			QueryType: string(dataquery.AzureQueryTypeTraceql),
 		}
 
-		_, err := ds.buildQueries(ctx, []backend.DataQuery{query}, dsInfo, false)
+		_, err := ds.buildQuery(ctx, query, dsInfo, false)
 		require.Error(t, err, "query type unsupported as azureMonitorPrometheusExemplars feature toggle is not enabled")
 	})

View File

@@ -8,6 +8,7 @@ import (
 	"time"
 
 	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
 )
@@ -45,14 +46,14 @@ func AddConfigLinks(frame data.Frame, dl string, title *string) data.Frame {
 // 4. number of selected resources is exactly one
 func meetsBasicLogsCriteria(resources []string, fromAlert bool) (bool, error) {
 	if fromAlert {
-		return false, fmt.Errorf("basic Logs queries cannot be used for alerts")
+		return false, errorsource.DownstreamError(fmt.Errorf("basic Logs queries cannot be used for alerts"), false)
 	}
 	if len(resources) != 1 {
-		return false, fmt.Errorf("basic logs queries cannot be run against multiple resources")
+		return false, errorsource.DownstreamError(fmt.Errorf("basic logs queries cannot be run against multiple resources"), false)
 	}
 	if !strings.Contains(strings.ToLower(resources[0]), "microsoft.operationalinsights/workspaces") {
-		return false, fmt.Errorf("basic Logs queries may only be run against Log Analytics workspaces")
+		return false, errorsource.DownstreamError(fmt.Errorf("basic Logs queries may only be run against Log Analytics workspaces"), false)
 	}
 
 	return true, nil

View File

@@ -17,6 +17,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
 
 	"go.opentelemetry.io/otel/attribute"
 	"go.opentelemetry.io/otel/trace"
@@ -51,15 +52,15 @@ func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *ht
 func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, fromAlert bool) (*backend.QueryDataResponse, error) {
 	result := backend.NewQueryDataResponse()
-	queries, err := e.buildQueries(originalQueries, dsInfo)
-	if err != nil {
-		return nil, err
-	}
-
-	for _, query := range queries {
-		res, err := e.executeQuery(ctx, query, dsInfo, client, url)
+	for _, query := range originalQueries {
+		azureQuery, err := e.buildQuery(query, dsInfo)
 		if err != nil {
-			result.Responses[query.RefID] = backend.DataResponse{Error: err}
+			errorsource.AddErrorToResponse(query.RefID, result, err)
+			continue
+		}
+		res, err := e.executeQuery(ctx, azureQuery, dsInfo, client, url)
+		if err != nil {
+			errorsource.AddErrorToResponse(query.RefID, result, err)
 			continue
 		}
 		result.Responses[query.RefID] = *res
@@ -68,10 +69,7 @@ func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, ori
 	return result, nil
 }
 
-func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
-	azureMonitorQueries := []*types.AzureMonitorQuery{}
-
-	for _, query := range queries {
+func (e *AzureMonitorDatasource) buildQuery(query backend.DataQuery, dsInfo types.DatasourceInfo) (*types.AzureMonitorQuery, error) {
 	var target string
 	queryJSONModel := dataquery.AzureMonitorQuery{}
 	err := json.Unmarshal(query.JSON, &queryJSONModel)
@@ -191,7 +189,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
 		sub = *queryJSONModel.Subscription
 	}
 
-	query := &types.AzureMonitorQuery{
+	azureQuery := &types.AzureMonitorQuery{
 		URL:    azureURL,
 		Target: target,
 		Params: params,
@@ -204,15 +202,13 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
 	}
 
 	if filterString != "" {
 		if filterInBody {
-			query.BodyFilter = filterString
+			azureQuery.BodyFilter = filterString
 		} else {
-			query.Params.Add("$filter", filterString)
+			azureQuery.Params.Add("$filter", filterString)
 		}
 	}
-		azureMonitorQueries = append(azureMonitorQueries, query)
-	}
 
-	return azureMonitorQueries, nil
+	return azureQuery, nil
 }
 
 func getParams(azJSONModel *dataquery.AzureMetricQuery, query backend.DataQuery) (url.Values, error) {
@@ -288,7 +284,7 @@ func (e *AzureMonitorDatasource) retrieveSubscriptionDetails(cli *http.Client, c
 	}
 	if res.StatusCode/100 != 2 {
-		return "", fmt.Errorf("request failed, status: %s, error: %s", res.Status, string(body))
+		return "", errorsource.SourceError(backend.ErrorSourceFromHTTPStatus(res.StatusCode), fmt.Errorf("request failed, status: %s, error: %s", res.Status, string(body)), false)
 	}
 
 	var data types.SubscriptionsResponse
@@ -325,7 +321,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *types.
 	res, err := cli.Do(req)
 	if err != nil {
-		return nil, err
+		return nil, errorsource.DownstreamError(err, false)
 	}
 	defer func() {
@@ -370,7 +366,7 @@ func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (types.Az
 	}
 	if res.StatusCode/100 != 2 {
-		return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s, error: %s", res.Status, string(body))
+		return types.AzureMonitorResponse{}, errorsource.SourceError(backend.ErrorSourceFromHTTPStatus(res.StatusCode), fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body)), false)
 	}
 	var data types.AzureMonitorResponse
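The non-2xx handling repeated in retrieveSubscriptionDetails and unmarshalResponse above follows one shape: read the body, then classify the failure from the HTTP status via backend.ErrorSourceFromHTTPStatus and wrap it with errorsource.SourceError so the status and body stay in the message. A condensed sketch under those assumptions (checkResponse is a hypothetical helper, not a function from this commit):

package example

import (
	"fmt"
	"io"
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
)

// checkResponse returns the response body, or an error whose source is derived
// from the HTTP status code when the request did not succeed.
func checkResponse(res *http.Response) ([]byte, error) {
	body, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	if res.StatusCode/100 != 2 {
		return nil, errorsource.SourceError(
			backend.ErrorSourceFromHTTPStatus(res.StatusCode),
			fmt.Errorf("request failed, status: %s, body: %s", res.Status, string(body)),
			false,
		)
	}
	return body, nil
}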

View File

@@ -294,7 +294,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
 				},
 			}
 
-			queries, err := datasource.buildQueries(tsdbQuery, dsInfo)
+			query, err := datasource.buildQuery(tsdbQuery[0], dsInfo)
 			require.NoError(t, err)
 
 			resources := map[string]dataquery.AzureMonitorResource{}
@@ -321,12 +321,12 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
 				Resources: resources,
 			}
 
-			assert.Equal(t, tt.expectedParamFilter, queries[0].Params.Get("$filter"))
+			assert.Equal(t, tt.expectedParamFilter, query.Params.Get("$filter"))
 			if azureMonitorQuery.URL == "" {
 				azureMonitorQuery.URL = "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics"
 			}
-			if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(struct{}{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, "Params", "Dimensions")); diff != "" {
+			if diff := cmp.Diff(azureMonitorQuery, query, cmpopts.IgnoreUnexported(struct{}{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, "Params", "Dimensions")); diff != "" {
 				t.Errorf("Result mismatch (-want +got):\n%s", diff)
 			}
@@ -338,7 +338,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
 				expectedPortalURL = *tt.expectedPortalURL
 			}
 
-			actual, err := getQueryUrl(queries[0], "http://ds", "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana", "grafana")
+			actual, err := getQueryUrl(query, "http://ds", "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana", "grafana")
 			require.NoError(t, err)
 			require.Equal(t, expectedPortalURL, actual)
 		})
@@ -359,10 +359,10 @@ func TestCustomNamespace(t *testing.T) {
 			},
 		}
 
-		result, err := datasource.buildQueries(q, types.DatasourceInfo{})
+		result, err := datasource.buildQuery(q[0], types.DatasourceInfo{})
 		require.NoError(t, err)
 		expected := "custom/namespace"
-		require.Equal(t, expected, result[0].Params.Get("metricnamespace"))
+		require.Equal(t, expected, result.Params.Get("metricnamespace"))
 	})
 }

View File

@@ -3,6 +3,8 @@ package metrics
 import (
 	"fmt"
 	"strings"
+
+	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
 )
 
 // urlBuilder builds the URL for calling the Azure Monitor API
@@ -33,7 +35,7 @@ func (params *urlBuilder) buildResourceURI() (*string, error) {
 	if metricNamespace == nil || *metricNamespace == "" {
 		if params.MetricDefinition == nil || *params.MetricDefinition == "" {
-			return nil, fmt.Errorf("no metricNamespace or metricDefiniton value provided")
+			return nil, errorsource.DownstreamError(fmt.Errorf("no metricNamespace or metricDefiniton value provided"), false)
 		}
 		metricNamespace = params.MetricDefinition
 	}
@@ -45,7 +47,7 @@ func (params *urlBuilder) buildResourceURI() (*string, error) {
 		provider = metricNamespaceArray[0]
 		metricNamespaceArray = metricNamespaceArray[1:]
 	} else {
-		return nil, fmt.Errorf("metricNamespace is not in the correct format")
+		return nil, errorsource.DownstreamError(fmt.Errorf("metricNamespace is not in the correct format"), false)
 	}
 
 	var resourceNameArray []string
@@ -76,7 +78,7 @@ func (params *urlBuilder) buildResourceURI() (*string, error) {
 		if i < len(resourceNameArray) {
 			urlArray = append(urlArray, namespace, resourceNameArray[i])
 		} else {
-			return nil, fmt.Errorf("resourceNameArray does not have enough elements")
+			return nil, errorsource.DownstreamError(fmt.Errorf("resourceNameArray does not have enough elements"), false)
 		}
 	}

View File

@@ -15,6 +15,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
 
 	"go.opentelemetry.io/otel/attribute"
 	"go.opentelemetry.io/otel/trace"
@@ -63,15 +64,14 @@ func (e *AzureResourceGraphDatasource) ExecuteTimeSeriesQuery(ctx context.Contex
 		Responses: map[string]backend.DataResponse{},
 	}
 
-	queries, err := e.buildQueries(originalQueries, dsInfo)
-	if err != nil {
-		return nil, err
-	}
-
-	for _, query := range queries {
-		res, err := e.executeQuery(ctx, query, dsInfo, client, url)
+	for _, query := range originalQueries {
+		graphQuery, err := e.buildQuery(query, dsInfo)
+		if err != nil {
+			return nil, err
+		}
+		res, err := e.executeQuery(ctx, graphQuery, dsInfo, client, url)
 		if err != nil {
-			result.Responses[query.RefID] = backend.DataResponse{Error: err}
+			errorsource.AddErrorToResponse(query.RefID, result, err)
 			continue
 		}
 		result.Responses[query.RefID] = *res
@@ -87,9 +87,7 @@ type argJSONQuery struct {
 	} `json:"azureResourceGraph"`
 }
 
-func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureResourceGraphQuery, error) {
-	azureResourceGraphQueries := make([]*AzureResourceGraphQuery, len(queries))
-	for i, query := range queries {
+func (e *AzureResourceGraphDatasource) buildQuery(query backend.DataQuery, dsInfo types.DatasourceInfo) (*AzureResourceGraphQuery, error) {
 	queryJSONModel := argJSONQuery{}
 	err := json.Unmarshal(query.JSON, &queryJSONModel)
 	if err != nil {
@@ -108,17 +106,14 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
 		return nil, err
 	}
 
-	azureResourceGraphQueries[i] = &AzureResourceGraphQuery{
+	return &AzureResourceGraphQuery{
 		RefID:             query.RefID,
 		ResultFormat:      resultFormat,
 		JSON:              query.JSON,
 		InterpolatedQuery: interpolatedQuery,
 		TimeRange:         query.TimeRange,
 		QueryType:         query.QueryType,
-	}
-	}
-
-	return azureResourceGraphQueries, nil
+	}, nil
 }
 
 func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo types.DatasourceInfo, client *http.Client, dsURL string) (*backend.DataResponse, error) {
@@ -164,7 +159,7 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
 	res, err := client.Do(req)
 	if err != nil {
-		return nil, err
+		return nil, errorsource.DownstreamError(err, false)
 	}
 	defer func() {
@@ -224,7 +219,7 @@ func (e *AzureResourceGraphDatasource) unmarshalResponse(res *http.Response) (Az
 	}()
 	if res.StatusCode/100 != 2 {
-		return AzureResourceGraphResponse{}, fmt.Errorf("%s. Azure Resource Graph error: %s", res.Status, string(body))
+		return AzureResourceGraphResponse{}, errorsource.SourceError(backend.ErrorSourceFromHTTPStatus(res.StatusCode), fmt.Errorf("%s. Azure Resource Graph error: %s", res.Status, string(body)), false)
 	}
 	var data AzureResourceGraphResponse

View File

@@ -28,7 +28,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
 		name                    string
 		queryModel              []backend.DataQuery
 		timeRange               backend.TimeRange
-		azureResourceGraphQueries []*AzureResourceGraphQuery
+		azureResourceGraphQuery AzureResourceGraphQuery
 		Err                     require.ErrorAssertionFunc
 	}{
 		{
@@ -49,8 +49,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
 					RefID: "A",
 				},
 			},
-			azureResourceGraphQueries: []*AzureResourceGraphQuery{
-				{
+			azureResourceGraphQuery: AzureResourceGraphQuery{
 				RefID:        "A",
 				ResultFormat: "table",
 				URL:          "",
@@ -63,16 +62,15 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
 				}`),
 				InterpolatedQuery: "resources | where ['name'] in ('res1','res2')",
 			},
-			},
 			Err: require.NoError,
 		},
 	}
 
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			queries, err := datasource.buildQueries(tt.queryModel, types.DatasourceInfo{})
+			query, err := datasource.buildQuery(tt.queryModel[0], types.DatasourceInfo{})
 			tt.Err(t, err)
-			if diff := cmp.Diff(tt.azureResourceGraphQueries, queries, cmpopts.IgnoreUnexported(struct{}{})); diff != "" {
+			if diff := cmp.Diff(&tt.azureResourceGraphQuery, query, cmpopts.IgnoreUnexported(struct{}{})); diff != "" {
 				t.Errorf("Result mismatch (-want +got):\n%s", diff)
 			}
 		})

View File

@@ -7,6 +7,7 @@ import (
 	"time"
 
 	"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
+	"github.com/grafana/grafana-plugin-sdk-go/experimental/errorsource"
 )
 
 // TimeGrain handles conversions between
@@ -26,7 +27,7 @@ func CreateISO8601DurationFromIntervalMS(it int64) (string, error) {
 	timeValueString := formatted[0 : len(formatted)-1]
 	timeValue, err := strconv.Atoi(timeValueString)
 	if err != nil {
-		return "", fmt.Errorf("could not parse interval %q to an ISO 8061 duration: %w", it, err)
+		return "", errorsource.DownstreamError(fmt.Errorf("could not parse interval %q to an ISO 8061 duration: %w", it, err), false)
 	}
 	unit := formatted[len(formatted)-1:]