CloudWatch/Logs: Fix log alerts in new unified alerting (#36558)

* Pass FromAlert header from new alerting

* Add better error messages
Andrej Ocenas 2021-07-09 13:43:22 +02:00 committed by GitHub
parent 56903582ce
commit ea2ba06b93
6 changed files with 22 additions and 19 deletions

View File

@@ -157,7 +157,7 @@ func (s *Service) buildGraph(req *Request) (*simple.DirectedGraph, error) {
 		case dsName == DatasourceName || dsUID == DatasourceUID:
 			node, err = buildCMDNode(dp, rn)
 		default: // If it's not an expression query, it's a data source query.
-			node, err = s.buildDSNode(dp, rn, req.OrgId)
+			node, err = s.buildDSNode(dp, rn, req)
 		}
 		if err != nil {
 			return nil, err

View File

@@ -142,6 +142,7 @@ type DSNode struct {
 	timeRange  TimeRange
 	intervalMS int64
 	maxDP      int64
+	request    Request
 }

 // NodeType returns the data pipeline node type.
@@ -149,7 +150,7 @@ func (dn *DSNode) NodeType() NodeType {
 	return TypeDatasourceNode
 }

-func (s *Service) buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64) (*DSNode, error) {
+func (s *Service) buildDSNode(dp *simple.DirectedGraph, rn *rawNode, req *Request) (*DSNode, error) {
 	encodedQuery, err := json.Marshal(rn.Query)
 	if err != nil {
 		return nil, err
@@ -160,12 +161,13 @@ func (s *Service) buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64
 			id:    dp.NewNode().ID(),
 			refID: rn.RefID,
 		},
-		orgID:      orgID,
+		orgID:      req.OrgId,
 		query:      json.RawMessage(encodedQuery),
 		queryType:  rn.QueryType,
 		intervalMS: defaultIntervalMS,
 		maxDP:      defaultMaxDP,
 		timeRange:  rn.TimeRange,
+		request:    *req,
 	}

 	rawDsID, ok := rn.Query["datasourceId"]
@@ -231,6 +233,7 @@ func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars, s *Service) (m
 	resp, err := s.queryData(ctx, &backend.QueryDataRequest{
 		PluginContext: pc,
 		Queries:       q,
+		Headers:       dn.request.Headers,
 	})
 	if err != nil {

View File

@@ -206,6 +206,7 @@ func (s *Service) queryData(ctx context.Context, req *backend.QueryDataRequest)
 	tQ := plugins.DataQuery{
 		TimeRange: &timeRange,
 		Queries:   queries,
+		Headers:   req.Headers,
 	}

 	// Execute the converted queries

View File

@ -117,6 +117,10 @@ type AlertExecCtx struct {
func GetExprRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Time) (*expr.Request, error) { func GetExprRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Time) (*expr.Request, error) {
req := &expr.Request{ req := &expr.Request{
OrgId: ctx.OrgID, OrgId: ctx.OrgID,
Headers: map[string]string{
// Some data sources check this in query method as sometimes alerting needs special considerations.
"FromAlert": "true",
},
} }
for i := range data { for i := range data {
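The "FromAlert" header added above is how a backend data source can tell an alert-rule evaluation apart from an ordinary dashboard query. Below is a minimal sketch of the receiving side, assuming a data source built on the grafana-plugin-sdk-go backend package; the exampleDatasource type and the behaviour switch are illustrative only, not part of this commit.

package example

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

// exampleDatasource stands in for any backend data source implementation.
type exampleDatasource struct{}

func (d *exampleDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	// req.Headers now carries the value set by GetExprRequest above.
	fromAlert := req.Headers["FromAlert"] == "true"

	resp := backend.NewQueryDataResponse()
	for _, q := range req.Queries {
		if fromAlert {
			// e.g. apply stricter limits or skip UI-only post-processing here.
		}
		resp.Responses[q.RefID] = backend.DataResponse{}
	}
	return resp, nil
}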

View File

@ -21,10 +21,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, model *
namespace := model.Get("namespace").MustString("") namespace := model.Get("namespace").MustString("")
metricName := model.Get("metricName").MustString("") metricName := model.Get("metricName").MustString("")
dimensions := model.Get("dimensions").MustMap() dimensions := model.Get("dimensions").MustMap()
statistics, err := parseStatistics(model) statistics := parseStatistics(model)
if err != nil {
return nil, err
}
period := int64(model.Get("period").MustInt(0)) period := int64(model.Get("period").MustInt(0))
if period == 0 && !usePrefixMatch { if period == 0 && !usePrefixMatch {
period = 300 period = 300

View File

@@ -2,6 +2,7 @@ package cloudwatch

 import (
 	"errors"
+	"fmt"
 	"math"
 	"regexp"
 	"sort"
@@ -52,20 +53,17 @@ func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time
 	}
 	namespace, err := model.Get("namespace").String()
 	if err != nil {
-		return nil, err
+		return nil, fmt.Errorf("failed to get namespace: %v", err)
 	}
 	metricName, err := model.Get("metricName").String()
 	if err != nil {
-		return nil, err
+		return nil, fmt.Errorf("failed to get metricName: %v", err)
 	}
 	dimensions, err := parseDimensions(model)
 	if err != nil {
-		return nil, err
-	}
-	statistics, err := parseStatistics(model)
-	if err != nil {
-		return nil, err
+		return nil, fmt.Errorf("failed to parse dimensions: %v", err)
 	}
+	statistics := parseStatistics(model)

 	p := model.Get("period").MustString("")
 	var period int
@@ -84,12 +82,12 @@ func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time
 	if reNumber.Match([]byte(p)) {
 		period, err = strconv.Atoi(p)
 		if err != nil {
-			return nil, err
+			return nil, fmt.Errorf("failed to parse period as integer: %v", err)
 		}
 	} else {
 		d, err := time.ParseDuration(p)
 		if err != nil {
-			return nil, err
+			return nil, fmt.Errorf("failed to parse period as duration: %v", err)
 		}
 		period = int(d.Seconds())
 	}
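For context on the two branches getting better messages here: the CloudWatch query model accepts the period either as a plain number of seconds or as a Go duration string. Here is a standalone, simplified stand-in for that logic; the real code uses a regexp to pick the branch, so parsePeriod below is illustrative only.

package main

import (
	"fmt"
	"strconv"
	"time"
)

// parsePeriod treats numeric strings as seconds and anything else as a
// duration such as "5m" or "1h", mirroring the integer/duration split above.
func parsePeriod(p string) (int, error) {
	if n, err := strconv.Atoi(p); err == nil {
		return n, nil
	}
	d, err := time.ParseDuration(p)
	if err != nil {
		return 0, fmt.Errorf("failed to parse period as duration: %v", err)
	}
	return int(d.Seconds()), nil
}

func main() {
	for _, p := range []string{"300", "5m", "bogus"} {
		period, err := parsePeriod(p)
		fmt.Println(p, period, err)
	}
}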
@@ -125,13 +123,13 @@ func parseRequestQuery(model *simplejson.Json, refId string, startTime time.Time
 	}, nil
 }

-func parseStatistics(model *simplejson.Json) ([]string, error) {
+func parseStatistics(model *simplejson.Json) []string {
 	var statistics []string
 	for _, s := range model.Get("statistics").MustArray() {
 		statistics = append(statistics, s.(string))
 	}

-	return statistics, nil
+	return statistics
 }

 func parseDimensions(model *simplejson.Json) (map[string][]string, error) {
@@ -145,7 +143,7 @@ func parseDimensions(model *simplejson.Json) (map[string][]string, error) {
 				parsedDimensions[k] = append(parsedDimensions[k], value.(string))
 			}
 		} else {
-			return nil, errors.New("failed to parse dimensions")
+			return nil, errors.New("unknown type as dimension value")
 		}
 	}
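All of the new messages follow the same pattern: annotate the low-level simplejson or parsing error with the name of the field that failed, so a broken alert query reports something actionable instead of a bare "key not found". A tiny self-contained example of the pattern (the error text is made up):

package main

import (
	"errors"
	"fmt"
)

func main() {
	base := errors.New("key not found")
	err := fmt.Errorf("failed to get namespace: %v", base)
	fmt.Println(err) // prints: failed to get namespace: key not found
}

Note that %v only folds the cause into the message; %w would additionally keep it retrievable via errors.Is and errors.As, but the rendered text an alerting user sees is the same either way.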