Alerting: Send information about alert rule to data source in headers (#90344)

* add support for metadata on the evaluation condition and send it to data sources as request headers (see the sketch below)
* support additional metadata when the condition is built
* add additional context to conditions: source and folder title
* add the rule version to the metadata
* use percent-encoding for header values
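In short, every metadata entry on the evaluation condition becomes an `X-Rule-{Key}` request header, with the value percent-encoded so rule titles containing spaces or non-ASCII characters survive HTTP transport. A minimal, self-contained sketch of that transformation (the function name and metadata values here are illustrative; the real logic is `buildDatasourceHeaders` in the diff below):

package main

import (
	"fmt"
	"net/url"
)

// buildRuleHeaders is a stand-in mirroring the new buildDatasourceHeaders logic
// in this commit: each metadata entry becomes an "X-Rule-{Key}" header and the
// value is percent-encoded.
func buildRuleHeaders(metadata map[string]string) map[string]string {
	headers := make(map[string]string, len(metadata))
	for key, value := range metadata {
		headers[fmt.Sprintf("X-Rule-%s", key)] = url.QueryEscape(value)
	}
	return headers
}

func main() {
	// Illustrative values; the real metadata is built in AlertRule.GetEvalCondition.
	meta := map[string]string{
		"Name":    "High CPU usage",
		"Source":  "scheduler",
		"Version": "3",
	}
	for k, v := range buildRuleHeaders(meta) {
		fmt.Printf("%s: %s\n", k, v)
	}
	// e.g. X-Rule-Name: High+CPU+usage
}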
Author: Yuri Tseretyan, 2024-07-17 15:55:12 -04:00 (committed by GitHub)
Parent: b825700343
Commit: c3b9c9b239
7 changed files with 139 additions and 20 deletions


@@ -83,7 +83,7 @@ func (srv TestingApiSrv) RouteTestGrafanaRuleConfig(c *contextmodel.ReqContext,
 }
 }
-evaluator, err := srv.evaluator.Create(eval.NewContext(c.Req.Context(), c.SignedInUser), rule.GetEvalCondition())
+evaluator, err := srv.evaluator.Create(eval.NewContext(c.Req.Context(), c.SignedInUser), rule.GetEvalCondition().WithSource("preview"))
 if err != nil {
 return ErrResp(http.StatusBadRequest, err, "Failed to build evaluator for queries and expressions")
 }


@@ -77,7 +77,7 @@ func (e *Engine) Test(ctx context.Context, user identity.Requester, rule *models
 stateManager := e.createStateManager()
-evaluator, err := backtestingEvaluatorFactory(ruleCtx, e.evalFactory, user, rule.GetEvalCondition(), &schedule.AlertingResultsFromRuleState{
+evaluator, err := backtestingEvaluatorFactory(ruleCtx, e.evalFactory, user, rule.GetEvalCondition().WithSource("backtesting"), &schedule.AlertingResultsFromRuleState{
 Manager: stateManager,
 Rule: rule,
 })


@@ -6,6 +6,7 @@ import (
 "context"
 "errors"
 "fmt"
+"net/url"
 "runtime/debug"
 "sort"
 "strconv"
@@ -43,13 +44,18 @@ type ConditionEvaluator interface {
 Evaluate(ctx context.Context, now time.Time) (Results, error)
 }
-type expressionService interface {
+type expressionExecutor interface {
 ExecutePipeline(ctx context.Context, now time.Time, pipeline expr.DataPipeline) (*backend.QueryDataResponse, error)
 }
+type expressionBuilder interface {
+expressionExecutor
+BuildPipeline(req *expr.Request) (expr.DataPipeline, error)
+}
 type conditionEvaluator struct {
 pipeline expr.DataPipeline
-expressionService expressionService
+expressionService expressionExecutor
 condition models.Condition
 evalTimeout time.Duration
 evalResultLimit int
@@ -105,7 +111,7 @@ type evaluatorImpl struct {
 evaluationTimeout time.Duration
 evaluationResultLimit int
 dataSourceCache datasources.CacheService
-expressionService *expr.Service
+expressionService expressionBuilder
 pluginsStore pluginstore.Store
 }
@@ -324,22 +330,24 @@ func ParseStateString(repr string) (State, error) {
 }
 }
-func buildDatasourceHeaders(ctx context.Context) map[string]string {
-headers := map[string]string{
-// Many data sources check this in query method as sometimes alerting needs special considerations.
-// Several existing systems also compare against the value of this header. Altering this constitutes a breaking change.
-//
-// Note: The spelling of this headers is intentionally degenerate from the others for compatibility reasons.
-// When sent over a network, the key of this header is canonicalized to "Fromalert".
-// However, some datasources still compare against the string "FromAlert".
-models.FromAlertHeaderName: "true",
-models.CacheSkipHeaderName: "true",
+func buildDatasourceHeaders(ctx context.Context, metadata map[string]string) map[string]string {
+headers := make(map[string]string, len(metadata)+3)
+for key, value := range metadata {
+headers[fmt.Sprintf("X-Rule-%s", key)] = url.QueryEscape(value)
 }
+// Many data sources check this in query method as sometimes alerting needs special considerations.
+// Several existing systems also compare against the value of this header. Altering this constitutes a breaking change.
+//
+// Note: The spelling of this headers is intentionally degenerate from the others for compatibility reasons.
+// When sent over a network, the key of this header is canonicalized to "Fromalert".
+// However, some datasources still compare against the string "FromAlert".
+headers[models.FromAlertHeaderName] = "true"
+headers[models.CacheSkipHeaderName] = "true"
 key, ok := models.RuleKeyFromContext(ctx)
 if ok {
 headers["X-Rule-Uid"] = key.UID
 headers["X-Grafana-Org-Id"] = strconv.FormatInt(key.OrgID, 10)
 }
@@ -350,7 +358,7 @@ func buildDatasourceHeaders(ctx context.Context) map[string]string {
 func getExprRequest(ctx EvaluationContext, condition models.Condition, dsCacheService datasources.CacheService, reader AlertingResultsReader) (*expr.Request, error) {
 req := &expr.Request{
 OrgId: ctx.User.GetOrgID(),
-Headers: buildDatasourceHeaders(ctx.Ctx),
+Headers: buildDatasourceHeaders(ctx.Ctx, condition.Metadata),
 User: ctx.User,
 }
 datasources := make(map[string]*datasources.DataSource, len(condition.Data))

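Note that `url.QueryEscape` encodes a space as `+` and every non-ASCII byte as a `%XX` escape, so a consumer that wants the original value back should decode with `url.QueryUnescape`. A quick, self-contained check of the encoding the test below asserts on:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Spaces become "+" and non-ASCII bytes become %XX escapes, which is why
	// the test below expects this exact string for "музыка 🎶".
	fmt.Println(url.QueryEscape("музыка 🎶"))
	// Output: %D0%BC%D1%83%D0%B7%D1%8B%D0%BA%D0%B0+%F0%9F%8E%B6
}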

@@ -5,6 +5,7 @@ import (
 "errors"
 "fmt"
 "math/rand"
+"strconv"
 "testing"
 "time"
@@ -1243,10 +1244,90 @@ func TestResults_Error(t *testing.T) {
 }
 }
+func TestCreate(t *testing.T) {
+t.Run("should generate headers from metadata", func(t *testing.T) {
+orgID := rand.Int63()
+ctx := models.WithRuleKey(context.Background(), models.GenerateRuleKey(orgID))
+q := models.CreateClassicConditionExpression("A", "B", "avg", "gt", 1)
+condition := models.Condition{
+Condition: q.RefID,
+Data: []models.AlertQuery{
+q,
+},
+Metadata: map[string]string{
+"Test1": "data1",
+"Test2": "музыка 🎶",
+"Test3": "",
+},
+}
+expectedHeaders := map[string]string{
+"X-Rule-Test1": "data1",
+"X-Rule-Test2": "%D0%BC%D1%83%D0%B7%D1%8B%D0%BA%D0%B0+%F0%9F%8E%B6",
+"X-Rule-Test3": "",
+models.FromAlertHeaderName: "true",
+models.CacheSkipHeaderName: "true",
+"X-Grafana-Org-Id": strconv.FormatInt(orgID, 10),
+}
+var request *expr.Request
+factory := evaluatorImpl{
+expressionService: fakeExpressionService{
+buildHook: func(req *expr.Request) (expr.DataPipeline, error) {
+if request != nil {
+assert.Fail(t, "BuildPipeline was called twice but should be only once")
+}
+request = req
+return expr.DataPipeline{
+fakeNode{refID: q.RefID},
+}, nil
+},
+},
+}
+_, err := factory.Create(NewContext(ctx, &user.SignedInUser{}), condition)
+require.NoError(t, err)
+require.NotNil(t, request)
+require.Equal(t, expectedHeaders, request.Headers)
+})
+}
 type fakeExpressionService struct {
-hook func(ctx context.Context, now time.Time, pipeline expr.DataPipeline) (*backend.QueryDataResponse, error)
+hook      func(ctx context.Context, now time.Time, pipeline expr.DataPipeline) (*backend.QueryDataResponse, error)
+buildHook func(req *expr.Request) (expr.DataPipeline, error)
 }
 func (f fakeExpressionService) ExecutePipeline(ctx context.Context, now time.Time, pipeline expr.DataPipeline) (*backend.QueryDataResponse, error) {
 return f.hook(ctx, now, pipeline)
 }
+func (f fakeExpressionService) BuildPipeline(req *expr.Request) (expr.DataPipeline, error) {
+return f.buildHook(req)
+}
+type fakeNode struct {
+refID string
+}
+func (f fakeNode) ID() int64 {
+return 0
+}
+func (f fakeNode) NodeType() expr.NodeType {
+return expr.TypeCMDNode
+}
+func (f fakeNode) RefID() string {
+return f.refID
+}
+func (f fakeNode) String() string {
+return "Fake"
+}
+func (f fakeNode) NeedsVars() []string {
+return nil
+}


@@ -6,6 +6,7 @@ import (
 "errors"
 "fmt"
 "hash/fnv"
+"maps"
 "sort"
 "strconv"
 "strings"
@@ -367,13 +368,21 @@ func (alertRule *AlertRule) GetLabels(opts ...LabelOption) map[string]string {
 }
 func (alertRule *AlertRule) GetEvalCondition() Condition {
+meta := map[string]string{
+"Name": alertRule.Title,
+"Uid": alertRule.UID,
+"Type": string(alertRule.Type()),
+"Version": strconv.FormatInt(alertRule.Version, 10),
+}
 if alertRule.Type() == RuleTypeRecording {
 return Condition{
+Metadata: meta,
 Condition: alertRule.Record.From,
 Data: alertRule.Data,
 }
 }
 return Condition{
+Metadata: meta,
 Condition: alertRule.Condition,
 Data: alertRule.Data,
 }
@@ -713,6 +722,8 @@ type UpdateRule struct {
 // Condition contains backend expressions and queries and the RefID
 // of the query or expression that will be evaluated.
 type Condition struct {
+// Additional information provided to the evaluation to include to the request as headers in format `X-Rule-{Key}`
+Metadata map[string]string
 // Condition is the RefID of the query or expression from
 // the Data property to get the results for.
 Condition string `json:"condition"`
@@ -721,6 +732,25 @@ type Condition struct {
 Data []AlertQuery `json:"data"`
 }
+func (c Condition) withMetadata(key, value string) Condition {
+meta := make(map[string]string, len(c.Metadata)+1)
+maps.Copy(meta, c.Metadata)
+meta[key] = value
+return Condition{
+Metadata: meta,
+Condition: c.Condition,
+Data: c.Data,
+}
+}
+func (c Condition) WithFolder(folderTitle string) Condition {
+return c.withMetadata("Folder", folderTitle)
+}
+func (c Condition) WithSource(source string) Condition {
+return c.withMetadata("Source", source)
+}
 // IsValid checks the condition's validity.
 func (c Condition) IsValid() bool {
 // TODO search for refIDs in QueriesAndExpressions

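Callers are not expected to build the metadata map by hand: they chain the new helpers onto `GetEvalCondition()`, as the scheduler does in the next two files. The helpers are also deliberately non-mutating, since `withMetadata` copies the map before adding a key. A self-contained sketch of that copy-on-write pattern (the `condition` type below is a stand-in, not the real `models.Condition`):

package main

import (
	"fmt"
	"maps"
)

// condition is a stripped-down stand-in for models.Condition, only to show the
// copy-on-write chaining pattern that withMetadata uses in the diff above.
type condition struct {
	Metadata map[string]string
}

func (c condition) withMetadata(key, value string) condition {
	meta := make(map[string]string, len(c.Metadata)+1)
	maps.Copy(meta, c.Metadata) // copy first so the receiver is never mutated
	meta[key] = value
	return condition{Metadata: meta}
}

func main() {
	base := condition{Metadata: map[string]string{"Name": "High CPU usage"}} // illustrative rule name
	chained := base.withMetadata("Source", "scheduler").withMetadata("Folder", "Production")

	fmt.Println(base.Metadata)    // map[Name:High CPU usage] - the original is untouched
	fmt.Println(chained.Metadata) // map[Folder:Production Name:High CPU usage Source:scheduler]
}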

@@ -357,7 +357,7 @@ func (a *alertRule) evaluate(ctx context.Context, f fingerprint, attempt int64,
 start := a.clock.Now()
 evalCtx := eval.NewContextWithPreviousResults(ctx, SchedulerUserFor(e.rule.OrgID), a.newLoadedMetricsReader(e.rule))
-ruleEval, err := a.evalFactory.Create(evalCtx, e.rule.GetEvalCondition())
+ruleEval, err := a.evalFactory.Create(evalCtx, e.rule.GetEvalCondition().WithSource("scheduler").WithFolder(e.folderTitle))
 var results eval.Results
 var dur time.Duration
 if err != nil {


@@ -272,7 +272,7 @@ func (r *recordingRule) tryEvaluation(ctx context.Context, ev *Evaluation, logge
 func (r *recordingRule) buildAndExecutePipeline(ctx context.Context, evalCtx eval.EvaluationContext, ev *Evaluation, logger log.Logger) (*backend.QueryDataResponse, error) {
 start := r.clock.Now()
-evaluator, err := r.evalFactory.Create(evalCtx, ev.rule.GetEvalCondition())
+evaluator, err := r.evalFactory.Create(evalCtx, ev.rule.GetEvalCondition().WithSource("scheduler").WithFolder(ev.folderTitle))
 if err != nil {
 logger.Error("Failed to build rule evaluator", "error", err)
 return nil, err
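Nothing in this commit decodes the values on the data source side, so a plugin that wants the original strings back has to percent-decode the `X-Rule-*` headers itself (and be prepared for HTTP transport to canonicalize header casing, as the existing comment about "Fromalert" notes). A minimal sketch of such a consumer, assuming a plain header map rather than any particular plugin SDK type:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// readRuleMetadata is a sketch of what a consumer of these headers might do.
// It takes a plain header map (a stand-in for whatever request type a data
// source actually receives) and percent-decodes the X-Rule-* values.
func readRuleMetadata(headers map[string]string) map[string]string {
	const prefix = "X-Rule-"
	meta := make(map[string]string)
	for key, value := range headers {
		if !strings.HasPrefix(key, prefix) {
			continue
		}
		decoded, err := url.QueryUnescape(value)
		if err != nil {
			decoded = value // fall back to the raw value on malformed encoding
		}
		meta[strings.TrimPrefix(key, prefix)] = decoded
	}
	return meta
}

func main() {
	// Illustrative header values; the encoded string matches the test above.
	headers := map[string]string{
		"FromAlert":      "true",
		"X-Rule-Source":  "scheduler",
		"X-Rule-Name":    "%D0%BC%D1%83%D0%B7%D1%8B%D0%BA%D0%B0+%F0%9F%8E%B6",
		"X-Rule-Version": "3",
	}
	fmt.Println(readRuleMetadata(headers)) // map[Name:музыка 🎶 Source:scheduler Version:3]
}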