Alerting: Expand {{$labels.xyz}} template in labels and annotations (#35159)

* Alerting: Expand `{{$labels.xyz}}` template in labels and annotations

Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>

* Fix annotation not updating for same alert

Signed-off-by: Ganesh Vernekar <ganeshvern@gmail.com>
Ganesh Vernekar 2021-06-03 22:54:36 +05:30 committed by GitHub
parent 17d98cfe43
commit 8417088969
2 changed files with 120 additions and 10 deletions
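
For orientation before the diffs, here is a minimal, self-contained sketch (not part of the commit) of the templating mechanism being introduced: the evaluation's labels are bound to a $labels variable in Go's text/template, so {{$labels.xyz}} references in rule labels and annotations render to concrete values. The annotation text and label values below are illustrative only.

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// An annotation using the {{$labels.xyz}} syntax from the commit title.
	annotation := "{{$labels.pod}} is down in {{$labels.cluster}} cluster"

	// Instance labels as they would arrive from one rule evaluation.
	labels := map[string]string{"pod": "grafana", "cluster": "us-central-1"}

	// Same technique as expandTemplate in the diff below: prepend a binding
	// of .Labels to $labels and render with missingkey=zero so absent
	// labels become empty strings.
	tmpl, err := template.New("summary").Option("missingkey=zero").
		Parse("{{- $labels := .Labels -}}" + annotation)
	if err != nil {
		panic(err)
	}
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, struct{ Labels map[string]string }{Labels: labels}); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // grafana is down in us-central-1 cluster
}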


@@ -1,9 +1,11 @@
 package state
 
 import (
+	"bytes"
 	"fmt"
 	"strings"
 	"sync"
+	text_template "text/template"
 
 	"github.com/grafana/grafana-plugin-sdk-go/data"
@@ -33,16 +35,21 @@ func (c *cache) getOrCreate(alertRule *ngModels.AlertRule, result eval.Result) *State {
 	c.mtxStates.Lock()
 	defer c.mtxStates.Unlock()
+
+	templateData := make(map[string]string, len(result.Instance)+3)
+	for k, v := range result.Instance {
+		templateData[k] = v
+	}
+	attachRuleLabels(templateData, alertRule)
+	ruleLabels, annotations := c.expandRuleLabelsAndAnnotations(alertRule, templateData)
+
 	// if duplicate labels exist, alertRule label will take precedence
-	lbs := mergeLabels(alertRule.Labels, result.Instance)
-	lbs[ngModels.UIDLabel] = alertRule.UID
-	lbs[ngModels.NamespaceUIDLabel] = alertRule.NamespaceUID
-	lbs[prometheusModel.AlertNameLabel] = alertRule.Title
+	lbs := mergeLabels(ruleLabels, result.Instance)
+	attachRuleLabels(lbs, alertRule)
+
 	il := ngModels.InstanceLabels(lbs)
 	id, err := il.StringKey()
 	if err != nil {
-		c.log.Error("error getting cacheId for entry", "msg", err.Error())
+		c.log.Error("error getting cacheId for entry", "err", err.Error())
 	}
 
 	if _, ok := c.states[alertRule.OrgID]; !ok {
@@ -53,14 +60,12 @@ func (c *cache) getOrCreate(alertRule *ngModels.AlertRule, result eval.Result) *State {
 	}
 
 	if state, ok := c.states[alertRule.OrgID][alertRule.UID][id]; ok {
+		// Annotations can change over time for the same alert.
+		state.Annotations = annotations
+		c.states[alertRule.OrgID][alertRule.UID][id] = state
 		return state
 	}
 
-	annotations := map[string]string{}
-	if len(alertRule.Annotations) > 0 {
-		annotations = alertRule.Annotations
-	}
-
 	// If the first result we get is alerting, set StartsAt to EvaluatedAt because we
 	// do not have data for determining StartsAt otherwise
 	newState := &State{
@@ -78,6 +83,62 @@ func (c *cache) getOrCreate(alertRule *ngModels.AlertRule, result eval.Result) *State {
 	return newState
 }
 
+func attachRuleLabels(m map[string]string, alertRule *ngModels.AlertRule) {
+	m[ngModels.UIDLabel] = alertRule.UID
+	m[ngModels.NamespaceUIDLabel] = alertRule.NamespaceUID
+	m[prometheusModel.AlertNameLabel] = alertRule.Title
+}
+
+func (c *cache) expandRuleLabelsAndAnnotations(alertRule *ngModels.AlertRule, data map[string]string) (map[string]string, map[string]string) {
+	expand := func(original map[string]string) map[string]string {
+		expanded := make(map[string]string, len(original))
+		for k, v := range original {
+			ev, err := expandTemplate(alertRule.Title, v, data)
+			expanded[k] = ev
+			if err != nil {
+				c.log.Error("error in expanding template", "name", k, "value", v, "err", err.Error())
+				// Store the original template on error.
+				expanded[k] = v
+			}
+		}
+		return expanded
+	}
+	return expand(alertRule.Labels), expand(alertRule.Annotations)
+}
+
+func expandTemplate(name, text string, data map[string]string) (result string, resultErr error) {
+	name = "__alert_" + name
+	text = "{{- $labels := .Labels -}}" + text
+	// It'd be better to have no alert description than to kill the whole process
+	// if there's a bug in the template.
+	defer func() {
+		if r := recover(); r != nil {
+			var ok bool
+			resultErr, ok = r.(error)
+			if !ok {
+				resultErr = fmt.Errorf("panic expanding template %v: %v", name, r)
+			}
+		}
+	}()
+
+	tmpl, err := text_template.New(name).Option("missingkey=zero").Parse(text)
+	if err != nil {
+		return "", fmt.Errorf("error parsing template %v: %s", name, err.Error())
+	}
+
+	var buffer bytes.Buffer
+	err = tmpl.Execute(&buffer, struct {
+		Labels map[string]string
+	}{
+		Labels: data,
+	})
+	if err != nil {
+		return "", fmt.Errorf("error executing template %v: %s", name, err.Error())
+	}
+	return buffer.String(), nil
+}
+
 func (c *cache) set(entry *State) {
 	c.mtxStates.Lock()
 	defer c.mtxStates.Unlock()
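
The rewritten getOrCreate above hinges on mergeLabels giving the now-expanded rule labels precedence over instance labels when keys collide, per the comment retained in the diff. mergeLabels itself is defined elsewhere in this package; the following is a hedged sketch of that contract, not the actual implementation.

// mergeLabels is sketched here for illustration only. The first argument
// is assumed to win on duplicate keys, matching the "alertRule label will
// take precedence" comment in getOrCreate.
func mergeLabels(ruleLabels, instanceLabels map[string]string) map[string]string {
	merged := make(map[string]string, len(ruleLabels)+len(instanceLabels))
	for k, v := range instanceLabels {
		merged[k] = v
	}
	for k, v := range ruleLabels {
		merged[k] = v // rule label overwrites any duplicate instance label
	}
	return merged
}

Under that contract, the test fixture in the second file would keep its expanded rule label job="prod/grafana" even if the evaluated series also carried a job label.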


@@ -774,6 +774,55 @@ func TestProcessEvalResults(t *testing.T) {
 				},
 			},
 		},
+		{
+			desc: "template is correctly expanded",
+			alertRule: &models.AlertRule{
+				OrgID:           1,
+				Title:           "test_title",
+				UID:             "test_alert_rule_uid",
+				NamespaceUID:    "test_namespace_uid",
+				Annotations:     map[string]string{"summary": "{{$labels.pod}} is down in {{$labels.cluster}} cluster -> {{$labels.namespace}} namespace"},
+				Labels:          map[string]string{"label": "test", "job": "{{$labels.namespace}}/{{$labels.pod}}"},
+				IntervalSeconds: 10,
+			},
+			evalResults: []eval.Results{
+				{
+					eval.Result{
+						Instance:           data.Labels{"cluster": "us-central-1", "namespace": "prod", "pod": "grafana"},
+						State:              eval.Normal,
+						EvaluatedAt:        evaluationTime,
+						EvaluationDuration: evaluationDuration,
+					},
+				},
+			},
+			expectedStates: map[string]*state.State{
+				`[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid"],["alertname","test_title"],["cluster","us-central-1"],["job","prod/grafana"],["label","test"],["namespace","prod"],["pod","grafana"]]`: {
+					AlertRuleUID: "test_alert_rule_uid",
+					OrgID:        1,
+					CacheId:      `[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid"],["alertname","test_title"],["cluster","us-central-1"],["job","prod/grafana"],["label","test"],["namespace","prod"],["pod","grafana"]]`,
+					Labels: data.Labels{
+						"__alert_rule_namespace_uid__": "test_namespace_uid",
+						"__alert_rule_uid__":           "test_alert_rule_uid",
+						"alertname":                    "test_title",
+						"cluster":                      "us-central-1",
+						"namespace":                    "prod",
+						"pod":                          "grafana",
+						"label":                        "test",
+						"job":                          "prod/grafana",
+					},
+					State: eval.Normal,
+					Results: []state.Evaluation{
+						{
+							EvaluationTime:  evaluationTime,
+							EvaluationState: eval.Normal,
+						},
+					},
+					LastEvaluationTime: evaluationTime,
+					EvaluationDuration: evaluationDuration,
+					Annotations:        map[string]string{"summary": "grafana is down in us-central-1 cluster -> prod namespace"},
+				},
+			},
+		},
 	}
 
 	for _, tc := range testCases {
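
The long map keys in expectedStates are the cache IDs produced by il.StringKey() in getOrCreate. A rough, hypothetical sketch of such an encoding — assuming it is the label set sorted by name and serialized as a JSON array of name/value pairs, which is consistent with the literals above:

package main

import (
	"encoding/json"
	"fmt"
	"sort"
)

// stringKey sketches one way to derive a deterministic cache ID from a
// label set: sort the names, then marshal ["name","value"] pairs as JSON.
// The real InstanceLabels.StringKey may differ in detail.
func stringKey(labels map[string]string) (string, error) {
	names := make([]string, 0, len(labels))
	for name := range labels {
		names = append(names, name)
	}
	sort.Strings(names)

	pairs := make([][2]string, 0, len(names))
	for _, name := range names {
		pairs = append(pairs, [2]string{name, labels[name]})
	}
	b, err := json.Marshal(pairs)
	return string(b), err
}

func main() {
	key, _ := stringKey(map[string]string{"alertname": "test_title", "job": "prod/grafana"})
	fmt.Println(key) // [["alertname","test_title"],["job","prod/grafana"]]
}

Sorting by label name makes the key deterministic, so the same label set always resolves to the same state cache entry across evaluations.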