Alerting: unwrap upsert into insert and update function (#47731)

* Alerting: unwrap upsert into insert and update function

* add changelog entry

* remove changelog entry

* rename UpsertRule to UpdateRule

* use the AlertRule model directly for inserts

* add test for updating a rule with a conflicting name
Jean-Philippe Quéméner 2022-04-14 14:21:36 +02:00 committed by GitHub
parent c63086822d
commit 060ccacbf9
6 changed files with 231 additions and 111 deletions
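Taken together, the hunks below replace the single upsert entry point with separate insert and update paths. The following is a condensed sketch of the resulting store API, not part of the diff; it uses stand-in types so it compiles on its own (the real definitions live in the ngalert models and store packages, and the full RuleStore interface has more methods than shown here):

package store

import "context"

// AlertRule is a stand-in for ngmodels.AlertRule, reduced to the fields
// this sketch needs.
type AlertRule struct {
    ID      int64
    OrgID   int64
    UID     string
    Title   string
    Version int64
}

// UpdateRule replaces the old UpsertRule: Existing is always expected to be
// set, because pure inserts now go through InsertAlertRules instead.
type UpdateRule struct {
    Existing *AlertRule
    New      AlertRule
}

// RuleStore, condensed to the two methods this commit introduces.
type RuleStore interface {
    // InsertAlertRules creates new rules; each gets a fresh UID and Version 1.
    InsertAlertRules(ctx context.Context, rules []AlertRule) error
    // UpdateAlertRules bumps each rule's Version and records a version row
    // whose ParentVersion is the existing rule's Version.
    UpdateAlertRules(ctx context.Context, rules []UpdateRule) error
}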


@ -48,4 +48,4 @@ Scopes must have an order to ensure consistency and ease of search, this helps u
- [CHANGE] Prometheus Compatible API: Use float-like values for `api/prometheus/grafana/api/v1/alerts` and `api/prometheus/grafana/api/v1/rules` instead of the evaluation string #47216
- [BUGFIX] (Legacy) Templates: Parse notification templates using all the matches of the alert rule when going from `Alerting` to `OK` in legacy alerting #47355
- [BUGFIX] Scheduler: Fix state manager to support OK option of `AlertRule.ExecErrState` #47670
- [ENHANCEMENT] Templates: Enable the use of classic condition values in templates #46971


@ -355,23 +355,25 @@ func (srv RulerSrv) updateAlertRulesInGroup(c *models.ReqContext, namespace *mod
logger.Debug("updating database with the authorized changes", "add", len(authorizedChanges.New), "update", len(authorizedChanges.New), "delete", len(authorizedChanges.Delete))
if len(authorizedChanges.Update) > 0 || len(authorizedChanges.New) > 0 {
upsert := make([]store.UpsertRule, 0, len(authorizedChanges.Update)+len(authorizedChanges.New))
updates := make([]store.UpdateRule, 0, len(authorizedChanges.Update))
inserts := make([]ngmodels.AlertRule, 0, len(authorizedChanges.New))
for _, update := range authorizedChanges.Update {
logger.Debug("updating rule", "rule_uid", update.New.UID, "diff", update.Diff.String())
upsert = append(upsert, store.UpsertRule{
updates = append(updates, store.UpdateRule{
Existing: update.Existing,
New: *update.New,
})
}
for _, rule := range authorizedChanges.New {
upsert = append(upsert, store.UpsertRule{
Existing: nil,
New: *rule,
})
inserts = append(inserts, *rule)
}
err = srv.store.UpsertAlertRules(tranCtx, upsert)
err = srv.store.InsertAlertRules(tranCtx, inserts)
if err != nil {
return fmt.Errorf("failed to add or update rules: %w", err)
return fmt.Errorf("failed to add rules: %w", err)
}
err = srv.store.UpdateAlertRules(tranCtx, updates)
if err != nil {
return fmt.Errorf("failed to update rules: %w", err)
}
}
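The same split shows up on the caller side: the handler now builds one batch per operation and issues both store calls against the same transactional context. A minimal sketch of that pattern, reusing the stand-in types from the sketch near the top (the parameters are simplified stand-ins for the authorized rule-group changes, and the "context" and "fmt" imports are omitted):

// applyChanges illustrates the caller-side pattern from the hunk above:
// split the authorized changes into inserts and updates, then run both
// store calls within the same transaction.
func applyChanges(ctx context.Context, s RuleStore, newRules []*AlertRule, changed []UpdateRule) error {
    inserts := make([]AlertRule, 0, len(newRules))
    for _, r := range newRules {
        inserts = append(inserts, *r)
    }
    if err := s.InsertAlertRules(ctx, inserts); err != nil {
        return fmt.Errorf("failed to add rules: %w", err)
    }
    if err := s.UpdateAlertRules(ctx, changed); err != nil {
        return fmt.Errorf("failed to update rules: %w", err)
    }
    return nil
}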


@ -27,7 +27,7 @@ type UpdateRuleGroupCmd struct {
RuleGroupConfig apimodels.PostableRuleGroupConfig
}
type UpsertRule struct {
type UpdateRule struct {
Existing *ngmodels.AlertRule
New ngmodels.AlertRule
}
@ -42,7 +42,8 @@ type RuleStore interface {
GetAlertRules(ctx context.Context, query *ngmodels.GetAlertRulesQuery) error
GetUserVisibleNamespaces(context.Context, int64, *models.SignedInUser) (map[string]*models.Folder, error)
GetNamespaceByTitle(context.Context, string, int64, *models.SignedInUser, bool) (*models.Folder, error)
UpsertAlertRules(ctx context.Context, rule []UpsertRule) error
InsertAlertRules(ctx context.Context, rule []ngmodels.AlertRule) error
UpdateAlertRules(ctx context.Context, rule []UpdateRule) error
}
func getAlertRuleByUID(sess *sqlstore.DBSession, alertRuleUID string, orgID int64) (*ngmodels.AlertRule, error) {
@ -107,52 +108,87 @@ func (st DBstore) GetAlertRuleByUID(ctx context.Context, query *ngmodels.GetAler
})
}
// UpsertAlertRules is a handler for creating/updating alert rules.
func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) error {
// InsertAlertRules is a handler for creating alert rules.
func (st DBstore) InsertAlertRules(ctx context.Context, rules []ngmodels.AlertRule) error {
return st.SQLStore.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error {
newRules := make([]ngmodels.AlertRule, 0, len(rules))
ruleVersions := make([]ngmodels.AlertRuleVersion, 0, len(rules))
for i := range rules {
r := rules[i]
uid, err := GenerateNewAlertRuleUID(sess, r.OrgID, r.Title)
if err != nil {
return fmt.Errorf("failed to generate UID for alert rule %q: %w", r.Title, err)
}
r.UID = uid
r.Version = 1
if err := st.validateAlertRule(r); err != nil {
return err
}
if err := (&r).PreSave(TimeNow); err != nil {
return err
}
newRules = append(newRules, r)
ruleVersions = append(ruleVersions, ngmodels.AlertRuleVersion{
RuleOrgID: r.OrgID,
RuleUID: r.UID,
RuleNamespaceUID: r.NamespaceUID,
RuleGroup: r.RuleGroup,
ParentVersion: 0,
Version: r.Version,
Created: r.Updated,
Condition: r.Condition,
Title: r.Title,
Data: r.Data,
IntervalSeconds: r.IntervalSeconds,
NoDataState: r.NoDataState,
ExecErrState: r.ExecErrState,
For: r.For,
Annotations: r.Annotations,
Labels: r.Labels,
})
}
if len(newRules) > 0 {
if _, err := sess.Insert(&newRules); err != nil {
if st.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
return ngmodels.ErrAlertRuleUniqueConstraintViolation
}
return fmt.Errorf("failed to create new rules: %w", err)
}
}
if len(ruleVersions) > 0 {
if _, err := sess.Insert(&ruleVersions); err != nil {
return fmt.Errorf("failed to create new rule versions: %w", err)
}
}
return nil
})
}
// UpdateAlertRules is a handler for updating alert rules.
func (st DBstore) UpdateAlertRules(ctx context.Context, rules []UpdateRule) error {
return st.SQLStore.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error {
newRules := make([]ngmodels.AlertRule, 0, len(rules))
ruleVersions := make([]ngmodels.AlertRuleVersion, 0, len(rules))
for _, r := range rules {
var parentVersion int64
switch r.Existing {
case nil: // new rule
uid, err := GenerateNewAlertRuleUID(sess, r.New.OrgID, r.New.Title)
if err != nil {
return fmt.Errorf("failed to generate UID for alert rule %q: %w", r.New.Title, err)
}
r.New.UID = uid
r.New.Version = 1
if err := st.validateAlertRule(r.New); err != nil {
return err
}
if err := (&r.New).PreSave(TimeNow); err != nil {
return err
}
newRules = append(newRules, r.New)
default:
r.New.ID = r.Existing.ID
r.New.Version = r.Existing.Version + 1
if err := st.validateAlertRule(r.New); err != nil {
return err
}
if err := (&r.New).PreSave(TimeNow); err != nil {
return err
}
// no way to update multiple rules at once
if _, err := sess.ID(r.Existing.ID).AllCols().Update(r.New); err != nil {
if st.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
return ngmodels.ErrAlertRuleUniqueConstraintViolation
}
return fmt.Errorf("failed to update rule [%s] %s: %w", r.New.UID, r.New.Title, err)
}
parentVersion = r.Existing.Version
r.New.ID = r.Existing.ID
r.New.Version = r.Existing.Version + 1
if err := st.validateAlertRule(r.New); err != nil {
return err
}
if err := (&r.New).PreSave(TimeNow); err != nil {
return err
}
// no way to update multiple rules at once
if _, err := sess.ID(r.Existing.ID).AllCols().Update(r.New); err != nil {
if st.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
return ngmodels.ErrAlertRuleUniqueConstraintViolation
}
return fmt.Errorf("failed to update rule [%s] %s: %w", r.New.UID, r.New.Title, err)
}
parentVersion = r.Existing.Version
ruleVersions = append(ruleVersions, ngmodels.AlertRuleVersion{
RuleOrgID: r.New.OrgID,
RuleUID: r.New.UID,
@ -172,7 +208,6 @@ func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) erro
Labels: r.New.Labels,
})
}
if len(newRules) > 0 {
if _, err := sess.Insert(&newRules); err != nil {
if st.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
@ -181,13 +216,11 @@ func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) erro
return fmt.Errorf("failed to create new rules: %w", err)
}
}
if len(ruleVersions) > 0 {
if _, err := sess.Insert(&ruleVersions); err != nil {
return fmt.Errorf("failed to create new rule versions: %w", err)
}
}
return nil
})
}
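One behavioral detail worth calling out from the two handlers above: inserts always start a rule at Version 1 with ParentVersion 0, while updates bump the version by one and record the previous version as the parent. A hypothetical helper, reusing the stand-in AlertRule type from the earlier sketch, just to make that rule explicit (this is an illustration, not code from the commit):

// nextVersion states the version bookkeeping the two handlers implement.
func nextVersion(existing *AlertRule) (version, parentVersion int64) {
    if existing == nil {
        // InsertAlertRules: brand-new rule.
        return 1, 0
    }
    // UpdateAlertRules: bump the version, remember where it came from.
    return existing.Version + 1, existing.Version
}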


@ -228,7 +228,17 @@ func (f *FakeRuleStore) GetNamespaceByTitle(_ context.Context, title string, org
return nil, fmt.Errorf("not found")
}
func (f *FakeRuleStore) UpsertAlertRules(_ context.Context, q []UpsertRule) error {
func (f *FakeRuleStore) UpdateAlertRules(_ context.Context, q []UpdateRule) error {
f.mtx.Lock()
defer f.mtx.Unlock()
f.RecordedOps = append(f.RecordedOps, q)
if err := f.Hook(q); err != nil {
return err
}
return nil
}
func (f *FakeRuleStore) InsertAlertRules(_ context.Context, q []models.AlertRule) error {
f.mtx.Lock()
defer f.mtx.Unlock()
f.RecordedOps = append(f.RecordedOps, q)
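The fake store records every call and passes it through a user-supplied hook, which is what lets tests force failures on specific operations. A hedged usage sketch follows; the Hook field and the []UpdateRule command type come from the hunk above, while the constructor name, test name, and injected error are assumptions (imports of "errors" and "testing" omitted):

// TestUpdateFailure sketches how a test might reject update batches to
// exercise the error path of UpdateAlertRules callers.
func TestUpdateFailure(t *testing.T) {
    fakeStore := NewFakeRuleStore(t) // constructor name assumed
    fakeStore.Hook = func(cmd interface{}) error {
        if _, ok := cmd.([]UpdateRule); ok {
            return errors.New("storage unavailable") // injected failure
        }
        return nil
    }
    // ... exercise code that calls fakeStore.UpdateAlertRules and assert
    // that the injected error is propagated.
}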


@ -81,35 +81,34 @@ func CreateTestAlertRule(t *testing.T, ctx context.Context, dbstore *store.DBsto
func CreateTestAlertRuleWithLabels(t *testing.T, ctx context.Context, dbstore *store.DBstore, intervalSeconds int64, orgID int64, labels map[string]string) *models.AlertRule {
ruleGroup := fmt.Sprintf("ruleGroup-%s", util.GenerateShortUID())
err := dbstore.UpsertAlertRules(ctx, []store.UpsertRule{
err := dbstore.InsertAlertRules(ctx, []models.AlertRule{
{
New: models.AlertRule{
ID: 0,
OrgID: orgID,
Title: fmt.Sprintf("an alert definition %s", util.GenerateShortUID()),
Condition: "A",
Data: []models.AlertQuery{
{
Model: json.RawMessage(`{
ID: 0,
OrgID: orgID,
Title: fmt.Sprintf("an alert definition %s", util.GenerateShortUID()),
Condition: "A",
Data: []models.AlertQuery{
{
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type":"math",
"expression":"2 + 2 > 1"
}`),
RelativeTimeRange: models.RelativeTimeRange{
From: models.Duration(5 * time.Hour),
To: models.Duration(3 * time.Hour),
},
RefID: "A",
RelativeTimeRange: models.RelativeTimeRange{
From: models.Duration(5 * time.Hour),
To: models.Duration(3 * time.Hour),
},
RefID: "A",
},
Labels: labels,
Annotations: map[string]string{"testAnnoKey": "testAnnoValue"},
IntervalSeconds: intervalSeconds,
NamespaceUID: "namespace",
RuleGroup: ruleGroup,
NoDataState: models.NoData,
ExecErrState: models.AlertingErrState,
},
Labels: labels,
Annotations: map[string]string{"testAnnoKey": "testAnnoValue"},
IntervalSeconds: intervalSeconds,
NamespaceUID: "namespace",
RuleGroup: ruleGroup,
NoDataState: models.NoData,
ExecErrState: models.AlertingErrState,
},
})
require.NoError(t, err)


@ -356,41 +356,8 @@ func TestAlertRuleConflictingTitle(t *testing.T) {
Login: "admin",
})
interval, err := model.ParseDuration("1m")
require.NoError(t, err)
rules := newTestingRuleConfig(t)
rules := apimodels.PostableRuleGroupConfig{
Name: "arulegroup",
Rules: []apimodels.PostableExtendedRuleNode{
{
ApiRuleNode: &apimodels.ApiRuleNode{
For: interval,
Labels: map[string]string{"label1": "val1"},
Annotations: map[string]string{"annotation1": "val1"},
},
// this rule does not explicitly set no data and error states
// therefore it should get the default values
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
Title: "AlwaysFiring",
Condition: "A",
Data: []ngmodels.AlertQuery{
{
RefID: "A",
RelativeTimeRange: ngmodels.RelativeTimeRange{
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"type": "math",
"expression": "2 + 3 > 1"
}`),
},
},
},
},
},
}
buf := bytes.Buffer{}
enc := json.NewEncoder(&buf)
err = enc.Encode(&rules)
@ -410,7 +377,22 @@ func TestAlertRuleConflictingTitle(t *testing.T) {
assert.Equal(t, http.StatusAccepted, resp.StatusCode)
require.JSONEq(t, `{"message":"rule group updated successfully"}`, string(b))
// fetch the created rules, so we can get the UIDs and trigger
// an update by reusing the UIDs
resp, err = http.Get(u + "/" + rules.Name)
require.NoError(t, err)
var createdRuleGroup apimodels.GettableRuleGroupConfig
data, err := ioutil.ReadAll(resp.Body)
require.NoError(t, err)
err = json.Unmarshal(data, &createdRuleGroup)
require.NoError(t, err)
require.Len(t, createdRuleGroup.Rules, 2)
t.Run("trying to create alert with same title under same folder should fail", func(t *testing.T) {
rules := newTestingRuleConfig(t)
buf := bytes.Buffer{}
enc := json.NewEncoder(&buf)
err = enc.Encode(&rules)
@ -428,10 +410,36 @@ func TestAlertRuleConflictingTitle(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, http.StatusInternalServerError, resp.StatusCode)
require.JSONEq(t, `{"message": "failed to update rule group: failed to add or update rules: a conflicting alert rule is found: rule title under the same organisation and folder should be unique"}`, string(b))
require.JSONEq(t, `{"message": "failed to update rule group: failed to add rules: a conflicting alert rule is found: rule title under the same organisation and folder should be unique"}`, string(b))
})
t.Run("trying to update an alert to the title of an existing alert in the same folder should fail", func(t *testing.T) {
rules := newTestingRuleConfig(t)
rules.Rules[0].GrafanaManagedAlert.UID = createdRuleGroup.Rules[0].GrafanaManagedAlert.UID
rules.Rules[1].GrafanaManagedAlert.UID = createdRuleGroup.Rules[1].GrafanaManagedAlert.UID
rules.Rules[1].GrafanaManagedAlert.Title = "AlwaysFiring"
buf := bytes.Buffer{}
enc := json.NewEncoder(&buf)
err = enc.Encode(&rules)
require.NoError(t, err)
u := fmt.Sprintf("http://admin:admin@%s/api/ruler/grafana/api/v1/rules/folder1", grafanaListedAddr)
// nolint:gosec
resp, err := http.Post(u, "application/json", &buf)
require.NoError(t, err)
t.Cleanup(func() {
err := resp.Body.Close()
require.NoError(t, err)
})
b, err := ioutil.ReadAll(resp.Body)
require.NoError(t, err)
assert.Equal(t, http.StatusInternalServerError, resp.StatusCode)
require.JSONEq(t, `{"message": "failed to update rule group: failed to update rules: a conflicting alert rule is found: rule title under the same organisation and folder should be unique"}`, string(b))
})
t.Run("trying to create alert with same title under another folder should succeed", func(t *testing.T) {
rules := newTestingRuleConfig(t)
buf := bytes.Buffer{}
enc := json.NewEncoder(&buf)
err = enc.Encode(&rules)
@ -800,3 +808,71 @@ func TestRulerRulesFilterByDashboard(t *testing.T) {
require.JSONEq(t, `{"message": "panel_id must be set with dashboard_uid"}`, string(b))
}
}
func newTestingRuleConfig(t *testing.T) apimodels.PostableRuleGroupConfig {
interval, err := model.ParseDuration("1m")
require.NoError(t, err)
firstRule := apimodels.PostableExtendedRuleNode{
ApiRuleNode: &apimodels.ApiRuleNode{
For: interval,
Labels: map[string]string{"label1": "val1"},
Annotations: map[string]string{"annotation1": "val1"},
},
// this rule does not explicitly set no data and error states
// therefore it should get the default values
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
Title: "AlwaysFiring",
Condition: "A",
Data: []ngmodels.AlertQuery{
{
RefID: "A",
RelativeTimeRange: ngmodels.RelativeTimeRange{
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"type": "math",
"expression": "2 + 3 > 1"
}`),
},
},
},
}
secondRule := apimodels.PostableExtendedRuleNode{
ApiRuleNode: &apimodels.ApiRuleNode{
For: interval,
Labels: map[string]string{"label1": "val1"},
Annotations: map[string]string{"annotation1": "val1"},
},
// this rule does not explicitly set no data and error states
// therefore it should get the default values
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
Title: "AlwaysFiring2",
Condition: "A",
Data: []ngmodels.AlertQuery{
{
RefID: "A",
RelativeTimeRange: ngmodels.RelativeTimeRange{
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"type": "math",
"expression": "2 + 3 > 1"
}`),
},
},
},
}
return apimodels.PostableRuleGroupConfig{
Name: "arulegroup",
Rules: []apimodels.PostableExtendedRuleNode{
firstRule,
secondRule,
},
}
}