Alerting: some fixes for updating rules via the API (#38764)

* Alerting: Allow updating rules if quota is exceeded

* Check for rule UID uniqueness in POST request
Sofia Papagiannaki 2021-09-02 19:38:42 +03:00 committed by GitHub
parent e5cdf9e4d1
commit c19d65b1ad
2 changed files with 290 additions and 22 deletions
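
At its core, the change makes the POST handler reject a payload in which two rules share a UID, and counts only rules posted without a UID against the alert-rule quota, so updating existing rules still succeeds at the limit. A minimal, standalone sketch of that validation pattern follows (the rule type and validateGroup helper are illustrative stand-ins, not Grafana's types):

    package main

    import "fmt"

    // rule mirrors only the fields the check needs; it is not the Grafana type.
    type rule struct {
    	UID   string
    	Title string
    }

    // validateGroup rejects duplicate UIDs and reports how many rules are new,
    // i.e. posted without a UID; only those count against the quota.
    func validateGroup(rules []rule) (numOfNewRules int, err error) {
    	seen := make(map[string]struct{})
    	for _, r := range rules {
    		if r.UID == "" {
    			continue // no UID yet: a rule to be created
    		}
    		if _, ok := seen[r.UID]; ok {
    			return 0, fmt.Errorf("failed to validate alert rule %q: conflicting UID %q found", r.Title, r.UID)
    		}
    		seen[r.UID] = struct{}{}
    	}
    	return len(rules) - len(seen), nil
    }

    func main() {
    	rules := []rule{{UID: "abc", Title: "AlwaysNormal"}, {UID: "abc", Title: "AlwaysAlerting"}}
    	if _, err := validateGroup(rules); err != nil {
    		fmt.Println(err) // failed to validate alert rule "AlwaysAlerting": conflicting UID "abc" found
    	}
    }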


@@ -2,6 +2,7 @@ package api
import (
"errors"
"fmt"
"net/http"
"time"
@@ -218,6 +219,32 @@ func (srv RulerSrv) RoutePostNameRulesConfig(c *models.ReqContext, ruleGroupConf
return toNamespaceErrorResponse(err)
}
//TODO: Should this belong in alerting-api?
if ruleGroupConfig.Name == "" {
return ErrResp(http.StatusBadRequest, errors.New("rule group name is not valid"), "")
}
alertRuleUIDs := make(map[string]struct{})
for _, r := range ruleGroupConfig.Rules {
cond := ngmodels.Condition{
Condition: r.GrafanaManagedAlert.Condition,
OrgID: c.SignedInUser.OrgId,
Data: r.GrafanaManagedAlert.Data,
}
if err := validateCondition(cond, c.SignedInUser, c.SkipCache, srv.DatasourceCache); err != nil {
return ErrResp(http.StatusBadRequest, err, "failed to validate alert rule %q", r.GrafanaManagedAlert.Title)
}
if r.GrafanaManagedAlert.UID != "" {
_, ok := alertRuleUIDs[r.GrafanaManagedAlert.UID]
if ok {
return ErrResp(http.StatusBadRequest, fmt.Errorf("conflicting UID %q found", r.GrafanaManagedAlert.UID), "failed to validate alert rule %q", r.GrafanaManagedAlert.Title)
}
alertRuleUIDs[r.GrafanaManagedAlert.UID] = struct{}{}
}
}
numOfNewRules := len(ruleGroupConfig.Rules) - len(alertRuleUIDs)
if numOfNewRules > 0 {
// quotas are checked in advance
// this is acceptable under the assumption that there will be only one alert rule under the rule group
// alternatively we should check the quotas after the rule group update
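// Rules posted without a UID are the ones counted as new: numOfNewRules is
// the payload size minus the distinct UIDs collected above, so a pure update
// adds nothing to the count and bypasses the quota check entirely.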
@@ -229,25 +256,6 @@ func (srv RulerSrv) RoutePostNameRulesConfig(c *models.ReqContext, ruleGroupConf
if limitReached {
return ErrResp(http.StatusForbidden, errors.New("quota reached"), "")
}
// TODO validate UID uniqueness in the payload
//TODO: Should this belong in alerting-api?
if ruleGroupConfig.Name == "" {
return ErrResp(http.StatusBadRequest, errors.New("rule group name is not valid"), "")
}
var alertRuleUIDs []string
for _, r := range ruleGroupConfig.Rules {
cond := ngmodels.Condition{
Condition: r.GrafanaManagedAlert.Condition,
OrgID: c.SignedInUser.OrgId,
Data: r.GrafanaManagedAlert.Data,
}
if err := validateCondition(cond, c.SignedInUser, c.SkipCache, srv.DatasourceCache); err != nil {
return ErrResp(http.StatusBadRequest, err, "failed to validate alert rule %s", r.GrafanaManagedAlert.Title)
}
alertRuleUIDs = append(alertRuleUIDs, r.GrafanaManagedAlert.UID)
}
if err := srv.store.UpdateRuleGroup(store.UpdateRuleGroupCmd{
@@ -263,7 +271,7 @@ func (srv RulerSrv) RoutePostNameRulesConfig(c *models.ReqContext, ruleGroupConf
return ErrResp(http.StatusInternalServerError, err, "failed to update rule group")
}
-for _, uid := range alertRuleUIDs {
+for uid := range alertRuleUIDs {
srv.manager.RemoveByRuleUID(c.OrgId, uid)
}
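// (The loop now ranges over the alertRuleUIDs map rather than the old slice;
// RemoveByRuleUID clears tracked alert state for each posted rule, presumably
// so stale state does not outlive the updated definition.)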


@@ -1036,7 +1036,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
-expectedResponse: `{"message":"failed to validate alert rule AlwaysFiring: invalid query A: data source not found: unknown"}`,
+expectedResponse: `{"message":"failed to validate alert rule \"AlwaysFiring\": invalid query A: data source not found: unknown"}`,
},
{
desc: "alert rule with invalid condition",
@@ -1066,7 +1066,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
-expectedResponse: `{"message":"failed to validate alert rule AlwaysFiring: condition B not found in any query or expression: it should be one of: [A]"}`,
+expectedResponse: `{"message":"failed to validate alert rule \"AlwaysFiring\": condition B not found in any query or expression: it should be one of: [A]"}`,
},
}
@@ -1337,6 +1337,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
Interval: interval,
}
buf := bytes.Buffer{}
enc := json.NewEncoder(&buf)
@@ -1378,6 +1379,125 @@ func TestAlertRuleCRUD(t *testing.T) {
assert.JSONEq(t, expectedGetNamespaceResponseBody, body)
}
// try to update by passing two rules with conflicting UIDs
{
interval, err := model.ParseDuration("30s")
require.NoError(t, err)
rules := apimodels.PostableRuleGroupConfig{
Name: "arulegroup",
Rules: []apimodels.PostableExtendedRuleNode{
{
ApiRuleNode: &apimodels.ApiRuleNode{
For: interval,
Labels: map[string]string{
"label1": "val42",
"foo": "bar",
},
Annotations: map[string]string{
"annotation1": "val42",
"foo": "bar",
},
},
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
UID: ruleUID,
Title: "AlwaysNormal",
Condition: "A",
Data: []ngmodels.AlertQuery{
{
RefID: "A",
RelativeTimeRange: ngmodels.RelativeTimeRange{
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"type": "math",
"expression": "2 + 3 < 1"
}`),
},
},
NoDataState: apimodels.NoDataState(ngmodels.Alerting),
ExecErrState: apimodels.ExecutionErrorState(ngmodels.AlertingErrState),
},
},
{
ApiRuleNode: &apimodels.ApiRuleNode{
For: interval,
Labels: map[string]string{
"label1": "val42",
"foo": "bar",
},
Annotations: map[string]string{
"annotation1": "val42",
"foo": "bar",
},
},
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
UID: ruleUID,
Title: "AlwaysAlerting",
Condition: "A",
Data: []ngmodels.AlertQuery{
{
RefID: "A",
RelativeTimeRange: ngmodels.RelativeTimeRange{
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"type": "math",
"expression": "2 + 3 > 1"
}`),
},
},
NoDataState: apimodels.NoDataState(ngmodels.Alerting),
ExecErrState: apimodels.ExecutionErrorState(ngmodels.AlertingErrState),
},
},
},
Interval: interval,
}
buf := bytes.Buffer{}
enc := json.NewEncoder(&buf)
err = enc.Encode(&rules)
require.NoError(t, err)
u := fmt.Sprintf("http://grafana:password@%s/api/ruler/grafana/api/v1/rules/default", grafanaListedAddr)
// nolint:gosec
resp, err := http.Post(u, "application/json", &buf)
require.NoError(t, err)
t.Cleanup(func() {
err := resp.Body.Close()
require.NoError(t, err)
})
b, err := ioutil.ReadAll(resp.Body)
require.NoError(t, err)
assert.Equal(t, http.StatusBadRequest, resp.StatusCode)
require.JSONEq(t, fmt.Sprintf(`{"message":"failed to validate alert rule \"AlwaysAlerting\": conflicting UID \"%s\" found"}`, ruleUID), string(b))
// let's make sure that rule definitions are not affected by the failed POST request.
u = fmt.Sprintf("http://grafana:password@%s/api/ruler/grafana/api/v1/rules/default", grafanaListedAddr)
// nolint:gosec
resp, err = http.Get(u)
require.NoError(t, err)
t.Cleanup(func() {
err := resp.Body.Close()
require.NoError(t, err)
})
b, err = ioutil.ReadAll(resp.Body)
require.NoError(t, err)
assert.Equal(t, http.StatusAccepted, resp.StatusCode)
body, m := rulesNamespaceWithoutVariableValues(t, b)
returnedUIDs, ok := m["default,arulegroup"]
assert.True(t, ok)
assert.Equal(t, 2, len(returnedUIDs))
assert.JSONEq(t, expectedGetNamespaceResponseBody, body)
}
// update the first rule and completely remove the other
{
forValue, err := model.ParseDuration("30s")
@@ -1868,8 +1988,34 @@ func TestQuota(t *testing.T) {
interval, err := model.ParseDuration("1m")
require.NoError(t, err)
// Create a rule under the "default" folder
createRule(t, grafanaListedAddr, "default", "grafana", "password")
// get the generated rule UID
var ruleUID string
{
u := fmt.Sprintf("http://grafana:password@%s/api/ruler/grafana/api/v1/rules/default", grafanaListedAddr)
// nolint:gosec
resp, err := http.Get(u)
require.NoError(t, err)
t.Cleanup(func() {
err := resp.Body.Close()
require.NoError(t, err)
})
b, err := ioutil.ReadAll(resp.Body)
require.NoError(t, err)
assert.Equal(t, http.StatusAccepted, resp.StatusCode)
_, m := rulesNamespaceWithoutVariableValues(t, b)
generatedUIDs, ok := m["default,arulegroup"]
assert.True(t, ok)
assert.Equal(t, 1, len(generatedUIDs))
ruleUID = generatedUIDs[0]
}
// check quota limits
t.Run("when quota limit exceed", func(t *testing.T) {
t.Run("when quota limit exceed creating new rule should fail", func(t *testing.T) {
// get existing org quota
query := models.GetOrgQuotaByTargetQuery{OrgId: 1, Target: "alert_rule"}
err = sqlstore.GetOrgQuotaByTarget(&query)
@@ -1913,8 +2059,8 @@ func TestQuota(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
+DatasourceUID: "-100",
 Model: json.RawMessage(`{
-"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@@ -1942,6 +2088,120 @@ func TestQuota(t *testing.T) {
assert.Equal(t, http.StatusForbidden, resp.StatusCode)
require.JSONEq(t, `{"message":"quota reached"}`, string(b))
})
t.Run("when quota limit exceed updating existing rule should succeed", func(t *testing.T) {
// try to update the existing alert rule
rules := apimodels.PostableRuleGroupConfig{
Name: "arulegroup",
Interval: interval,
Rules: []apimodels.PostableExtendedRuleNode{
{
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
Title: "Updated alert rule",
Condition: "A",
Data: []ngmodels.AlertQuery{
{
RefID: "A",
RelativeTimeRange: ngmodels.RelativeTimeRange{
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"type": "math",
"expression": "2 + 4 > 1"
}`),
},
},
UID: ruleUID,
},
},
},
}
buf := bytes.Buffer{}
enc := json.NewEncoder(&buf)
err = enc.Encode(&rules)
require.NoError(t, err)
u := fmt.Sprintf("http://grafana:password@%s/api/ruler/grafana/api/v1/rules/default", grafanaListedAddr)
// nolint:gosec
resp, err := http.Post(u, "application/json", &buf)
require.NoError(t, err)
t.Cleanup(func() {
err := resp.Body.Close()
require.NoError(t, err)
})
b, err := ioutil.ReadAll(resp.Body)
require.NoError(t, err)
assert.Equal(t, http.StatusAccepted, resp.StatusCode)
require.JSONEq(t, `{"message":"rule group updated successfully"}`, string(b))
// let's make sure that rule definitions are updated correctly.
u = fmt.Sprintf("http://grafana:password@%s/api/ruler/grafana/api/v1/rules/default", grafanaListedAddr)
// nolint:gosec
resp, err = http.Get(u)
require.NoError(t, err)
t.Cleanup(func() {
err := resp.Body.Close()
require.NoError(t, err)
})
b, err = ioutil.ReadAll(resp.Body)
require.NoError(t, err)
assert.Equal(t, http.StatusAccepted, resp.StatusCode)
body, m := rulesNamespaceWithoutVariableValues(t, b)
returnedUIDs, ok := m["default,arulegroup"]
assert.True(t, ok)
assert.Equal(t, 1, len(returnedUIDs))
assert.Equal(t, ruleUID, returnedUIDs[0])
assert.JSONEq(t, `
{
"default":[
{
"name":"arulegroup",
"interval":"1m",
"rules":[
{
"expr":"",
"grafana_alert":{
"id":1,
"orgId":1,
"title":"Updated alert rule",
"condition":"A",
"data":[
{
"refId":"A",
"queryType":"",
"relativeTimeRange":{
"from":18000,
"to":10800
},
"datasourceUid":"-100",
"model":{
"expression":"2 + 4 \u003E 1",
"intervalMs":1000,
"maxDataPoints":43200,
"type":"math"
}
}
],
"updated":"2021-02-21T01:10:30Z",
"intervalSeconds":60,
"version":2,
"uid":"uid",
"namespace_uid":"nsuid",
"namespace_id":1,
"rule_group":"arulegroup",
"no_data_state":"NoData",
"exec_err_state":"Alerting"
}
}
]
}
]
}`, body)
})
}
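For reference, the behavior pinned down by the tests above can be reproduced against a running instance with a plain HTTP client. A minimal sketch follows; the address, credentials, and payload shape are assumptions mirroring the test fixtures, not part of this commit:

    package main

    import (
    	"bytes"
    	"fmt"
    	"io/ioutil"
    	"net/http"
    )

    func main() {
    	// Assumed local address and basic-auth credentials, as in the test fixtures.
    	u := "http://grafana:password@127.0.0.1:3000/api/ruler/grafana/api/v1/rules/default"

    	// Two rules sharing the same UID: the handler should answer 400 Bad Request
    	// before anything is written to storage.
    	payload := `{"name":"arulegroup","interval":"1m","rules":[
    		{"grafana_alert":{"uid":"abc","title":"AlwaysNormal","condition":"A","data":[]}},
    		{"grafana_alert":{"uid":"abc","title":"AlwaysAlerting","condition":"A","data":[]}}
    	]}`
    	resp, err := http.Post(u, "application/json", bytes.NewBufferString(payload))
    	if err != nil {
    		panic(err)
    	}
    	defer resp.Body.Close()
    	b, _ := ioutil.ReadAll(resp.Body)
    	fmt.Println(resp.StatusCode, string(b)) // expect 400; validation fails before the group is updated
    }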
func TestEval(t *testing.T) {