Alerting: validate rules and calculate changes in API controller (#45072)

* Update API controller
   - add validation of the rules API model
   - add a function to calculate changes between the submitted and existing alerts
   - update RoutePostNameRulesConfig to validate input models, calculate changes, and apply them in a transaction

* Update DBStore
   - delete the unused storage method; all of its logic is moved upstream
   - change upsert so that it does not overwrite fields of the new rule with values from the existing alert
   - if a rule has a UID, do not try to pull it from the database (this is done upstream now)

* Add rule generator
Yuriy Tseretyan 2022-02-23 11:30:04 -05:00 committed by GitHub
parent a3a852be81
commit f75bea481d
15 changed files with 1824 additions and 275 deletions


@@ -95,7 +95,7 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) {
api.RegisterRulerApiEndpoints(NewForkedRuler(
api.DatasourceCache,
NewLotexRuler(proxy, logger),
&RulerSrv{DatasourceCache: api.DatasourceCache, QuotaService: api.QuotaService, scheduleService: api.Schedule, store: api.RuleStore, log: logger},
&RulerSrv{DatasourceCache: api.DatasourceCache, QuotaService: api.QuotaService, scheduleService: api.Schedule, store: api.RuleStore, log: logger, cfg: &api.Cfg.UnifiedAlerting},
), m)
api.RegisterTestingApiEndpoints(NewForkedTestingApi(
&TestingApiSrv{


@@ -1,6 +1,7 @@
package api
import (
"context"
"errors"
"fmt"
"net/http"
@@ -9,6 +10,7 @@ import (
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/quota"
"github.com/grafana/grafana/pkg/setting"
"github.com/prometheus/common/model"
@@ -29,8 +31,13 @@ type RulerSrv struct {
QuotaService *quota.QuotaService
scheduleService schedule.ScheduleService
log log.Logger
cfg *setting.UnifiedAlertingSettings
}
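// errQuotaReached is a sentinel error returned from the transaction body when the alert rule quota is exceeded; the handler maps it to HTTP 403.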
var (
errQuotaReached = errors.New("quota has been exceeded")
)
func (srv RulerSrv) RouteDeleteNamespaceRulesConfig(c *models.ReqContext) response.Response {
namespaceTitle := web.Params(c.Req)[":Namespace"]
namespace, err := srv.store.GetNamespaceByTitle(c.Req.Context(), namespaceTitle, c.SignedInUser.OrgId, c.SignedInUser, true)
@@ -243,62 +250,77 @@ func (srv RulerSrv) RoutePostNameRulesConfig(c *models.ReqContext, ruleGroupConf
return toNamespaceErrorResponse(err)
}
//TODO: Should this belong in alerting-api?
if ruleGroupConfig.Name == "" {
return ErrResp(http.StatusBadRequest, errors.New("rule group name is not valid"), "")
rules, err := validateRuleGroup(&ruleGroupConfig, c.SignedInUser.OrgId, namespace, conditionValidator(c, srv.DatasourceCache), srv.cfg)
if err != nil {
return ErrResp(http.StatusBadRequest, err, "")
}
alertRuleUIDs := make(map[string]struct{})
for _, r := range ruleGroupConfig.Rules {
cond := ngmodels.Condition{
Condition: r.GrafanaManagedAlert.Condition,
OrgID: c.SignedInUser.OrgId,
Data: r.GrafanaManagedAlert.Data,
}
if err := validateCondition(c.Req.Context(), cond, c.SignedInUser, c.SkipCache, srv.DatasourceCache); err != nil {
return ErrResp(http.StatusBadRequest, err, "failed to validate alert rule %q", r.GrafanaManagedAlert.Title)
}
if r.GrafanaManagedAlert.UID != "" {
_, ok := alertRuleUIDs[r.GrafanaManagedAlert.UID]
if ok {
return ErrResp(http.StatusBadRequest, fmt.Errorf("conflicting UID %q found", r.GrafanaManagedAlert.UID), "failed to validate alert rule %q", r.GrafanaManagedAlert.Title)
}
alertRuleUIDs[r.GrafanaManagedAlert.UID] = struct{}{}
}
}
return srv.updateAlertRulesInGroup(c, namespace, ruleGroupConfig.Name, rules)
}
numOfNewRules := len(ruleGroupConfig.Rules) - len(alertRuleUIDs)
if numOfNewRules > 0 {
// quotas are checked in advance
// that is acceptable under the assumption that there will be only one alert rule under the rule group
// alternatively we should check the quotas after the rule group update
// and rollback the transaction in case of violation
limitReached, err := srv.QuotaService.QuotaReached(c, "alert_rule")
func (srv RulerSrv) updateAlertRulesInGroup(c *models.ReqContext, namespace *models.Folder, groupName string, rules []*ngmodels.AlertRule) response.Response {
// TODO add create rules authz logic
var changes *RuleChanges = nil
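// calculate and apply all changes within a single transaction so that a failure (e.g. a quota violation) rolls everything back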
err := srv.store.InTransaction(c.Req.Context(), func(tranCtx context.Context) error {
var err error
changes, err = calculateChanges(tranCtx, srv.store, c.SignedInUser.OrgId, namespace, groupName, rules)
if err != nil {
return ErrResp(http.StatusInternalServerError, err, "failed to get quota")
return err
}
if limitReached {
return ErrResp(http.StatusForbidden, errors.New("quota reached"), "")
}
}
if err := srv.store.UpdateRuleGroup(c.Req.Context(), store.UpdateRuleGroupCmd{
OrgID: c.SignedInUser.OrgId,
NamespaceUID: namespace.Uid,
RuleGroupConfig: ruleGroupConfig,
}); err != nil {
// TODO add update/delete authz logic
err = srv.store.UpsertAlertRules(tranCtx, changes.Upsert)
if err != nil {
return fmt.Errorf("failed to add or update rules: %w", err)
}
for _, rule := range changes.Delete {
if err = srv.store.DeleteAlertRuleByUID(tranCtx, c.SignedInUser.OrgId, rule.UID); err != nil {
return fmt.Errorf("failed to delete rule %d with UID %s: %w", rule.ID, rule.UID, err)
}
}
if changes.newRules > 0 {
limitReached, err := srv.QuotaService.CheckQuotaReached(tranCtx, "alert_rule", &quota.ScopeParameters{
OrgId: c.OrgId,
UserId: c.UserId,
}) // "alert_rule" is the quota target (table name)
if err != nil {
return fmt.Errorf("failed to get alert rules quota: %w", err)
}
if limitReached {
return errQuotaReached
}
}
return nil
})
if err != nil {
if errors.Is(err, ngmodels.ErrAlertRuleNotFound) {
return ErrResp(http.StatusNotFound, err, "failed to update rule group")
} else if errors.Is(err, ngmodels.ErrAlertRuleFailedValidation) {
return ErrResp(http.StatusBadRequest, err, "failed to update rule group")
} else if errors.Is(err, errQuotaReached) {
return ErrResp(http.StatusForbidden, err, "")
}
return ErrResp(http.StatusInternalServerError, err, "failed to update rule group")
}
for uid := range alertRuleUIDs {
srv.scheduleService.UpdateAlertRule(ngmodels.AlertRuleKey{
// TODO uncomment when rules that are not changed are filtered out from the upsert list.
// for _, rule := range changes.Upsert {
// if rule.Existing != nil {
// srv.scheduleService.UpdateAlertRule(ngmodels.AlertRuleKey{
// OrgID: c.SignedInUser.OrgId,
// UID: rule.Existing.UID,
// })
// }
// }
for _, rule := range changes.Delete {
srv.scheduleService.DeleteAlertRule(ngmodels.AlertRuleKey{
OrgID: c.SignedInUser.OrgId,
UID: uid,
UID: rule.UID,
})
}
@@ -341,3 +363,81 @@ func toNamespaceErrorResponse(err error) response.Response {
}
return apierrors.ToFolderErrorResponse(err)
}
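// RuleChanges describes the difference between the submitted rule group and the rules currently stored in the database.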
type RuleChanges struct {
newRules int
Upsert []store.UpsertRule
Delete []*ngmodels.AlertRule
}
// calculateChanges calculates the difference between the rules stored in the database for the group and the submitted rules. If a submitted rule has a UID, it tries to find it in the database (including other groups).
// It returns the rules that need to be added, updated, and deleted. Deleted are the rules that exist in the database and belong to the group but are absent from the submitted list.
func calculateChanges(ctx context.Context, ruleStore store.RuleStore, orgId int64, namespace *models.Folder, ruleGroupName string, submittedRules []*ngmodels.AlertRule) (*RuleChanges, error) {
q := &ngmodels.ListRuleGroupAlertRulesQuery{
OrgID: orgId,
NamespaceUID: namespace.Uid,
RuleGroup: ruleGroupName,
}
if err := ruleStore.GetRuleGroupAlertRules(ctx, q); err != nil {
return nil, fmt.Errorf("failed to query database for rules in the group %s: %w", ruleGroupName, err)
}
existingGroupRules := q.Result
existingGroupRulesUIDs := make(map[string]*ngmodels.AlertRule, len(existingGroupRules))
for _, r := range existingGroupRules {
existingGroupRulesUIDs[r.UID] = r
}
upsert := make([]store.UpsertRule, 0, len(submittedRules))
toDelete := make([]*ngmodels.AlertRule, 0, len(submittedRules))
newRules := 0
for _, r := range submittedRules {
var existing *ngmodels.AlertRule = nil
if r.UID != "" {
if existingGroupRule, ok := existingGroupRulesUIDs[r.UID]; ok {
existing = existingGroupRule
// remove the rule from existingGroupRulesUIDs
delete(existingGroupRulesUIDs, r.UID)
} else {
// The rule can be from another group or namespace
q := &ngmodels.GetAlertRuleByUIDQuery{OrgID: orgId, UID: r.UID}
if err := ruleStore.GetAlertRuleByUID(ctx, q); err != nil || q.Result == nil {
// if a rule has a UID, it is considered an update; therefore, fail if there is no rule to update
if errors.Is(err, ngmodels.ErrAlertRuleNotFound) || q.Result == nil && err == nil {
return nil, fmt.Errorf("failed to update rule with UID %s because %w", r.UID, ngmodels.ErrAlertRuleNotFound)
}
return nil, fmt.Errorf("failed to query database for an alert rule with UID %s: %w", r.UID, err)
}
existing = q.Result
}
}
if existing == nil {
upsert = append(upsert, store.UpsertRule{
Existing: nil,
New: *r,
})
newRules++
continue
}
ngmodels.PatchPartialAlertRule(existing, r)
// TODO diff the patched rule against the existing one, as well as against the submitted one
upsert = append(upsert, store.UpsertRule{
Existing: existing,
New: *r,
})
continue
}
for _, rule := range existingGroupRulesUIDs {
toDelete = append(toDelete, rule)
}
return &RuleChanges{
Upsert: upsert,
Delete: toDelete,
newRules: newRules,
}, nil
}


@@ -0,0 +1,279 @@
package api
import (
"context"
"errors"
"math/rand"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/stretchr/testify/require"
models2 "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/util"
)
func TestCalculateChanges(t *testing.T) {
orgId := rand.Int63()
t.Run("detects alerts that need to be added", func(t *testing.T) {
fakeStore := store.NewFakeRuleStore(t)
namespace := randFolder()
groupName := util.GenerateShortUID()
submitted := models.GenerateAlertRules(rand.Intn(5)+1, models.AlertRuleGen(withOrgID(orgId), simulateSubmitted, withoutUID))
changes, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, submitted)
require.NoError(t, err)
require.Equal(t, changes.newRules, len(submitted))
require.Empty(t, changes.Delete)
require.Len(t, changes.Upsert, len(submitted))
for _, rule := range changes.Upsert {
require.Nil(t, rule.Existing)
}
opts := []cmp.Option{
cmp.FilterPath(func(path cmp.Path) bool {
return path.String() == "Data.modelProps"
}, cmp.Ignore()),
}
outerloop:
for _, expected := range submitted {
for _, rule := range changes.Upsert {
if cmp.Equal(*expected, rule.New, opts...) {
continue outerloop
}
}
require.Fail(t, "changes did not contain a rule that was submitted")
}
})
t.Run("detects alerts that need to be deleted", func(t *testing.T) {
namespace := randFolder()
groupName := util.GenerateShortUID()
inDatabaseMap, inDatabase := models.GenerateUniqueAlertRules(rand.Intn(5)+1, models.AlertRuleGen(withOrgID(orgId), withGroup(groupName), withNamespace(namespace)))
fakeStore := store.NewFakeRuleStore(t)
fakeStore.PutRule(context.Background(), inDatabase...)
changes, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, make([]*models.AlertRule, 0))
require.NoError(t, err)
require.Equal(t, 0, changes.newRules)
require.Len(t, changes.Upsert, 0)
require.Len(t, changes.Delete, len(inDatabaseMap))
for _, toDelete := range changes.Delete {
require.Contains(t, inDatabaseMap, toDelete.UID)
db := inDatabaseMap[toDelete.UID]
require.Equal(t, db, toDelete)
}
})
t.Run("should detect alerts that needs to be updated", func(t *testing.T) {
namespace := randFolder()
groupName := util.GenerateShortUID()
inDatabaseMap, inDatabase := models.GenerateUniqueAlertRules(rand.Intn(5)+1, models.AlertRuleGen(withOrgID(orgId), withGroup(groupName), withNamespace(namespace)))
submittedMap, submitted := models.GenerateUniqueAlertRules(len(inDatabase), models.AlertRuleGen(simulateSubmitted, withOrgID(orgId), withGroup(groupName), withNamespace(namespace), withUIDs(inDatabaseMap)))
fakeStore := store.NewFakeRuleStore(t)
fakeStore.PutRule(context.Background(), inDatabase...)
changes, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, submitted)
require.NoError(t, err)
require.Len(t, changes.Upsert, len(inDatabase))
for _, upsert := range changes.Upsert {
require.NotNil(t, upsert.Existing)
require.Equal(t, upsert.Existing.UID, upsert.New.UID)
require.Equal(t, inDatabaseMap[upsert.Existing.UID], upsert.Existing)
require.Equal(t, *submittedMap[upsert.Existing.UID], upsert.New)
}
require.Len(t, changes.Delete, 0)
require.Equal(t, 0, changes.newRules)
})
t.Run("should patch rule with UID specified by existing rule", func(t *testing.T) {
testCases := []struct {
name string
mutator func(r *models.AlertRule)
}{
{
name: "title is empty",
mutator: func(r *models.AlertRule) {
r.Title = ""
},
},
{
name: "condition and data are empty",
mutator: func(r *models.AlertRule) {
r.Condition = ""
r.Data = nil
},
},
{
name: "ExecErrState is empty",
mutator: func(r *models.AlertRule) {
r.ExecErrState = ""
},
},
{
name: "NoDataState is empty",
mutator: func(r *models.AlertRule) {
r.NoDataState = ""
},
},
{
name: "For is 0",
mutator: func(r *models.AlertRule) {
r.For = 0
},
},
}
dbRule := models.AlertRuleGen(withOrgID(orgId))()
fakeStore := store.NewFakeRuleStore(t)
fakeStore.PutRule(context.Background(), dbRule)
namespace := randFolder()
groupName := util.GenerateShortUID()
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
expected := models.AlertRuleGen(simulateSubmitted, testCase.mutator)()
expected.UID = dbRule.UID
submitted := *expected
changes, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, []*models.AlertRule{&submitted})
require.NoError(t, err)
require.Len(t, changes.Upsert, 1)
ch := changes.Upsert[0]
require.Equal(t, ch.Existing, dbRule)
fixed := *expected
models.PatchPartialAlertRule(dbRule, &fixed)
require.Equal(t, fixed, ch.New)
})
}
})
t.Run("should be able to find alerts by UID in other group/namespace", func(t *testing.T) {
inDatabaseMap, inDatabase := models.GenerateUniqueAlertRules(rand.Intn(10)+10, models.AlertRuleGen(withOrgID(orgId)))
fakeStore := store.NewFakeRuleStore(t)
fakeStore.PutRule(context.Background(), inDatabase...)
namespace := randFolder()
groupName := util.GenerateShortUID()
submittedMap, submitted := models.GenerateUniqueAlertRules(rand.Intn(len(inDatabase)-5)+5, models.AlertRuleGen(simulateSubmitted, withOrgID(orgId), withGroup(groupName), withNamespace(namespace), withUIDs(inDatabaseMap)))
changes, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, submitted)
require.NoError(t, err)
require.Len(t, changes.Delete, 0)
require.Equal(t, 0, changes.newRules)
require.Len(t, changes.Upsert, len(submitted))
for _, upsert := range changes.Upsert {
require.NotNil(t, upsert.Existing)
require.Equal(t, upsert.Existing.UID, upsert.New.UID)
require.Equal(t, inDatabaseMap[upsert.Existing.UID], upsert.Existing)
require.Equal(t, *submittedMap[upsert.Existing.UID], upsert.New)
}
})
t.Run("should fail when submitted rule has UID that does not exist in db", func(t *testing.T) {
fakeStore := store.NewFakeRuleStore(t)
namespace := randFolder()
groupName := util.GenerateShortUID()
submitted := models.AlertRuleGen(withOrgID(orgId), simulateSubmitted)()
require.NotEqual(t, "", submitted.UID)
_, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, []*models.AlertRule{submitted})
require.Error(t, err)
})
t.Run("should fail if cannot fetch current rules in the group", func(t *testing.T) {
fakeStore := store.NewFakeRuleStore(t)
expectedErr := errors.New("TEST ERROR")
fakeStore.Hook = func(cmd interface{}) error {
switch cmd.(type) {
case models.ListRuleGroupAlertRulesQuery:
return expectedErr
}
return nil
}
namespace := randFolder()
groupName := util.GenerateShortUID()
submitted := models.AlertRuleGen(withOrgID(orgId), simulateSubmitted, withoutUID)()
_, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, []*models.AlertRule{submitted})
require.ErrorIs(t, err, expectedErr)
})
t.Run("should fail if cannot fetch rule by UID", func(t *testing.T) {
fakeStore := store.NewFakeRuleStore(t)
expectedErr := errors.New("TEST ERROR")
fakeStore.Hook = func(cmd interface{}) error {
switch cmd.(type) {
case models.GetAlertRuleByUIDQuery:
return expectedErr
}
return nil
}
namespace := randFolder()
groupName := util.GenerateShortUID()
submitted := models.AlertRuleGen(withOrgID(orgId), simulateSubmitted)()
_, err := calculateChanges(context.Background(), fakeStore, orgId, namespace, groupName, []*models.AlertRule{submitted})
require.ErrorIs(t, err, expectedErr)
})
}
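// The helpers below are mutators for models.AlertRuleGen that customize the generated rules.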
func withOrgID(orgId int64) func(rule *models.AlertRule) {
return func(rule *models.AlertRule) {
rule.OrgID = orgId
}
}
func withGroup(groupName string) func(rule *models.AlertRule) {
return func(rule *models.AlertRule) {
rule.RuleGroup = groupName
}
}
func withNamespace(namespace *models2.Folder) func(rule *models.AlertRule) {
return func(rule *models.AlertRule) {
rule.NamespaceUID = namespace.Uid
}
}
// simulateSubmitted resets the fields that are not populated by the conversion from the API model to models.AlertRule
func simulateSubmitted(rule *models.AlertRule) {
rule.ID = 0
rule.Version = 0
rule.Updated = time.Time{}
}
func withoutUID(rule *models.AlertRule) {
rule.UID = ""
}
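// withUIDs assigns each generated rule a distinct UID taken from the given map, until all UIDs are used up.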
func withUIDs(uids map[string]*models.AlertRule) func(rule *models.AlertRule) {
unused := make([]string, 0, len(uids))
for s := range uids {
unused = append(unused, s)
}
return func(rule *models.AlertRule) {
if len(unused) == 0 {
return
}
rule.UID = unused[0]
unused = unused[1:]
}
}


@@ -0,0 +1,183 @@
package api
import (
"errors"
"fmt"
"strconv"
"time"
"github.com/grafana/grafana/pkg/models"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/setting"
)
// validateRuleNode validates the API model (definitions.PostableExtendedRuleNode) and converts it to a models.AlertRule
func validateRuleNode(
ruleNode *apimodels.PostableExtendedRuleNode,
groupName string,
interval time.Duration,
orgId int64,
namespace *models.Folder,
conditionValidator func(ngmodels.Condition) error,
cfg *setting.UnifiedAlertingSettings) (*ngmodels.AlertRule, error) {
intervalSeconds := int64(interval.Seconds())
baseIntervalSeconds := int64(cfg.BaseInterval.Seconds())
if interval <= 0 {
return nil, fmt.Errorf("rule evaluation interval must be positive duration that is multiple of the base interval %d seconds", baseIntervalSeconds)
}
if intervalSeconds%baseIntervalSeconds != 0 {
return nil, fmt.Errorf("rule evaluation interval %d should be multiple of the base interval of %d seconds", int64(interval.Seconds()), baseIntervalSeconds)
}
if ruleNode.GrafanaManagedAlert == nil {
return nil, fmt.Errorf("not Grafana managed alert rule")
}
// if a UID is specified, we can accept a partial model; some validation can be skipped because the rule will be patched later
canPatch := ruleNode.GrafanaManagedAlert.UID != ""
if ruleNode.GrafanaManagedAlert.Title == "" && !canPatch {
return nil, errors.New("alert rule title cannot be empty")
}
if len(ruleNode.GrafanaManagedAlert.Title) > store.AlertRuleMaxTitleLength {
return nil, fmt.Errorf("alert rule title is too long. Max length is %d", store.AlertRuleMaxTitleLength)
}
noDataState := ngmodels.NoData
if ruleNode.GrafanaManagedAlert.NoDataState == "" && canPatch {
noDataState = ""
}
if ruleNode.GrafanaManagedAlert.NoDataState != "" {
var err error
noDataState, err = ngmodels.NoDataStateFromString(string(ruleNode.GrafanaManagedAlert.NoDataState))
if err != nil {
return nil, err
}
}
errorState := ngmodels.AlertingErrState
if ruleNode.GrafanaManagedAlert.ExecErrState == "" && canPatch {
errorState = ""
}
if ruleNode.GrafanaManagedAlert.ExecErrState != "" {
var err error
errorState, err = ngmodels.ErrStateFromString(string(ruleNode.GrafanaManagedAlert.ExecErrState))
if err != nil {
return nil, err
}
}
if len(ruleNode.GrafanaManagedAlert.Data) == 0 {
if canPatch {
if ruleNode.GrafanaManagedAlert.Condition != "" {
return nil, fmt.Errorf("%w: query is not specified by condition is. You must specify both query and condition to update existing alert rule", ngmodels.ErrAlertRuleFailedValidation)
}
} else {
return nil, fmt.Errorf("%w: no queries or expressions are found", ngmodels.ErrAlertRuleFailedValidation)
}
}
if len(ruleNode.GrafanaManagedAlert.Data) != 0 {
cond := ngmodels.Condition{
Condition: ruleNode.GrafanaManagedAlert.Condition,
OrgID: orgId,
Data: ruleNode.GrafanaManagedAlert.Data,
}
if err := conditionValidator(cond); err != nil {
return nil, fmt.Errorf("failed to validate condition of alert rule %s: %w", ruleNode.GrafanaManagedAlert.Title, err)
}
}
newAlertRule := ngmodels.AlertRule{
OrgID: orgId,
Title: ruleNode.GrafanaManagedAlert.Title,
Condition: ruleNode.GrafanaManagedAlert.Condition,
Data: ruleNode.GrafanaManagedAlert.Data,
UID: ruleNode.GrafanaManagedAlert.UID,
IntervalSeconds: intervalSeconds,
NamespaceUID: namespace.Uid,
RuleGroup: groupName,
NoDataState: noDataState,
ExecErrState: errorState,
}
if ruleNode.ApiRuleNode != nil {
newAlertRule.For = time.Duration(ruleNode.ApiRuleNode.For)
newAlertRule.Annotations = ruleNode.ApiRuleNode.Annotations
newAlertRule.Labels = ruleNode.ApiRuleNode.Labels
dashUID := ruleNode.ApiRuleNode.Annotations[ngmodels.DashboardUIDAnnotation]
panelID := ruleNode.ApiRuleNode.Annotations[ngmodels.PanelIDAnnotation]
if dashUID != "" && panelID == "" || dashUID == "" && panelID != "" {
return nil, fmt.Errorf("both annotations %s and %s must be specified", ngmodels.DashboardUIDAnnotation, ngmodels.PanelIDAnnotation)
}
if dashUID != "" {
panelIDValue, err := strconv.ParseInt(panelID, 10, 64)
if err != nil {
return nil, fmt.Errorf("annotation %s must be a valid integer Panel ID", ngmodels.PanelIDAnnotation)
}
newAlertRule.DashboardUID = &dashUID
newAlertRule.PanelID = &panelIDValue
}
}
return &newAlertRule, nil
}
// validateRuleGroup validates the API model (definitions.PostableRuleGroupConfig) and converts it to a collection of models.AlertRule.
// Returns a slice containing all rules described by the API model, or an error if the group specification or any alert definition is invalid.
func validateRuleGroup(
ruleGroupConfig *apimodels.PostableRuleGroupConfig,
orgId int64,
namespace *models.Folder,
conditionValidator func(ngmodels.Condition) error,
cfg *setting.UnifiedAlertingSettings) ([]*ngmodels.AlertRule, error) {
if ruleGroupConfig.Name == "" {
return nil, errors.New("rule group name cannot be empty")
}
if len(ruleGroupConfig.Name) > store.AlertRuleMaxRuleGroupNameLength {
return nil, fmt.Errorf("rule group name is too long. Max length is %d", store.AlertRuleMaxRuleGroupNameLength)
}
interval := time.Duration(ruleGroupConfig.Interval)
if interval == 0 {
// if group interval is 0 (undefined) then we automatically fall back to the default interval
interval = cfg.DefaultRuleEvaluationInterval
}
if interval < 0 || int64(interval.Seconds())%int64(cfg.BaseInterval.Seconds()) != 0 {
return nil, fmt.Errorf("rule evaluation interval (%d second) should be positive number that is multiple of the base interval of %d seconds", int64(interval.Seconds()), int64(cfg.BaseInterval.Seconds()))
}
// TODO should we validate that interval is >= cfg.MinInterval? Currently, we allow saving but fix the specified interval if it is < cfg.MinInterval
result := make([]*ngmodels.AlertRule, 0, len(ruleGroupConfig.Rules))
uids := make(map[string]int, cap(result))
for idx := range ruleGroupConfig.Rules {
rule, err := validateRuleNode(&ruleGroupConfig.Rules[idx], ruleGroupConfig.Name, interval, orgId, namespace, conditionValidator, cfg)
// TODO do not stop on the first failure but return all failures
if err != nil {
return nil, fmt.Errorf("invalid rule specification at index [%d]: %w", idx, err)
}
if rule.UID != "" {
if existingIdx, ok := uids[rule.UID]; ok {
return nil, fmt.Errorf("rule [%d] has UID %s that is already assigned to another rule at index %d", idx, rule.UID, existingIdx)
}
uids[rule.UID] = idx
}
result = append(result, rule)
}
return result, nil
}


@@ -0,0 +1,716 @@
package api
import (
"errors"
"fmt"
"strconv"
"testing"
"time"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
"golang.org/x/exp/rand"
models2 "github.com/grafana/grafana/pkg/models"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
var allNoData = []apimodels.NoDataState{
apimodels.OK,
apimodels.NoData,
apimodels.Alerting,
}
var allExecError = []apimodels.ExecutionErrorState{
apimodels.ErrorErrState,
apimodels.AlertingErrState,
}
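// config generates random base and default rule evaluation intervals for tests.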
func config(t *testing.T) *setting.UnifiedAlertingSettings {
t.Helper()
baseInterval := time.Duration(rand.Intn(99)+1) * time.Second
result := &setting.UnifiedAlertingSettings{
BaseInterval: baseInterval,
DefaultRuleEvaluationInterval: baseInterval * time.Duration(rand.Intn(9)+1),
}
t.Logf("Config Base interval is [%v]", result.BaseInterval)
return result
}
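// validRule creates a PostableExtendedRuleNode that is expected to pass validation.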
func validRule() apimodels.PostableExtendedRuleNode {
return apimodels.PostableExtendedRuleNode{
ApiRuleNode: &apimodels.ApiRuleNode{
For: model.Duration(rand.Int63n(1000)),
Labels: map[string]string{
"test-label": "data",
},
Annotations: map[string]string{
"test-annotation": "data",
},
},
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
Title: fmt.Sprintf("TEST-ALERT-%d", rand.Int63()),
Condition: "A",
Data: []models.AlertQuery{
{
RefID: "A",
QueryType: "TEST",
RelativeTimeRange: models.RelativeTimeRange{
From: 10,
To: 0,
},
DatasourceUID: "DATASOURCE_TEST",
Model: nil,
},
},
UID: util.GenerateShortUID(),
NoDataState: allNoData[rand.Intn(len(allNoData))],
ExecErrState: allExecError[rand.Intn(len(allExecError))],
},
}
}
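// validGroup wraps the given rules into a PostableRuleGroupConfig whose interval is a random multiple (possibly zero) of the base interval.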
func validGroup(cfg *setting.UnifiedAlertingSettings, rules ...apimodels.PostableExtendedRuleNode) apimodels.PostableRuleGroupConfig {
return apimodels.PostableRuleGroupConfig{
Name: "TEST-ALERTS-" + util.GenerateShortUID(),
Interval: model.Duration(cfg.BaseInterval * time.Duration(rand.Int63n(10))),
Rules: rules,
}
}
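// randFolder generates a folder with a random ID, UID, and title.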
func randFolder() *models2.Folder {
return &models2.Folder{
Id: rand.Int63(),
Uid: util.GenerateShortUID(),
Title: "TEST-FOLDER-" + util.GenerateShortUID(),
Url: "",
Version: 0,
Created: time.Time{},
Updated: time.Time{},
UpdatedBy: 0,
CreatedBy: 0,
HasAcl: false,
}
}
func TestValidateRuleGroup(t *testing.T) {
orgId := rand.Int63()
folder := randFolder()
rules := make([]apimodels.PostableExtendedRuleNode, 0, rand.Intn(4)+1)
for i := 0; i < cap(rules); i++ {
rules = append(rules, validRule())
}
cfg := config(t)
t.Run("should validate struct and rules", func(t *testing.T) {
g := validGroup(cfg, rules...)
conditionValidations := 0
alerts, err := validateRuleGroup(&g, orgId, folder, func(condition models.Condition) error {
conditionValidations++
return nil
}, cfg)
require.NoError(t, err)
require.Len(t, alerts, len(rules))
require.Equal(t, len(rules), conditionValidations)
})
t.Run("should default to default interval from config if group interval is 0", func(t *testing.T) {
g := validGroup(cfg, rules...)
g.Interval = 0
alerts, err := validateRuleGroup(&g, orgId, folder, func(condition models.Condition) error {
return nil
}, cfg)
require.NoError(t, err)
for _, alert := range alerts {
require.Equal(t, int64(cfg.DefaultRuleEvaluationInterval.Seconds()), alert.IntervalSeconds)
}
})
}
func TestValidateRuleGroupFailures(t *testing.T) {
orgId := rand.Int63()
folder := randFolder()
cfg := config(t)
testCases := []struct {
name string
group func() *apimodels.PostableRuleGroupConfig
assert func(t *testing.T, apiModel *apimodels.PostableRuleGroupConfig, err error)
}{
{
name: "fail if title is empty",
group: func() *apimodels.PostableRuleGroupConfig {
g := validGroup(cfg)
g.Name = ""
return &g
},
},
{
name: "fail if title is too long",
group: func() *apimodels.PostableRuleGroupConfig {
g := validGroup(cfg)
for len(g.Name) < store.AlertRuleMaxRuleGroupNameLength {
g.Name += g.Name
}
return &g
},
},
{
name: "fail if interval is negative",
group: func() *apimodels.PostableRuleGroupConfig {
g := validGroup(cfg)
g.Interval = model.Duration(-(rand.Int63n(1000) + 1))
return &g
},
},
{
name: "fail if interval is not aligned with base interval",
group: func() *apimodels.PostableRuleGroupConfig {
g := validGroup(cfg)
g.Interval = model.Duration(cfg.BaseInterval + time.Duration(rand.Intn(10)+1)*time.Second)
return &g
},
},
{
name: "fail if two rules have same UID",
group: func() *apimodels.PostableRuleGroupConfig {
r1 := validRule()
r2 := validRule()
uid := util.GenerateShortUID()
r1.GrafanaManagedAlert.UID = uid
r2.GrafanaManagedAlert.UID = uid
g := validGroup(cfg, r1, r2)
return &g
},
assert: func(t *testing.T, apiModel *apimodels.PostableRuleGroupConfig, err error) {
require.Contains(t, err.Error(), apiModel.Rules[0].GrafanaManagedAlert.UID)
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
g := testCase.group()
_, err := validateRuleGroup(g, orgId, folder, func(condition models.Condition) error {
return nil
}, cfg)
require.Error(t, err)
if testCase.assert != nil {
testCase.assert(t, g, err)
}
})
}
}
func TestValidateRuleNode_NoUID(t *testing.T) {
orgId := rand.Int63()
folder := randFolder()
name := util.GenerateShortUID()
var cfg = config(t)
interval := cfg.BaseInterval * time.Duration(rand.Int63n(10)+1)
testCases := []struct {
name string
rule func() *apimodels.PostableExtendedRuleNode
assert func(t *testing.T, model *apimodels.PostableExtendedRuleNode, rule *models.AlertRule)
}{
{
name: "coverts api model to AlertRule",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, int64(0), alert.ID)
require.Equal(t, orgId, alert.OrgID)
require.Equal(t, api.GrafanaManagedAlert.Title, alert.Title)
require.Equal(t, api.GrafanaManagedAlert.Condition, alert.Condition)
require.Equal(t, api.GrafanaManagedAlert.Data, alert.Data)
require.Equal(t, time.Time{}, alert.Updated)
require.Equal(t, int64(interval.Seconds()), alert.IntervalSeconds)
require.Equal(t, int64(0), alert.Version)
require.Equal(t, api.GrafanaManagedAlert.UID, alert.UID)
require.Equal(t, folder.Uid, alert.NamespaceUID)
require.Nil(t, alert.DashboardUID)
require.Nil(t, alert.PanelID)
require.Equal(t, name, alert.RuleGroup)
require.Equal(t, models.NoDataState(api.GrafanaManagedAlert.NoDataState), alert.NoDataState)
require.Equal(t, models.ExecutionErrorState(api.GrafanaManagedAlert.ExecErrState), alert.ExecErrState)
require.Equal(t, time.Duration(api.ApiRuleNode.For), alert.For)
require.Equal(t, api.ApiRuleNode.Annotations, alert.Annotations)
require.Equal(t, api.ApiRuleNode.Labels, alert.Labels)
},
},
{
name: "coverts api without ApiRuleNode",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode = nil
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, time.Duration(0), alert.For)
require.Nil(t, alert.Annotations)
require.Nil(t, alert.Labels)
},
},
{
name: "defaults to NoData if NoDataState is empty",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.NoDataState = ""
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, models.NoData, alert.NoDataState)
},
},
{
name: "defaults to Alerting if ExecErrState is empty",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.ExecErrState = ""
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, models.AlertingErrState, alert.ExecErrState)
},
},
{
name: "extracts Dashboard UID and Panel Id from annotations",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.DashboardUIDAnnotation: util.GenerateShortUID(),
models.PanelIDAnnotation: strconv.Itoa(rand.Int()),
}
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, api.ApiRuleNode.Annotations[models.DashboardUIDAnnotation], *alert.DashboardUID)
panelId, err := strconv.Atoi(api.ApiRuleNode.Annotations[models.PanelIDAnnotation])
require.NoError(t, err)
require.Equal(t, int64(panelId), *alert.PanelID)
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
r := testCase.rule()
r.GrafanaManagedAlert.UID = ""
alert, err := validateRuleNode(r, name, interval, orgId, folder, func(condition models.Condition) error {
return nil
}, cfg)
require.NoError(t, err)
testCase.assert(t, r, alert)
})
}
t.Run("accepts empty group name", func(t *testing.T) {
r := validRule()
alert, err := validateRuleNode(&r, "", interval, orgId, folder, func(condition models.Condition) error {
return nil
}, cfg)
require.NoError(t, err)
require.Equal(t, "", alert.RuleGroup)
})
}
func TestValidateRuleNodeFailures_NoUID(t *testing.T) {
orgId := rand.Int63()
folder := randFolder()
cfg := config(t)
successValidation := func(condition models.Condition) error {
return nil
}
testCases := []struct {
name string
interval *time.Duration
rule func() *apimodels.PostableExtendedRuleNode
conditionValidation func(condition models.Condition) error
assert func(t *testing.T, model *apimodels.PostableExtendedRuleNode, err error)
allowedIfNoUId bool
}{
{
name: "fail if GrafanaManagedAlert is not specified",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert = nil
return &r
},
},
{
name: "fail if title is empty",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Title = ""
return &r
},
},
{
name: "fail if title is too long",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
for len(r.GrafanaManagedAlert.Title) < store.AlertRuleMaxTitleLength {
r.GrafanaManagedAlert.Title += r.GrafanaManagedAlert.Title
}
return &r
},
},
{
name: "fail if NoDataState is not known",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.NoDataState = apimodels.NoDataState(util.GenerateShortUID())
return &r
},
},
{
name: "fail if ExecErrState is not known",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.ExecErrState = apimodels.ExecutionErrorState(util.GenerateShortUID())
return &r
},
},
{
name: "fail if there are not data (nil)",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Data = nil
return &r
},
},
{
name: "fail if there are not data (empty)",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Data = make([]models.AlertQuery, 0, 1)
return &r
},
},
{
name: "fail if validator function returns error",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
return &r
},
conditionValidation: func(condition models.Condition) error {
return errors.New("BAD alert condition")
},
},
{
name: "fail if Dashboard UID is specified but not Panel ID",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.DashboardUIDAnnotation: util.GenerateShortUID(),
}
return &r
},
},
{
name: "fail if Dashboard UID is specified and Panel ID is NaN",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.DashboardUIDAnnotation: util.GenerateShortUID(),
models.PanelIDAnnotation: util.GenerateShortUID(),
}
return &r
},
},
{
name: "fail if PanelID is specified but not Dashboard UID ",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.PanelIDAnnotation: "0",
}
return &r
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
r := testCase.rule()
if r.GrafanaManagedAlert != nil {
r.GrafanaManagedAlert.UID = ""
}
f := successValidation
if testCase.conditionValidation != nil {
f = testCase.conditionValidation
}
interval := cfg.BaseInterval
if testCase.interval != nil {
interval = *testCase.interval
}
_, err := validateRuleNode(r, "", interval, orgId, folder, f, cfg)
require.Error(t, err)
if testCase.assert != nil {
testCase.assert(t, r, err)
}
})
}
}
func TestValidateRuleNode_UID(t *testing.T) {
orgId := rand.Int63()
folder := randFolder()
name := util.GenerateShortUID()
var cfg = config(t)
interval := cfg.BaseInterval * time.Duration(rand.Int63n(10)+1)
testCases := []struct {
name string
rule func() *apimodels.PostableExtendedRuleNode
assert func(t *testing.T, model *apimodels.PostableExtendedRuleNode, rule *models.AlertRule)
}{
{
name: "use empty Title",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Title = ""
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, "", alert.Title)
},
},
{
name: "use empty NoData if NoDataState is empty",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.NoDataState = ""
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, models.NoDataState(""), alert.NoDataState)
},
},
{
name: "use empty Alerting if ExecErrState is empty",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.ExecErrState = ""
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, models.ExecutionErrorState(""), alert.ExecErrState)
},
},
{
name: "use empty Condition and Data if they are empty",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Condition = ""
r.GrafanaManagedAlert.Data = nil
if rand.Int63()%2 == 0 {
r.GrafanaManagedAlert.Data = make([]models.AlertQuery, 0)
}
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, "", alert.Condition)
require.Len(t, alert.Data, 0)
},
},
{
name: "extracts Dashboard UID and Panel Id from annotations",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.DashboardUIDAnnotation: util.GenerateShortUID(),
models.PanelIDAnnotation: strconv.Itoa(rand.Int()),
}
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Equal(t, api.ApiRuleNode.Annotations[models.DashboardUIDAnnotation], *alert.DashboardUID)
panelId, err := strconv.Atoi(api.ApiRuleNode.Annotations[models.PanelIDAnnotation])
require.NoError(t, err)
require.Equal(t, int64(panelId), *alert.PanelID)
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
r := testCase.rule()
alert, err := validateRuleNode(r, name, interval, orgId, folder, func(condition models.Condition) error {
return nil
}, cfg)
require.NoError(t, err)
testCase.assert(t, r, alert)
})
}
t.Run("accepts empty group name", func(t *testing.T) {
r := validRule()
alert, err := validateRuleNode(&r, "", interval, orgId, folder, func(condition models.Condition) error {
return nil
}, cfg)
require.NoError(t, err)
require.Equal(t, "", alert.RuleGroup)
})
}
func TestValidateRuleNodeFailures_UID(t *testing.T) {
orgId := rand.Int63()
folder := randFolder()
cfg := config(t)
successValidation := func(condition models.Condition) error {
return nil
}
testCases := []struct {
name string
interval *time.Duration
rule func() *apimodels.PostableExtendedRuleNode
conditionValidation func(condition models.Condition) error
assert func(t *testing.T, model *apimodels.PostableExtendedRuleNode, err error)
}{
{
name: "fail if GrafanaManagedAlert is not specified",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert = nil
return &r
},
},
{
name: "fail if title is too long",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
for len(r.GrafanaManagedAlert.Title) < store.AlertRuleMaxTitleLength {
r.GrafanaManagedAlert.Title += r.GrafanaManagedAlert.Title
}
return &r
},
},
{
name: "fail if there are not data (nil) but condition is set",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Data = nil
r.GrafanaManagedAlert.Condition = "A"
return &r
},
},
{
name: "fail if there are not data (empty) but condition is set",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Data = make([]models.AlertQuery, 0, 1)
r.GrafanaManagedAlert.Condition = "A"
return &r
},
},
{
name: "fail if validator function returns error",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
return &r
},
conditionValidation: func(condition models.Condition) error {
return errors.New("BAD alert condition")
},
},
{
name: "fail if Dashboard UID is specified but not Panel ID",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.DashboardUIDAnnotation: util.GenerateShortUID(),
}
return &r
},
},
{
name: "fail if Dashboard UID is specified and Panel ID is NaN",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.DashboardUIDAnnotation: util.GenerateShortUID(),
models.PanelIDAnnotation: util.GenerateShortUID(),
}
return &r
},
},
{
name: "fail if PanelID is specified but not Dashboard UID ",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.ApiRuleNode.Annotations = map[string]string{
models.PanelIDAnnotation: "0",
}
return &r
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
r := testCase.rule()
f := successValidation
if testCase.conditionValidation != nil {
f = testCase.conditionValidation
}
interval := cfg.BaseInterval
if testCase.interval != nil {
interval = *testCase.interval
}
_, err := validateRuleNode(r, "", interval, orgId, folder, f, cfg)
require.Error(t, err)
if testCase.assert != nil {
testCase.assert(t, r, err)
}
})
}
}
func TestValidateRuleNodeIntervalFailures(t *testing.T) {
cfg := config(t)
testCases := []struct {
name string
interval time.Duration
}{
{
name: "fail if interval is negative",
interval: -time.Duration(rand.Int63n(10)+1) * time.Second,
},
{
name: "fail if interval is 0",
interval: 0,
},
{
name: "fail if interval is not multiple of base interval",
interval: cfg.BaseInterval + time.Duration(rand.Int63n(int64(cfg.BaseInterval.Seconds())-2)+1)*time.Second,
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
r := validRule()
f := func(condition models.Condition) error {
return nil
}
_, err := validateRuleNode(&r, util.GenerateShortUID(), testCase.interval, rand.Int63(), randFolder(), f, cfg)
require.Error(t, err)
})
}
}


@@ -202,6 +202,13 @@ func validateCondition(ctx context.Context, c ngmodels.Condition, user *models.S
return nil
}
// conditionValidator returns a curried validateCondition that accepts only a condition
func conditionValidator(c *models.ReqContext, cache datasources.CacheService) func(ngmodels.Condition) error {
return func(condition ngmodels.Condition) error {
return validateCondition(c.Req.Context(), condition, c.SignedInUser, c.SkipCache, cache)
}
}
func validateQueriesAndExpressions(ctx context.Context, data []ngmodels.AlertQuery, user *models.SignedInUser, skipCache bool, datasourceCache datasources.CacheService) (map[string]struct{}, error) {
refIDs := make(map[string]struct{})
if len(data) == 0 {


@@ -27,6 +27,19 @@ func (noDataState NoDataState) String() string {
return string(noDataState)
}
func NoDataStateFromString(state string) (NoDataState, error) {
switch state {
case string(Alerting):
return Alerting, nil
case string(NoData):
return NoData, nil
case string(OK):
return OK, nil
default:
return "", fmt.Errorf("unknown NoData state option %s", state)
}
}
const (
Alerting NoDataState = "Alerting"
NoData NoDataState = "NoData"
@@ -39,9 +52,23 @@ func (executionErrorState ExecutionErrorState) String() string {
return string(executionErrorState)
}
func ErrStateFromString(opt string) (ExecutionErrorState, error) {
switch opt {
case string(Alerting):
return AlertingErrState, nil
case string(ErrorErrState):
return ErrorErrState, nil
case string(OkErrState):
return OkErrState, nil
default:
return "", fmt.Errorf("unknown Error state option %s", opt)
}
}
const (
AlertingErrState ExecutionErrorState = "Alerting"
ErrorErrState ExecutionErrorState = "Error"
OkErrState ExecutionErrorState = "OK"
)
const (
@@ -218,3 +245,37 @@ func (c Condition) IsValid() bool {
// TODO search for refIDs in QueriesAndExpressions
return len(c.Data) != 0
}
// PatchPartialAlertRule patches `ruleToPatch` from `existingRule`: if a field of `ruleToPatch` is empty or has its default value, it is populated with the value of the corresponding field of `existingRule`.
// There are several exceptions:
// 1. The following fields are not patched and therefore will be ignored: AlertRule.ID, AlertRule.OrgID, AlertRule.Updated, AlertRule.Version, AlertRule.UID, AlertRule.DashboardUID, AlertRule.PanelID, AlertRule.Annotations and AlertRule.Labels
// 2. The following fields are patched together:
// - AlertRule.Condition and AlertRule.Data
// If either of the pair is empty, both are taken from the existing rule.
func PatchPartialAlertRule(existingRule *AlertRule, ruleToPatch *AlertRule) {
if ruleToPatch.Title == "" {
ruleToPatch.Title = existingRule.Title
}
if ruleToPatch.Condition == "" || len(ruleToPatch.Data) == 0 {
ruleToPatch.Condition = existingRule.Condition
ruleToPatch.Data = existingRule.Data
}
if ruleToPatch.IntervalSeconds == 0 {
ruleToPatch.IntervalSeconds = existingRule.IntervalSeconds
}
if ruleToPatch.NamespaceUID == "" {
ruleToPatch.NamespaceUID = existingRule.NamespaceUID
}
if ruleToPatch.RuleGroup == "" {
ruleToPatch.RuleGroup = existingRule.RuleGroup
}
if ruleToPatch.ExecErrState == "" {
ruleToPatch.ExecErrState = existingRule.ExecErrState
}
if ruleToPatch.NoDataState == "" {
ruleToPatch.NoDataState = existingRule.NoDataState
}
if ruleToPatch.For == 0 {
ruleToPatch.For = existingRule.For
}
}
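// An illustrative sketch of the patch semantics (hypothetical values, not taken from this change):
// empty fields of the submitted rule are filled in from the stored rule, while fields such as UID,
// ID, and Version are left untouched.
//
//   existing := &AlertRule{UID: "abc", Title: "cpu-high", For: time.Minute}
//   submitted := &AlertRule{UID: "abc"} // partial update: Title and For are empty
//   PatchPartialAlertRule(existing, submitted)
//   // submitted.Title == "cpu-high" and submitted.For == time.Minute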


@@ -0,0 +1,227 @@
package models
import (
"math/rand"
"strings"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/util"
)
func TestNoDataStateFromString(t *testing.T) {
allKnownNoDataStates := [...]NoDataState{
Alerting,
NoData,
OK,
}
t.Run("should parse known values", func(t *testing.T) {
for _, state := range allKnownNoDataStates {
stateStr := string(state)
actual, err := NoDataStateFromString(stateStr)
require.NoErrorf(t, err, "failed to parse a known state [%s]", stateStr)
require.Equal(t, state, actual)
}
})
t.Run("should fail to parse in different case", func(t *testing.T) {
for _, state := range allKnownNoDataStates {
stateStr := strings.ToLower(string(state))
actual, err := NoDataStateFromString(stateStr)
require.Errorf(t, err, "expected error for input value [%s]", stateStr)
require.Equal(t, NoDataState(""), actual)
}
})
t.Run("should fail to parse unknown values", func(t *testing.T) {
input := util.GenerateShortUID()
actual, err := NoDataStateFromString(input)
require.Errorf(t, err, "expected error for input value [%s]", input)
require.Equal(t, NoDataState(""), actual)
})
}
func TestErrStateFromString(t *testing.T) {
allKnownErrStates := [...]ExecutionErrorState{
AlertingErrState,
ErrorErrState,
OkErrState,
}
t.Run("should parse known values", func(t *testing.T) {
for _, state := range allKnownErrStates {
stateStr := string(state)
actual, err := ErrStateFromString(stateStr)
require.NoErrorf(t, err, "failed to parse a known state [%s]", stateStr)
require.Equal(t, state, actual)
}
})
t.Run("should fail to parse in different case", func(t *testing.T) {
for _, state := range allKnownErrStates {
stateStr := strings.ToLower(string(state))
actual, err := ErrStateFromString(stateStr)
require.Errorf(t, err, "expected error for input value [%s]", stateStr)
require.Equal(t, ExecutionErrorState(""), actual)
}
})
t.Run("should fail to parse unknown values", func(t *testing.T) {
input := util.GenerateShortUID()
actual, err := ErrStateFromString(input)
require.Errorf(t, err, "expected error for input value [%s]", input)
require.Equal(t, ExecutionErrorState(""), actual)
})
}
func TestPatchPartialAlertRule(t *testing.T) {
t.Run("patches", func(t *testing.T) {
testCases := []struct {
name string
mutator func(r *AlertRule)
}{
{
name: "title is empty",
mutator: func(r *AlertRule) {
r.Title = ""
},
},
{
name: "condition and data are empty",
mutator: func(r *AlertRule) {
r.Condition = ""
r.Data = nil
},
},
{
name: "ExecErrState is empty",
mutator: func(r *AlertRule) {
r.ExecErrState = ""
},
},
{
name: "NoDataState is empty",
mutator: func(r *AlertRule) {
r.NoDataState = ""
},
},
{
name: "For is 0",
mutator: func(r *AlertRule) {
r.For = 0
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
var existing *AlertRule
for {
existing = AlertRuleGen(func(rule *AlertRule) {
rule.For = time.Duration(rand.Int63n(1000) + 1)
})()
cloned := *existing
testCase.mutator(&cloned)
if !cmp.Equal(*existing, cloned, cmp.FilterPath(func(path cmp.Path) bool {
return path.String() == "Data.modelProps"
}, cmp.Ignore())) {
break
}
}
patch := *existing
testCase.mutator(&patch)
require.NotEqual(t, *existing, patch)
PatchPartialAlertRule(existing, &patch)
require.Equal(t, *existing, patch)
})
}
})
t.Run("does not patch", func(t *testing.T) {
testCases := []struct {
name string
mutator func(r *AlertRule)
}{
{
name: "ID",
mutator: func(r *AlertRule) {
r.ID = 0
},
},
{
name: "OrgID",
mutator: func(r *AlertRule) {
r.OrgID = 0
},
},
{
name: "Updated",
mutator: func(r *AlertRule) {
r.Updated = time.Time{}
},
},
{
name: "Version",
mutator: func(r *AlertRule) {
r.Version = 0
},
},
{
name: "UID",
mutator: func(r *AlertRule) {
r.UID = ""
},
},
{
name: "DashboardUID",
mutator: func(r *AlertRule) {
r.DashboardUID = nil
},
},
{
name: "PanelID",
mutator: func(r *AlertRule) {
r.PanelID = nil
},
},
{
name: "Annotations",
mutator: func(r *AlertRule) {
r.Annotations = nil
},
},
{
name: "Labels",
mutator: func(r *AlertRule) {
r.Labels = nil
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
var existing *AlertRule
for {
existing = AlertRuleGen()()
cloned := *existing
// make sure the generated rule does not match the mutated one
testCase.mutator(&cloned)
if !cmp.Equal(*existing, cloned, cmp.FilterPath(func(path cmp.Path) bool {
return path.String() == "Data.modelProps"
}, cmp.Ignore())) {
break
}
}
patch := *existing
testCase.mutator(&patch)
PatchPartialAlertRule(existing, &patch)
require.NotEqual(t, *existing, patch)
})
}
})
}


@@ -0,0 +1,127 @@
package models
import (
"encoding/json"
"math/rand"
"time"
"github.com/grafana/grafana/pkg/util"
)
// AlertRuleGen provides a factory function that generates a random AlertRule.
// The mutator arguments allow changing fields of the resulting structure.
func AlertRuleGen(mutators ...func(*AlertRule)) func() *AlertRule {
return func() *AlertRule {
randNoDataState := func() NoDataState {
s := [...]NoDataState{
Alerting,
NoData,
OK,
}
return s[rand.Intn(len(s))]
}
randErrState := func() ExecutionErrorState {
s := [...]ExecutionErrorState{
AlertingErrState,
ErrorErrState,
OkErrState,
}
return s[rand.Intn(len(s))]
}
interval := (rand.Int63n(6) + 1) * 10
forInterval := time.Duration(interval*rand.Int63n(6)) * time.Second
var annotations map[string]string = nil
if rand.Int63()%2 == 0 {
qty := rand.Intn(5)
annotations = make(map[string]string, qty)
for i := 0; i < qty; i++ {
annotations[util.GenerateShortUID()] = util.GenerateShortUID()
}
}
var labels map[string]string = nil
if rand.Int63()%2 == 0 {
qty := rand.Intn(5)
labels = make(map[string]string, qty)
for i := 0; i < qty; i++ {
labels[util.GenerateShortUID()] = util.GenerateShortUID()
}
}
var dashUID *string = nil
var panelID *int64 = nil
if rand.Int63()%2 == 0 {
d := util.GenerateShortUID()
dashUID = &d
p := rand.Int63()
panelID = &p
}
rule := &AlertRule{
ID: rand.Int63(),
OrgID: rand.Int63(),
Title: "TEST-ALERT-" + util.GenerateShortUID(),
Condition: "A",
Data: []AlertQuery{
{
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type":"math",
"expression":"2 + 1 < 1"
}`),
RelativeTimeRange: RelativeTimeRange{
From: Duration(5 * time.Hour),
To: Duration(3 * time.Hour),
},
RefID: "A",
}},
Updated: time.Now().Add(-time.Duration(rand.Intn(100) + 1)),
IntervalSeconds: rand.Int63n(60) + 1,
Version: rand.Int63(),
UID: util.GenerateShortUID(),
NamespaceUID: util.GenerateShortUID(),
DashboardUID: dashUID,
PanelID: panelID,
RuleGroup: "TEST-GROUP-" + util.GenerateShortUID(),
NoDataState: randNoDataState(),
ExecErrState: randErrState(),
For: forInterval,
Annotations: annotations,
Labels: labels,
}
for _, mutator := range mutators {
mutator(rule)
}
return rule
}
}
// GenerateUniqueAlertRules generates many random alert rules and makes sure that they have unique UIDs.
// It returns a map keyed by rule UID and a slice of the same rules.
func GenerateUniqueAlertRules(count int, f func() *AlertRule) (map[string]*AlertRule, []*AlertRule) {
uIDs := make(map[string]*AlertRule, count)
result := make([]*AlertRule, 0, count)
for len(result) < count {
rule := f()
if _, ok := uIDs[rule.UID]; ok {
continue
}
result = append(result, rule)
uIDs[rule.UID] = rule
}
return uIDs, result
}
// GenerateAlertRules generates many random alert rules. It does not guarantee that the rules have unique UIDs.
func GenerateAlertRules(count int, f func() *AlertRule) []*AlertRule {
result := make([]*AlertRule, 0, count)
for len(result) < count {
rule := f()
result = append(result, rule)
}
return result
}
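// An illustrative sketch of how the generators compose in a test (hypothetical test body, not part of this package):
//
//   gen := AlertRuleGen(func(r *AlertRule) { r.OrgID = 1 })
//   rules := GenerateAlertRules(5, gen)               // five random rules for org 1
//   byUID, unique := GenerateUniqueAlertRules(5, gen) // the same, but with guaranteed-unique UIDs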


@@ -2,9 +2,7 @@ package store
import (
"context"
"errors"
"fmt"
"strconv"
"strings"
"time"
@@ -50,7 +48,7 @@ type RuleStore interface {
GetNamespaceByTitle(context.Context, string, int64, *models.SignedInUser, bool) (*models.Folder, error)
GetOrgRuleGroups(ctx context.Context, query *ngmodels.ListOrgRuleGroupsQuery) error
UpsertAlertRules(ctx context.Context, rule []UpsertRule) error
UpdateRuleGroup(ctx context.Context, cmd UpdateRuleGroupCmd) error
InTransaction(ctx context.Context, f func(ctx context.Context) error) error
}
func getAlertRuleByUID(sess *sqlstore.DBSession, alertRuleUID string, orgID int64) (*ngmodels.AlertRule, error) {
@@ -188,18 +186,6 @@ func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) erro
newRules := make([]ngmodels.AlertRule, 0, len(rules))
ruleVersions := make([]ngmodels.AlertRuleVersion, 0, len(rules))
for _, r := range rules {
if r.Existing == nil && r.New.UID != "" {
// check by UID
existingAlertRule, err := getAlertRuleByUID(sess, r.New.UID, r.New.OrgID)
if err != nil {
if errors.Is(err, ngmodels.ErrAlertRuleNotFound) {
return fmt.Errorf("failed to get alert rule %s: %w", r.New.UID, err)
}
return err
}
r.Existing = existingAlertRule
}
var parentVersion int64
switch r.Existing {
case nil: // new rule
@@ -208,23 +194,8 @@ func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) erro
return fmt.Errorf("failed to generate UID for alert rule %q: %w", r.New.Title, err)
}
r.New.UID = uid
if r.New.IntervalSeconds == 0 {
r.New.IntervalSeconds = int64(st.DefaultInterval.Seconds())
}
r.New.Version = 1
if r.New.NoDataState == "" {
// set default no data state
r.New.NoDataState = ngmodels.NoData
}
if r.New.ExecErrState == "" {
// set default error state
r.New.ExecErrState = ngmodels.AlertingErrState
}
if err := st.validateAlertRule(r.New); err != nil {
return err
}
@@ -232,37 +203,11 @@ func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) erro
if err := (&r.New).PreSave(TimeNow); err != nil {
return err
}
newRules = append(newRules, r.New)
default:
// explicitly set the existing properties if missing
// do not rely on xorm
if r.New.Title == "" {
r.New.Title = r.Existing.Title
}
if r.New.Condition == "" {
r.New.Condition = r.Existing.Condition
}
if len(r.New.Data) == 0 {
r.New.Data = r.Existing.Data
}
r.New.ID = r.Existing.ID
r.New.OrgID = r.Existing.OrgID
r.New.NamespaceUID = r.Existing.NamespaceUID
r.New.RuleGroup = r.Existing.RuleGroup
r.New.Version = r.Existing.Version + 1
if r.New.ExecErrState == "" {
r.New.ExecErrState = r.Existing.ExecErrState
}
if r.New.NoDataState == "" {
r.New.NoDataState = r.Existing.NoDataState
}
if err := st.validateAlertRule(r.New); err != nil {
return err
}
@ -273,9 +218,11 @@ func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) erro
// no way to update multiple rules at once
if _, err := sess.ID(r.Existing.ID).AllCols().Update(r.New); err != nil {
return fmt.Errorf("failed to update rule %s: %w", r.New.Title, err)
if st.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
return ngmodels.ErrAlertRuleUniqueConstraintViolation
}
return fmt.Errorf("failed to update rule [%s] %s: %w", r.New.UID, r.New.Title, err)
}
parentVersion = r.Existing.Version
}
@ -301,6 +248,9 @@ func (st DBstore) UpsertAlertRules(ctx context.Context, rules []UpsertRule) erro
if len(newRules) > 0 {
if _, err := sess.Insert(&newRules); err != nil {
if st.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
return ngmodels.ErrAlertRuleUniqueConstraintViolation
}
return fmt.Errorf("failed to create new rules: %w", err)
}
}
@ -506,100 +456,6 @@ func (st DBstore) validateAlertRule(alertRule ngmodels.AlertRule) error {
return nil
}
// UpdateRuleGroup creates new rules and updates and/or deletes existing rules
func (st DBstore) UpdateRuleGroup(ctx context.Context, cmd UpdateRuleGroupCmd) error {
return st.SQLStore.WithTransactionalDbSession(ctx, func(sess *sqlstore.DBSession) error {
ruleGroup := cmd.RuleGroupConfig.Name
q := &ngmodels.ListRuleGroupAlertRulesQuery{
OrgID: cmd.OrgID,
NamespaceUID: cmd.NamespaceUID,
RuleGroup: ruleGroup,
}
if err := st.GetRuleGroupAlertRules(ctx, q); err != nil {
return err
}
existingGroupRules := q.Result
existingGroupRulesUIDs := make(map[string]ngmodels.AlertRule, len(existingGroupRules))
for _, r := range existingGroupRules {
existingGroupRulesUIDs[r.UID] = *r
}
upsertRules := make([]UpsertRule, 0)
for _, r := range cmd.RuleGroupConfig.Rules {
if r.GrafanaManagedAlert == nil {
continue
}
newAlertRule := ngmodels.AlertRule{
OrgID: cmd.OrgID,
Title: r.GrafanaManagedAlert.Title,
Condition: r.GrafanaManagedAlert.Condition,
Data: r.GrafanaManagedAlert.Data,
UID: r.GrafanaManagedAlert.UID,
IntervalSeconds: int64(time.Duration(cmd.RuleGroupConfig.Interval).Seconds()),
NamespaceUID: cmd.NamespaceUID,
RuleGroup: ruleGroup,
NoDataState: ngmodels.NoDataState(r.GrafanaManagedAlert.NoDataState),
ExecErrState: ngmodels.ExecutionErrorState(r.GrafanaManagedAlert.ExecErrState),
}
if r.ApiRuleNode != nil {
newAlertRule.For = time.Duration(r.ApiRuleNode.For)
newAlertRule.Annotations = r.ApiRuleNode.Annotations
newAlertRule.Labels = r.ApiRuleNode.Labels
}
if s := newAlertRule.Annotations[ngmodels.DashboardUIDAnnotation]; s != "" {
newAlertRule.DashboardUID = &s
}
if s := newAlertRule.Annotations[ngmodels.PanelIDAnnotation]; s != "" {
panelID, err := strconv.ParseInt(s, 10, 64)
if err != nil {
return fmt.Errorf("the %s annotation does not contain a valid Panel ID: %w", ngmodels.PanelIDAnnotation, err)
}
newAlertRule.PanelID = &panelID
}
upsertRule := UpsertRule{
New: newAlertRule,
}
if existingGroupRule, ok := existingGroupRulesUIDs[r.GrafanaManagedAlert.UID]; ok {
upsertRule.Existing = &existingGroupRule
// remove the rule from existingGroupRulesUIDs
delete(existingGroupRulesUIDs, r.GrafanaManagedAlert.UID)
}
upsertRules = append(upsertRules, upsertRule)
}
if err := st.UpsertAlertRules(ctx, upsertRules); err != nil {
if st.SQLStore.Dialect.IsUniqueConstraintViolation(err) {
return ngmodels.ErrAlertRuleUniqueConstraintViolation
}
return err
}
// delete instances for rules that will not be removed
for _, rule := range existingGroupRules {
if _, ok := existingGroupRulesUIDs[rule.UID]; !ok {
if err := st.DeleteAlertInstancesByRuleUID(ctx, cmd.OrgID, rule.UID); err != nil {
return err
}
}
}
// delete the remaining rules
for ruleUID := range existingGroupRulesUIDs {
if err := st.DeleteAlertRuleByUID(ctx, cmd.OrgID, ruleUID); err != nil {
return err
}
}
return nil
})
}
func (st DBstore) GetOrgRuleGroups(ctx context.Context, query *ngmodels.ListOrgRuleGroupsQuery) error {
return st.SQLStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error {
var ruleGroups [][]string
@ -646,3 +502,7 @@ WHERE org_id = ?`
return nil
})
}
func (st *DBstore) InTransaction(ctx context.Context, f func(ctx context.Context) error) error {
return st.SQLStore.InTransaction(ctx, f)
}
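A minimal sketch of how a caller can combine the remaining store methods inside this new transactional wrapper. The change-set variables (upserts, uidsToDelete, orgID) are illustrative; in this commit the actual change calculation happens upstream in the API controller:

err := store.InTransaction(ctx, func(ctx context.Context) error {
	// every read/write inside f shares one transaction via the context
	if err := store.UpsertAlertRules(ctx, upserts); err != nil {
		return fmt.Errorf("failed to add or update rules: %w", err)
	}
	for _, uid := range uidsToDelete {
		if err := store.DeleteAlertRuleByUID(ctx, orgID, uid); err != nil {
			return err
		}
	}
	return nil
})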

View File

@ -42,11 +42,32 @@ type FakeRuleStore struct {
}
// PutRule puts the rules in the Rules map. If a rule with the same UID already exists in the namespace, it is overwritten.
func (f *FakeRuleStore) PutRule(_ context.Context, r *models.AlertRule) {
func (f *FakeRuleStore) PutRule(_ context.Context, rules ...*models.AlertRule) {
f.mtx.Lock()
defer f.mtx.Unlock()
f.Rules[r.OrgID][r.RuleGroup][r.NamespaceUID] = []*models.AlertRule{
r,
mainloop:
for _, r := range rules {
rgs, ok := f.Rules[r.OrgID]
if !ok {
f.Rules[r.OrgID] = map[string]map[string][]*models.AlertRule{}
}
rg, ok := rgs[r.RuleGroup]
if !ok {
f.Rules[r.OrgID][r.RuleGroup] = map[string][]*models.AlertRule{}
}
_, ok = rg[r.NamespaceUID]
if !ok {
f.Rules[r.OrgID][r.RuleGroup][r.NamespaceUID] = []*models.AlertRule{}
}
for idx, rulePtr := range f.Rules[r.OrgID][r.RuleGroup][r.NamespaceUID] {
if rulePtr.UID == r.UID {
f.Rules[r.OrgID][r.RuleGroup][r.NamespaceUID][idx] = r
continue mainloop
}
}
f.Rules[r.OrgID][r.RuleGroup][r.NamespaceUID] = append(f.Rules[r.OrgID][r.RuleGroup][r.NamespaceUID], r)
}
}
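The upsert-by-UID semantics above can be exercised like this (a sketch; f is a *FakeRuleStore and the rules come from the generators added in this commit):

f.PutRule(ctx, rule1, rule2) // both appended under their org/group/namespace
updated := *rule1
updated.Title = "renamed"
f.PutRule(ctx, &updated) // same UID: replaced in place, nothing appended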
@ -256,6 +277,10 @@ func (f *FakeRuleStore) UpdateRuleGroup(_ context.Context, cmd UpdateRuleGroupCm
return nil
}
func (f *FakeRuleStore) InTransaction(ctx context.Context, fn func(c context.Context) error) error {
return fn(ctx)
}
type FakeInstanceStore struct {
mtx sync.Mutex
RecordedOps []interface{}

View File

@ -12,7 +12,6 @@ import (
databasestore "github.com/grafana/grafana/pkg/services/dashboards/database"
dashboardservice "github.com/grafana/grafana/pkg/services/dashboards/manager"
"github.com/grafana/grafana/pkg/services/ngalert"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/metrics"
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
@ -23,7 +22,6 @@ import (
"github.com/grafana/grafana/pkg/util"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
)
@ -59,36 +57,33 @@ func SetupTestEnv(t *testing.T, baseInterval time.Duration) (*ngalert.AlertNG, *
// CreateTestAlertRule creates a dummy alert definition to be used by the tests.
func CreateTestAlertRule(t *testing.T, ctx context.Context, dbstore *store.DBstore, intervalSeconds int64, orgID int64) *models.AlertRule {
ruleGroup := fmt.Sprintf("ruleGroup-%s", util.GenerateShortUID())
err := dbstore.UpdateRuleGroup(ctx, store.UpdateRuleGroupCmd{
OrgID: orgID,
NamespaceUID: "namespace",
RuleGroupConfig: apimodels.PostableRuleGroupConfig{
Name: ruleGroup,
Interval: model.Duration(time.Duration(intervalSeconds) * time.Second),
Rules: []apimodels.PostableExtendedRuleNode{
{
ApiRuleNode: &apimodels.ApiRuleNode{
Annotations: map[string]string{"testAnnoKey": "testAnnoValue"},
},
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
Title: fmt.Sprintf("an alert definition %s", util.GenerateShortUID()),
Condition: "A",
Data: []models.AlertQuery{
{
Model: json.RawMessage(`{
err := dbstore.UpsertAlertRules(ctx, []store.UpsertRule{
{
New: models.AlertRule{
ID: 0,
OrgID: orgID,
Title: fmt.Sprintf("an alert definition %s", util.GenerateShortUID()),
Condition: "A",
Data: []models.AlertQuery{
{
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type":"math",
"expression":"2 + 2 > 1"
}`),
RelativeTimeRange: models.RelativeTimeRange{
From: models.Duration(5 * time.Hour),
To: models.Duration(3 * time.Hour),
},
RefID: "A",
},
RelativeTimeRange: models.RelativeTimeRange{
From: models.Duration(5 * time.Hour),
To: models.Duration(3 * time.Hour),
},
RefID: "A",
},
},
Annotations: map[string]string{"testAnnoKey": "testAnnoValue"},
IntervalSeconds: intervalSeconds,
NamespaceUID: "namespace",
RuleGroup: ruleGroup,
NoDataState: models.NoData,
ExecErrState: models.AlertingErrState,
},
},
})
@ -107,38 +102,3 @@ func CreateTestAlertRule(t *testing.T, ctx context.Context, dbstore *store.DBsto
t.Logf("alert definition: %v with title: %q interval: %d created", rule.GetKey(), rule.Title, rule.IntervalSeconds)
return rule
}
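A sketch of how tests consume this helper after the change; it now writes through UpsertAlertRules directly instead of the removed UpdateRuleGroup path:

// creates a rule in org 1 that is evaluated every 10 seconds
rule := CreateTestAlertRule(t, ctx, dbstore, 10, 1)
require.Equal(t, int64(10), rule.IntervalSeconds)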
// UpdateTestAlertRuleIntervalSeconds updates a dummy alert rule's interval, to be used by the tests.
func UpdateTestAlertRuleIntervalSeconds(t *testing.T, ctx context.Context, dbstore *store.DBstore, existingRule *models.AlertRule, intervalSeconds int64) *models.AlertRule {
cmd := store.UpdateRuleGroupCmd{
OrgID: 1,
NamespaceUID: "namespace",
RuleGroupConfig: apimodels.PostableRuleGroupConfig{
Name: existingRule.RuleGroup,
Interval: model.Duration(time.Duration(intervalSeconds) * time.Second),
Rules: []apimodels.PostableExtendedRuleNode{
{
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
UID: existingRule.UID,
},
},
},
},
}
err := dbstore.UpdateRuleGroup(ctx, cmd)
require.NoError(t, err)
q := models.ListRuleGroupAlertRulesQuery{
OrgID: 1,
NamespaceUID: "namespace",
RuleGroup: existingRule.RuleGroup,
}
err = dbstore.GetRuleGroupAlertRules(ctx, &q)
require.NoError(t, err)
require.NotEmpty(t, q.Result)
rule := q.Result[0]
t.Logf("alert definition: %v with title: %s and interval: %d created", rule.GetKey(), rule.Title, rule.IntervalSeconds)
return rule
}

View File

@ -908,7 +908,7 @@ func TestAlertRuleCRUD(t *testing.T) {
Data: []ngmodels.AlertQuery{},
},
},
expectedResponse: `{"message": "failed to update rule group: invalid alert rule: no queries or expressions are found"}`,
expectedResponse: `{"message": "invalid rule specification at index [0]: invalid alert rule: no queries or expressions are found"}`,
},
{
desc: "alert rule with empty title",
@ -938,7 +938,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
expectedResponse: `{"message": "failed to update rule group: invalid alert rule: title is empty"}`,
expectedResponse: `{"message": "invalid rule specification at index [0]: alert rule title cannot be empty"}`,
},
{
desc: "alert rule with too long name",
@ -968,7 +968,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
expectedResponse: `{"message": "failed to update rule group: invalid alert rule: name length should not be greater than 190"}`,
expectedResponse: `{"message": "invalid rule specification at index [0]: alert rule title is too long. Max length is 190"}`,
},
{
desc: "alert rule with too long rulegroup",
@ -998,7 +998,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
expectedResponse: `{"message": "failed to update rule group: invalid alert rule: rule group name length should not be greater than 190"}`,
expectedResponse: `{"message": "rule group name is too long. Max length is 190"}`,
},
{
desc: "alert rule with invalid interval",
@ -1029,7 +1029,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
expectedResponse: `{"message": "failed to update rule group: invalid alert rule: interval (1s) should be non-zero and divided exactly by scheduler interval: 10s"}`,
expectedResponse: `{"message": "rule evaluation interval (1 second) should be positive number that is multiple of the base interval of 10 seconds"}`,
},
{
desc: "alert rule with unknown datasource",
@ -1059,7 +1059,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
expectedResponse: `{"message": "failed to validate alert rule \"AlwaysFiring\": invalid query A: data source not found: unknown"}`,
expectedResponse: `{"message": "invalid rule specification at index [0]: failed to validate condition of alert rule AlwaysFiring: invalid query A: data source not found: unknown"}`,
},
{
desc: "alert rule with invalid condition",
@ -1089,7 +1089,7 @@ func TestAlertRuleCRUD(t *testing.T) {
},
},
},
expectedResponse: `{"message": "failed to validate alert rule \"AlwaysFiring\": condition B not found in any query or expression: it should be one of: [A]"}`,
expectedResponse: `{"message": "invalid rule specification at index [0]: failed to validate condition of alert rule AlwaysFiring: condition B not found in any query or expression: it should be one of: [A]"}`,
},
}
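The messages asserted above now come from group-level validation in the API controller rather than from the store. A rough sketch of the two group checks they imply (the helper name and signature are illustrative, not the exact function from this commit):

func validateGroupBasics(name string, interval, baseInterval time.Duration) error {
	if len(name) > 190 {
		return errors.New("rule group name is too long. Max length is 190")
	}
	if interval <= 0 || interval%baseInterval != 0 {
		return fmt.Errorf("rule evaluation interval (%v) should be positive number that is multiple of the base interval of %v", interval, baseInterval)
	}
	return nil
}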
@ -1379,7 +1379,7 @@ func TestAlertRuleCRUD(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, http.StatusNotFound, resp.StatusCode)
require.JSONEq(t, `{"message": "failed to update rule group: failed to get alert rule unknown: could not find alert rule"}`, string(b))
require.JSONEq(t, `{"message": "failed to update rule group: failed to update rule with UID unknown because could not find alert rule"}`, string(b))
// let's make sure that rule definitions are not affected by the failed POST request.
u = fmt.Sprintf("http://grafana:password@%s/api/ruler/grafana/api/v1/rules/default", grafanaListedAddr)
@ -1498,7 +1498,7 @@ func TestAlertRuleCRUD(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, http.StatusBadRequest, resp.StatusCode)
require.JSONEq(t, fmt.Sprintf(`{"message": "failed to validate alert rule \"AlwaysAlerting\": conflicting UID \"%s\" found"}`, ruleUID), string(b))
require.JSONEq(t, fmt.Sprintf(`{"message": "rule [1] has UID %s that is already assigned to another rule at index 0"}`, ruleUID), string(b))
// let's make sure that rule definitions are not affected by the failed POST request.
u = fmt.Sprintf("http://grafana:password@%s/api/ruler/grafana/api/v1/rules/default", grafanaListedAddr)
@ -1847,6 +1847,7 @@ func TestAlertRuleCRUD(t *testing.T) {
"rules":[
{
"expr":"",
"for": "30s",
"grafana_alert":{
"id":1,
"orgId":1,
@ -2115,7 +2116,7 @@ func TestQuota(t *testing.T) {
b, err := ioutil.ReadAll(resp.Body)
require.NoError(t, err)
assert.Equal(t, http.StatusForbidden, resp.StatusCode)
require.JSONEq(t, `{"message": "quota reached"}`, string(b))
require.JSONEq(t, `{"message": "quota has been exceeded"}`, string(b))
})
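The 403 above is returned before any rule is written. A sketch of the guard, assuming the controller asks the quota service the way other handlers did at the time (the QuotaReached call and the "alert_rule" target are assumptions):

limitReached, err := srv.QuotaService.QuotaReached(c, "alert_rule")
if err != nil {
	return ErrResp(http.StatusInternalServerError, err, "failed to get quota")
}
if limitReached {
	return ErrResp(http.StatusForbidden, errors.New("quota has been exceeded"), "")
}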
t.Run("when quota limit exceed updating existing rule should succeed", func(t *testing.T) {
@ -2193,6 +2194,7 @@ func TestQuota(t *testing.T) {
"rules":[
{
"expr":"",
"for": "2m",
"grafana_alert":{
"id":1,
"orgId":1,

View File

@ -10,6 +10,10 @@ import (
"testing"
"time"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/models"
@ -17,9 +21,6 @@ import (
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/tests/testinfra"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestPrometheusRules(t *testing.T) {
@ -208,7 +209,7 @@ func TestPrometheusRules(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 400, resp.StatusCode)
require.JSONEq(t, `{"message": "failed to update rule group: invalid alert rule: cannot have Panel ID without a Dashboard UID"}`, string(b))
require.JSONEq(t, `{"message": "invalid rule specification at index [0]: both annotations __dashboardUid__ and __panelId__ must be specified"}`, string(b))
}
// Now, let's see what this looks like.

View File

@ -10,6 +10,10 @@ import (
"testing"
"time"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/models"
@ -17,9 +21,6 @@ import (
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/tests/testinfra"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestAlertRulePermissions(t *testing.T) {
@ -432,7 +433,7 @@ func TestAlertRuleConflictingTitle(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, http.StatusInternalServerError, resp.StatusCode)
require.JSONEq(t, `{"message": "failed to update rule group: a conflicting alert rule is found: rule title under the same organisation and folder should be unique"}`, string(b))
require.JSONEq(t, `{"message": "failed to update rule group: failed to add or update rules: a conflicting alert rule is found: rule title under the same organisation and folder should be unique"}`, string(b))
})
t.Run("trying to create alert with same title under another folder should succeed", func(t *testing.T) {