Alerting: Introduce alert rule models in storage (#93187)

* introduce storage models for the alert rule tables
* remove AlertRuleVersion from models because it is not used anywhere outside of storage
* update the Loki historian annotation store to fetch rules via the alerting store instead of querying the database directly

* fix folder tests

---------

Co-authored-by: Matthew Jacobson <matthew.jacobson@grafana.com>
Yuri Tseretyan 2024-09-12 13:20:33 -04:00 committed by GitHub
parent 0a976f831c
commit f8fa5286a1
13 changed files with 572 additions and 273 deletions
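
In short, the change replaces direct xorm mapping of ngmodels.AlertRule with dedicated storage structs whose complex fields are persisted as JSON strings, plus converters between the two representations. A heavily condensed, illustrative sketch of that pattern follows (field set trimmed, and the real converter also takes a logger for lenient state parsing; see the full structs and converters in the diff below):

package store

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/grafana/grafana/pkg/services/ngalert/models"
)

// alertRule mirrors a row of the alert_rule table; complex fields are JSON strings.
type alertRule struct {
	ID      int64  `xorm:"pk autoincr 'id'"`
	OrgID   int64  `xorm:"org_id"`
	UID     string `xorm:"uid"`
	Title   string
	Data    string // JSON-encoded []models.AlertQuery
	Updated time.Time
}

func (alertRule) TableName() string { return "alert_rule" }

// alertRuleFromModelsAlertRule converts the domain model into the storage model.
func alertRuleFromModelsAlertRule(ar models.AlertRule) (alertRule, error) {
	data, err := json.Marshal(ar.Data)
	if err != nil {
		return alertRule{}, fmt.Errorf("failed to marshal data: %w", err)
	}
	return alertRule{ID: ar.ID, OrgID: ar.OrgID, UID: ar.UID, Title: ar.Title, Data: string(data), Updated: ar.Updated}, nil
}

// alertRuleToModelsAlertRule converts the storage model back into the domain model.
func alertRuleToModelsAlertRule(ar alertRule) (models.AlertRule, error) {
	var data []models.AlertQuery
	if err := json.Unmarshal([]byte(ar.Data), &data); err != nil {
		return models.AlertRule{}, fmt.Errorf("failed to parse data: %w", err)
	}
	return models.AlertRule{ID: ar.ID, OrgID: ar.OrgID, UID: ar.UID, Title: ar.Title, Data: data, Updated: ar.Updated}, nil
}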

View File

@ -5,6 +5,7 @@ import (
"github.com/grafana/grafana/pkg/services/annotations/accesscontrol"
"github.com/grafana/grafana/pkg/services/annotations/annotationsimpl/loki"
alertingStore "github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
@ -29,6 +30,7 @@ func ProvideService(
features featuremgmt.FeatureToggles,
tagService tag.Service,
tracer tracing.Tracer,
ruleStore *alertingStore.DBstore,
) *RepositoryImpl {
l := log.New("annotations")
l.Debug("Initializing annotations service")
@ -37,7 +39,7 @@ func ProvideService(
write := xormStore
var read readStore
historianStore := loki.NewLokiHistorianStore(cfg.UnifiedAlerting.StateHistory, features, db, log.New("annotations.loki"), tracer)
historianStore := loki.NewLokiHistorianStore(cfg.UnifiedAlerting.StateHistory, features, db, ruleStore, log.New("annotations.loki"), tracer)
if historianStore != nil {
l.Debug("Using composite read store")
read = NewCompositeStore(log.New("annotations.composite"), xormStore, historianStore)

View File

@ -25,7 +25,9 @@ import (
"github.com/grafana/grafana/pkg/services/folder"
"github.com/grafana/grafana/pkg/services/folder/folderimpl"
"github.com/grafana/grafana/pkg/services/guardian"
alertingStore "github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/quota/quotatest"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/services/supportbundles/supportbundlestest"
"github.com/grafana/grafana/pkg/services/tag/tagimpl"
"github.com/grafana/grafana/pkg/services/user"
@ -48,8 +50,9 @@ func TestIntegrationAnnotationListingWithRBAC(t *testing.T) {
features := featuremgmt.WithFeatures()
tagService := tagimpl.ProvideService(sql)
ruleStore := alertingStore.SetupStoreForTesting(t, sql)
repo := ProvideService(sql, cfg, features, tagService, tracing.InitializeTracerForTest())
repo := ProvideService(sql, cfg, features, tagService, tracing.InitializeTracerForTest(), ruleStore)
dashboard1 := testutil.CreateDashboard(t, sql, cfg, features, dashboards.SaveDashboardCommand{
UserID: 1,
@ -207,7 +210,7 @@ func TestIntegrationAnnotationListingWithInheritedRBAC(t *testing.T) {
allDashboards := make([]dashInfo, 0, folder.MaxNestedFolderDepth+1)
annotationsTexts := make([]string, 0, folder.MaxNestedFolderDepth+1)
setupFolderStructure := func() db.DB {
setupFolderStructure := func() *sqlstore.ReplStore {
sql, cfg := db.InitTestReplDBWithCfg(t)
// enable nested folders so that the folder table is populated for all the tests
@ -315,8 +318,8 @@ func TestIntegrationAnnotationListingWithInheritedRBAC(t *testing.T) {
t.Run(tc.desc, func(t *testing.T) {
cfg := setting.NewCfg()
cfg.AnnotationMaximumTagsLength = 60
repo := ProvideService(sql, cfg, tc.features, tagimpl.ProvideService(sql), tracing.InitializeTracerForTest())
ruleStore := alertingStore.SetupStoreForTesting(t, sql)
repo := ProvideService(sql, cfg, tc.features, tagimpl.ProvideService(sql), tracing.InitializeTracerForTest(), ruleStore)
usr.Permissions = map[int64]map[string][]string{1: tc.permissions}
testutil.SetupRBACPermission(t, sql, role, usr)

View File

@ -39,10 +39,12 @@ const (
var (
ErrLokiStoreInternal = errutil.Internal("annotations.loki.internal")
ErrLokiStoreNotFound = errutil.NotFound("annotations.loki.notFound")
errMissingRule = errors.New("rule not found")
)
type RuleStore interface {
GetRuleByID(ctx context.Context, query ngmodels.GetAlertRuleByIDQuery) (result *ngmodels.AlertRule, err error)
}
type lokiQueryClient interface {
RangeQuery(ctx context.Context, query string, start, end, limit int64) (historian.QueryRes, error)
MaxQuerySize() int
@ -50,12 +52,13 @@ type lokiQueryClient interface {
// LokiHistorianStore is a read store that queries Loki for alert state history.
type LokiHistorianStore struct {
client lokiQueryClient
db db.DB
log log.Logger
client lokiQueryClient
db db.DB
log log.Logger
ruleStore RuleStore
}
func NewLokiHistorianStore(cfg setting.UnifiedAlertingStateHistorySettings, ft featuremgmt.FeatureToggles, db db.DB, log log.Logger, tracer tracing.Tracer) *LokiHistorianStore {
func NewLokiHistorianStore(cfg setting.UnifiedAlertingStateHistorySettings, ft featuremgmt.FeatureToggles, db db.DB, ruleStore RuleStore, log log.Logger, tracer tracing.Tracer) *LokiHistorianStore {
if !useStore(cfg, ft) {
return nil
}
@ -66,9 +69,10 @@ func NewLokiHistorianStore(cfg setting.UnifiedAlertingStateHistorySettings, ft f
}
return &LokiHistorianStore{
client: historian.NewLokiClient(lokiCfg, historian.NewRequester(), ngmetrics.NewHistorianMetrics(prometheus.DefaultRegisterer, subsystem), log, tracer),
db: db,
log: log,
client: historian.NewLokiClient(lokiCfg, historian.NewRequester(), ngmetrics.NewHistorianMetrics(prometheus.DefaultRegisterer, subsystem), log, tracer),
db: db,
log: log,
ruleStore: ruleStore,
}
}
@ -90,9 +94,9 @@ func (r *LokiHistorianStore) Get(ctx context.Context, query *annotations.ItemQue
rule := &ngmodels.AlertRule{}
if query.AlertID != 0 {
var err error
rule, err = getRule(ctx, r.db, query.OrgID, query.AlertID)
rule, err = r.ruleStore.GetRuleByID(ctx, ngmodels.GetAlertRuleByIDQuery{OrgID: query.OrgID, ID: query.AlertID})
if err != nil {
if errors.Is(err, errMissingRule) {
if errors.Is(err, ngmodels.ErrAlertRuleNotFound) {
return make([]*annotations.ItemDTO, 0), ErrLokiStoreNotFound.Errorf("rule with ID %d does not exist", query.AlertID)
}
return make([]*annotations.ItemDTO, 0), ErrLokiStoreInternal.Errorf("failed to query rule: %w", err)
@ -194,22 +198,6 @@ func (r *LokiHistorianStore) GetTags(ctx context.Context, query *annotations.Tag
// util
func getRule(ctx context.Context, sql db.DB, orgID int64, ruleID int64) (*ngmodels.AlertRule, error) {
rule := &ngmodels.AlertRule{OrgID: orgID, ID: ruleID}
err := sql.WithDbSession(ctx, func(sess *db.Session) error {
exists, err := sess.Get(rule)
if err != nil {
return err
}
if !exists {
return errMissingRule
}
return nil
})
return rule, err
}
func hasAccess(entry historian.LokiEntry, resources accesscontrol.AccessResources) bool {
orgFilter := resources.CanAccessOrgAnnotations && entry.DashboardUID == ""
dashFilter := func() bool {

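The new RuleStore interface above decouples LokiHistorianStore from the database: any type with GetRuleByID satisfies it (DBstore does, per the store diff further down). A minimal sketch of a fake implementation for unit tests, assuming the same ngmodels types used in this file; this fake is hypothetical and not part of the change:

package loki

import (
	"context"

	ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
)

// fakeRuleStore is an illustrative in-memory RuleStore for tests.
type fakeRuleStore struct {
	rules map[int64]*ngmodels.AlertRule // keyed by rule ID
}

func (f *fakeRuleStore) GetRuleByID(_ context.Context, q ngmodels.GetAlertRuleByIDQuery) (*ngmodels.AlertRule, error) {
	r, ok := f.rules[q.ID]
	if !ok || r.OrgID != q.OrgID {
		// DBstore returns ErrAlertRuleNotFound for unknown rules; LokiHistorianStore.Get
		// maps that to ErrLokiStoreNotFound (see the hunk above).
		return nil, ngmodels.ErrAlertRuleNotFound
	}
	return r, nil
}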
View File

@ -3,15 +3,16 @@ package loki
import (
"context"
"encoding/json"
"errors"
"math/rand"
"net/url"
"slices"
"strconv"
"testing"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/require"
"golang.org/x/exp/maps"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/db"
@ -28,6 +29,8 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/state"
"github.com/grafana/grafana/pkg/services/ngalert/state/historian"
historymodel "github.com/grafana/grafana/pkg/services/ngalert/state/historian/model"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tests/testsuite"
)
@ -105,6 +108,35 @@ func TestIntegrationAlertStateHistoryStore(t *testing.T) {
require.Len(t, res, numTransitions)
})
t.Run("should return ErrLokiStoreNotFound if rule is not found", func(t *testing.T) {
var rules = slices.Concat(maps.Values(dashboardRules)...)
id := rand.Int63n(1000) // in Postgres ID is integer, so limit range
// make sure id is not known
for slices.IndexFunc(rules, func(rule *ngmodels.AlertRule) bool {
return rule.ID == id
}) >= 0 {
id = rand.Int63n(1000)
}
query := annotations.ItemQuery{
OrgID: 1,
AlertID: id,
From: start.UnixMilli(),
To: start.Add(time.Second * time.Duration(numTransitions+1)).UnixMilli(),
}
_, err := store.Get(
context.Background(),
&query,
&annotation_ac.AccessResources{
Dashboards: map[string]int64{
dashboard1.UID: dashboard1.ID,
},
CanAccessDashAnnotations: true,
},
)
require.ErrorIs(t, err, ErrLokiStoreNotFound)
})
t.Run("can query history by dashboard id", func(t *testing.T) {
fakeLokiClient.rangeQueryRes = []historian.Stream{
historian.StatesToStream(ruleMetaFromRule(t, dashboardRules[dashboard1.UID][0]), transitions, map[string]string{}, log.NewNopLogger()),
@ -572,19 +604,21 @@ func TestBuildTransition(t *testing.T) {
})
}
func createTestLokiStore(t *testing.T, sql db.DB, client lokiQueryClient) *LokiHistorianStore {
func createTestLokiStore(t *testing.T, sql *sqlstore.ReplStore, client lokiQueryClient) *LokiHistorianStore {
t.Helper()
ruleStore := store.SetupStoreForTesting(t, sql)
return &LokiHistorianStore{
client: client,
db: sql,
log: log.NewNopLogger(),
client: client,
db: sql,
log: log.NewNopLogger(),
ruleStore: ruleStore,
}
}
// createAlertRule creates an alert rule in the database and returns it.
// If a generator is not specified, uniqueness of primary key is not guaranteed.
func createAlertRule(t *testing.T, sql db.DB, title string, generator *ngmodels.AlertRuleGenerator) *ngmodels.AlertRule {
func createAlertRule(t *testing.T, sql *sqlstore.ReplStore, title string, generator *ngmodels.AlertRuleGenerator) *ngmodels.AlertRule {
t.Helper()
if generator == nil {
@ -592,7 +626,7 @@ func createAlertRule(t *testing.T, sql db.DB, title string, generator *ngmodels.
generator = g.With(g.WithTitle(title), g.WithDashboardAndPanel(nil, nil), g.WithOrgID(1))
}
rule := generator.GenerateRef()
rule := generator.Generate()
// ensure rule has correct values
if rule.Title != title {
rule.Title = title
@ -601,32 +635,17 @@ func createAlertRule(t *testing.T, sql db.DB, title string, generator *ngmodels.
rule.DashboardUID = nil
rule.PanelID = nil
err := sql.WithDbSession(context.Background(), func(sess *db.Session) error {
_, err := sess.Table(ngmodels.AlertRule{}).InsertOne(rule)
if err != nil {
return err
}
dbRule := &ngmodels.AlertRule{}
exist, err := sess.Table(ngmodels.AlertRule{}).ID(rule.ID).Get(dbRule)
if err != nil {
return err
}
if !exist {
return errors.New("cannot read inserted record")
}
rule = dbRule
return nil
})
ruleStore := store.SetupStoreForTesting(t, sql)
ids, err := ruleStore.InsertAlertRules(context.Background(), []ngmodels.AlertRule{rule})
require.NoError(t, err)
return rule
result, err := ruleStore.GetAlertRuleByUID(context.Background(), &ngmodels.GetAlertRuleByUIDQuery{OrgID: rule.OrgID, UID: ids[0].UID})
require.NoError(t, err)
return result
}
// createAlertRuleFromDashboard creates an alert rule with a linked dashboard and panel in the database and returns it.
// If a generator is not specified, uniqueness of primary key is not guaranteed.
func createAlertRuleFromDashboard(t *testing.T, sql db.DB, title string, dashboard dashboards.Dashboard, generator *ngmodels.AlertRuleGenerator) *ngmodels.AlertRule {
func createAlertRuleFromDashboard(t *testing.T, sql *sqlstore.ReplStore, title string, dashboard dashboards.Dashboard, generator *ngmodels.AlertRuleGenerator) *ngmodels.AlertRule {
t.Helper()
panelID := new(int64)
@ -637,7 +656,7 @@ func createAlertRuleFromDashboard(t *testing.T, sql db.DB, title string, dashboa
generator = g.With(g.WithTitle(title), g.WithDashboardAndPanel(&dashboard.UID, panelID), g.WithOrgID(1))
}
rule := generator.GenerateRef()
rule := generator.Generate()
// ensure rule has correct values
if rule.Title != title {
rule.Title = title
@ -648,28 +667,12 @@ func createAlertRuleFromDashboard(t *testing.T, sql db.DB, title string, dashboa
if rule.PanelID == nil || (rule.PanelID != nil && *rule.PanelID != *panelID) {
rule.PanelID = panelID
}
err := sql.WithDbSession(context.Background(), func(sess *db.Session) error {
_, err := sess.Table(ngmodels.AlertRule{}).InsertOne(rule)
if err != nil {
return err
}
dbRule := &ngmodels.AlertRule{}
exist, err := sess.Table(ngmodels.AlertRule{}).ID(rule.ID).Get(dbRule)
if err != nil {
return err
}
if !exist {
return errors.New("cannot read inserted record")
}
rule = dbRule
return nil
})
ruleStore := store.SetupStoreForTesting(t, sql)
ids, err := ruleStore.InsertAlertRules(context.Background(), []ngmodels.AlertRule{rule})
require.NoError(t, err)
return rule
result, err := ruleStore.GetAlertRuleByUID(context.Background(), &ngmodels.GetAlertRuleByUIDQuery{OrgID: rule.OrgID, UID: ids[0].UID})
require.NoError(t, err)
return result
}
func ruleMetaFromRule(t *testing.T, rule *ngmodels.AlertRule) historymodel.RuleMeta {

View File

@ -419,6 +419,7 @@ func TestIntegrationNestedFolderService(t *testing.T) {
t.Skip("skipping integration test")
}
db, cfg := sqlstore.InitTestReplDB(t)
cfg.UnifiedAlerting.BaseInterval = time.Second
quotaService := quotatest.New(false, nil)
folderStore := ProvideDashboardFolderStore(db)
@ -2538,24 +2539,19 @@ func setup(t *testing.T, dashStore dashboards.Store, dashboardFolderStore folder
func createRule(t *testing.T, store *ngstore.DBstore, folderUID, title string) *models.AlertRule {
t.Helper()
rule := models.AlertRule{
OrgID: orgID,
NamespaceUID: folderUID,
Title: title,
Updated: time.Now(),
UID: util.GenerateShortUID(),
}
err := store.SQLStore.WithDbSession(context.Background(), func(sess *db.Session) error {
_, err := sess.Table(models.AlertRule{}).InsertOne(rule)
if err != nil {
return err
}
return nil
})
gen := models.RuleGen
rule := gen.With(
gen.WithOrgID(orgID),
gen.WithTitle(title),
gen.WithNamespaceUID(folderUID),
gen.WithIntervalSeconds(10),
).Generate()
ids, err := store.InsertAlertRules(context.Background(), []models.AlertRule{rule})
require.NoError(t, err)
return &rule
result, err := store.GetAlertRuleByUID(context.Background(), &models.GetAlertRuleByUIDQuery{OrgID: orgID, UID: ids[0].UID})
require.NoError(t, err)
return result
}
func TestSplitFullpath(t *testing.T) {

View File

@ -245,21 +245,21 @@ func SortAlertRuleGroupWithFolderTitle(g []AlertRuleGroupWithFolderFullpath) {
// AlertRule is the model for alert rules in unified alerting.
type AlertRule struct {
ID int64 `xorm:"pk autoincr 'id'"`
OrgID int64 `xorm:"org_id"`
ID int64
OrgID int64
Title string
Condition string
Data []AlertQuery
Updated time.Time
IntervalSeconds int64
Version int64 `xorm:"version"` // this tag makes xorm add optimistic lock (see https://xorm.io/docs/chapter-06/1.lock/)
UID string `xorm:"uid"`
NamespaceUID string `xorm:"namespace_uid"`
DashboardUID *string `xorm:"dashboard_uid"`
PanelID *int64 `xorm:"panel_id"`
Version int64
UID string
NamespaceUID string
DashboardUID *string
PanelID *int64
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
Record *Record `xorm:"json"`
RuleGroupIndex int
Record *Record
NoDataState NoDataState
ExecErrState ExecutionErrorState
// ideally this field should have been apimodels.ApiDuration
@ -268,7 +268,7 @@ type AlertRule struct {
Annotations map[string]string
Labels map[string]string
IsPaused bool
NotificationSettings []NotificationSettings `xorm:"notification_settings"` // we use slice to workaround xorm mapping that does not serialize a struct to JSON unless it's a slice
NotificationSettings []NotificationSettings
}
// Namespaced describes a class of resources that are stored in a specific namespace.
@ -630,41 +630,18 @@ func (alertRule *AlertRule) Type() RuleType {
return RuleTypeAlerting
}
// AlertRuleVersion is the model for alert rule versions in unified alerting.
type AlertRuleVersion struct {
ID int64 `xorm:"pk autoincr 'id'"`
RuleOrgID int64 `xorm:"rule_org_id"`
RuleUID string `xorm:"rule_uid"`
RuleNamespaceUID string `xorm:"rule_namespace_uid"`
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
ParentVersion int64
RestoredFrom int64
Version int64
Created time.Time
Title string
Condition string
Data []AlertQuery
IntervalSeconds int64
Record *Record `xorm:"json"`
NoDataState NoDataState
ExecErrState ExecutionErrorState
// ideally this field should have been apimodels.ApiDuration
// but this is currently not possible because of circular dependencies
For time.Duration
Annotations map[string]string
Labels map[string]string
IsPaused bool
NotificationSettings []NotificationSettings `xorm:"notification_settings"` // we use slice to workaround xorm mapping that does not serialize a struct to JSON unless it's a slice
}
// GetAlertRuleByUIDQuery is the query for retrieving/deleting an alert rule by UID and organisation ID.
type GetAlertRuleByUIDQuery struct {
UID string
OrgID int64
}
// GetAlertRuleByIDQuery is the query for retrieving/deleting an alert rule by ID and organisation ID.
type GetAlertRuleByIDQuery struct {
ID int64
OrgID int64
}
// GetAlertRulesGroupByRuleUIDQuery is the query for retrieving a group of alerts by UID of a rule that belongs to that group
type GetAlertRulesGroupByRuleUIDQuery struct {
UID string

View File

@ -37,36 +37,23 @@ var (
ErrOptimisticLock = errors.New("version conflict while updating a record in the database with optimistic locking")
)
func getAlertRuleByUID(sess *db.Session, alertRuleUID string, orgID int64) (*ngmodels.AlertRule, error) {
// we consider optionally enabling some caching
alertRule := ngmodels.AlertRule{OrgID: orgID, UID: alertRuleUID}
has, err := sess.Get(&alertRule)
if err != nil {
return nil, err
}
if !has {
return nil, ngmodels.ErrAlertRuleNotFound
}
return &alertRule, nil
}
// DeleteAlertRulesByUID is a handler for deleting an alert rule.
func (st DBstore) DeleteAlertRulesByUID(ctx context.Context, orgID int64, ruleUID ...string) error {
logger := st.Logger.New("org_id", orgID, "rule_uids", ruleUID)
return st.SQLStore.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
rows, err := sess.Table("alert_rule").Where("org_id = ?", orgID).In("uid", ruleUID).Delete(ngmodels.AlertRule{})
rows, err := sess.Table(alertRule{}).Where("org_id = ?", orgID).In("uid", ruleUID).Delete(alertRule{})
if err != nil {
return err
}
logger.Debug("Deleted alert rules", "count", rows)
rows, err = sess.Table("alert_rule_version").Where("rule_org_id = ?", orgID).In("rule_uid", ruleUID).Delete(ngmodels.AlertRule{})
rows, err = sess.Table(alertRuleVersion{}).Where("rule_org_id = ?", orgID).In("rule_uid", ruleUID).Delete(alertRule{})
if err != nil {
return err
}
logger.Debug("Deleted alert rule versions", "count", rows)
rows, err = sess.Table("alert_instance").Where("rule_org_id = ?", orgID).In("rule_uid", ruleUID).Delete(ngmodels.AlertRule{})
rows, err = sess.Table("alert_instance").Where("rule_org_id = ?", orgID).In("rule_uid", ruleUID).Delete(alertRule{})
if err != nil {
return err
}
@ -94,7 +81,7 @@ func (st DBstore) IncreaseVersionForAllRulesInNamespaces(ctx context.Context, or
return err
}
return sess.Table(ngmodels.AlertRule{}).Where("org_id = ?", orgID).In("namespace_uid", namespaceUIDs).Find(&keys)
return sess.Table(alertRule{}).Where("org_id = ?", orgID).In("namespace_uid", namespaceUIDs).Find(&keys)
})
return keys, err
}
@ -103,11 +90,41 @@ func (st DBstore) IncreaseVersionForAllRulesInNamespaces(ctx context.Context, or
// It returns ngmodels.ErrAlertRuleNotFound if no alert rule is found for the provided ID.
func (st DBstore) GetAlertRuleByUID(ctx context.Context, query *ngmodels.GetAlertRuleByUIDQuery) (result *ngmodels.AlertRule, err error) {
err = st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error {
alertRule, err := getAlertRuleByUID(sess, query.UID, query.OrgID)
alertRule := alertRule{OrgID: query.OrgID, UID: query.UID}
has, err := sess.Get(&alertRule)
if err != nil {
return err
}
result = alertRule
if !has {
return ngmodels.ErrAlertRuleNotFound
}
r, err := alertRuleToModelsAlertRule(alertRule, st.Logger)
if err != nil {
return fmt.Errorf("failed to convert alert rule: %w", err)
}
result = &r
return nil
})
return result, err
}
// GetRuleByID retrieves models.AlertRule by ID.
// It returns models.ErrAlertRuleNotFound if no alert rule is found for the provided ID.
func (st DBstore) GetRuleByID(ctx context.Context, query ngmodels.GetAlertRuleByIDQuery) (result *ngmodels.AlertRule, err error) {
err = st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error {
alertRule := alertRule{OrgID: query.OrgID, ID: query.ID}
has, err := sess.Get(&alertRule)
if err != nil {
return err
}
if !has {
return ngmodels.ErrAlertRuleNotFound
}
r, err := alertRuleToModelsAlertRule(alertRule, st.Logger)
if err != nil {
return fmt.Errorf("failed to convert alert rule: %w", err)
}
result = &r
return nil
})
return result, err
@ -116,7 +133,7 @@ func (st DBstore) GetAlertRuleByUID(ctx context.Context, query *ngmodels.GetAler
// GetAlertRulesGroupByRuleUID is a handler for retrieving a group of alert rules from that database by UID and organisation ID of one of rules that belong to that group.
func (st DBstore) GetAlertRulesGroupByRuleUID(ctx context.Context, query *ngmodels.GetAlertRulesGroupByRuleUIDQuery) (result []*ngmodels.AlertRule, err error) {
err = st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error {
var rules []*ngmodels.AlertRule
var rules []alertRule
err := sess.Table("alert_rule").Alias("a").Join(
"INNER",
"alert_rule AS b", "a.org_id = b.org_id AND a.namespace_uid = b.namespace_uid AND a.rule_group = b.rule_group AND b.uid = ?", query.UID,
@ -125,11 +142,12 @@ func (st DBstore) GetAlertRulesGroupByRuleUID(ctx context.Context, query *ngmode
return err
}
// MySQL by default compares strings without case-sensitivity, make sure we keep the case-sensitive comparison.
var groupKey ngmodels.AlertRuleGroupKey
var groupName, namespaceUID string
// find the rule, which group we fetch
for _, rule := range rules {
if rule.UID == query.UID {
groupKey = rule.GetGroupKey()
groupName = rule.RuleGroup
namespaceUID = rule.NamespaceUID
break
}
}
@ -137,9 +155,14 @@ func (st DBstore) GetAlertRulesGroupByRuleUID(ctx context.Context, query *ngmode
// MySQL (and potentially other databases) can use case-insensitive comparison.
// This code makes sure we return groups that only exactly match the filter.
for _, rule := range rules {
if rule.GetGroupKey() == groupKey {
result = append(result, rule)
if rule.RuleGroup != groupName || rule.NamespaceUID != namespaceUID {
continue
}
convert, err := alertRuleToModelsAlertRule(rule, st.Logger)
if err != nil {
return fmt.Errorf("failed to convert alert rule %q: %w", rule.UID, err)
}
result = append(result, &convert)
}
return nil
})
@ -151,8 +174,8 @@ func (st DBstore) GetAlertRulesGroupByRuleUID(ctx context.Context, query *ngmode
func (st DBstore) InsertAlertRules(ctx context.Context, rules []ngmodels.AlertRule) ([]ngmodels.AlertRuleKeyWithId, error) {
ids := make([]ngmodels.AlertRuleKeyWithId, 0, len(rules))
return ids, st.SQLStore.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
newRules := make([]ngmodels.AlertRule, 0, len(rules))
ruleVersions := make([]ngmodels.AlertRuleVersion, 0, len(rules))
newRules := make([]alertRule, 0, len(rules))
ruleVersions := make([]alertRuleVersion, 0, len(rules))
for i := range rules {
r := rules[i]
if r.UID == "" {
@ -169,27 +192,13 @@ func (st DBstore) InsertAlertRules(ctx context.Context, rules []ngmodels.AlertRu
if err := (&r).PreSave(TimeNow); err != nil {
return err
}
newRules = append(newRules, r)
ruleVersions = append(ruleVersions, ngmodels.AlertRuleVersion{
RuleUID: r.UID,
RuleOrgID: r.OrgID,
RuleNamespaceUID: r.NamespaceUID,
RuleGroup: r.RuleGroup,
ParentVersion: 0,
Version: r.Version,
Created: r.Updated,
Condition: r.Condition,
Title: r.Title,
Data: r.Data,
IntervalSeconds: r.IntervalSeconds,
NoDataState: r.NoDataState,
ExecErrState: r.ExecErrState,
For: r.For,
Annotations: r.Annotations,
Labels: r.Labels,
Record: r.Record,
NotificationSettings: r.NotificationSettings,
})
converted, err := alertRuleFromModelsAlertRule(r)
if err != nil {
return fmt.Errorf("failed to convert alert rule %q to storage model: %w", r.Title, err)
}
newRules = append(newRules, converted)
ruleVersions = append(ruleVersions, alertRuleToAlertRuleVersion(converted))
}
if len(newRules) > 0 {
// we have to insert the rules one by one as otherwise we are
@ -197,13 +206,17 @@ func (st DBstore) InsertAlertRules(ctx context.Context, rules []ngmodels.AlertRu
for i := range newRules {
if _, err := sess.Insert(&newRules[i]); err != nil {
if st.SQLStore.GetDialect().IsUniqueConstraintViolation(err) {
return ruleConstraintViolationToErr(newRules[i], err)
return ruleConstraintViolationToErr(rules[i], err)
}
return fmt.Errorf("failed to create new rules: %w", err)
}
r := newRules[i]
ids = append(ids, ngmodels.AlertRuleKeyWithId{
AlertRuleKey: newRules[i].GetKey(),
ID: newRules[i].ID,
AlertRuleKey: ngmodels.AlertRuleKey{
OrgID: r.OrgID,
UID: r.UID,
},
ID: r.ID,
})
}
}
@ -225,12 +238,11 @@ func (st DBstore) UpdateAlertRules(ctx context.Context, rules []ngmodels.UpdateR
return fmt.Errorf("failed when preventing intermediate unique constraint violation: %w", err)
}
ruleVersions := make([]ngmodels.AlertRuleVersion, 0, len(rules))
ruleVersions := make([]alertRuleVersion, 0, len(rules))
for i := range rules {
// We do indexed access way to avoid "G601: Implicit memory aliasing in for loop."
// Doing this will be unnecessary with go 1.22 https://stackoverflow.com/a/68247837/767660
r := rules[i]
var parentVersion int64
r.New.ID = r.Existing.ID
r.New.Version = r.Existing.Version // xorm will take care of increasing it (see https://xorm.io/docs/chapter-06/1.lock/)
if err := st.validateAlertRule(r.New); err != nil {
@ -239,8 +251,12 @@ func (st DBstore) UpdateAlertRules(ctx context.Context, rules []ngmodels.UpdateR
if err := (&r.New).PreSave(TimeNow); err != nil {
return err
}
converted, err := alertRuleFromModelsAlertRule(r.New)
if err != nil {
return fmt.Errorf("failed to convert alert rule %s to storage model: %w", r.New.UID, err)
}
// no way to update multiple rules at once
if updated, err := sess.ID(r.Existing.ID).AllCols().Update(r.New); err != nil || updated == 0 {
if updated, err := sess.ID(r.Existing.ID).AllCols().Update(converted); err != nil || updated == 0 {
if err != nil {
if st.SQLStore.GetDialect().IsUniqueConstraintViolation(err) {
return ruleConstraintViolationToErr(r.New, err)
@ -249,28 +265,10 @@ func (st DBstore) UpdateAlertRules(ctx context.Context, rules []ngmodels.UpdateR
}
return fmt.Errorf("%w: alert rule UID %s version %d", ErrOptimisticLock, r.New.UID, r.New.Version)
}
parentVersion = r.Existing.Version
ruleVersions = append(ruleVersions, ngmodels.AlertRuleVersion{
RuleOrgID: r.New.OrgID,
RuleUID: r.New.UID,
RuleNamespaceUID: r.New.NamespaceUID,
RuleGroup: r.New.RuleGroup,
RuleGroupIndex: r.New.RuleGroupIndex,
ParentVersion: parentVersion,
Version: r.New.Version + 1,
Created: r.New.Updated,
Condition: r.New.Condition,
Title: r.New.Title,
Data: r.New.Data,
IntervalSeconds: r.New.IntervalSeconds,
NoDataState: r.New.NoDataState,
ExecErrState: r.New.ExecErrState,
Record: r.New.Record,
For: r.New.For,
Annotations: r.New.Annotations,
Labels: r.New.Labels,
NotificationSettings: r.New.NotificationSettings,
})
v := alertRuleToAlertRuleVersion(converted)
v.Version++
v.ParentVersion = r.Existing.Version
ruleVersions = append(ruleVersions, v)
}
if len(ruleVersions) > 0 {
if _, err := sess.Insert(&ruleVersions); err != nil {
@ -319,7 +317,7 @@ func (st DBstore) preventIntermediateUniqueConstraintViolations(sess *db.Session
uniqueTempTitle = r.Title[:AlertRuleMaxTitleLength-len(u)] + uuid.New().String()
}
if updated, err := sess.ID(r.ID).Cols("title").Update(&ngmodels.AlertRule{Title: uniqueTempTitle, Version: r.Version}); err != nil || updated == 0 {
if updated, err := sess.ID(r.ID).Cols("title").Update(&alertRule{Title: uniqueTempTitle, Version: r.Version}); err != nil || updated == 0 {
if err != nil {
return fmt.Errorf("failed to set temporary rule title [%s] %s: %w", r.UID, r.Title, err)
}
@ -425,7 +423,7 @@ func (st DBstore) ListAlertRules(ctx context.Context, query *ngmodels.ListAlertR
q = q.Asc("namespace_uid", "rule_group", "rule_group_idx", "id")
alertRules := make([]*ngmodels.AlertRule, 0)
rule := new(ngmodels.AlertRule)
rule := new(alertRule)
rows, err := q.Rows(rule)
if err != nil {
return err
@ -436,21 +434,26 @@ func (st DBstore) ListAlertRules(ctx context.Context, query *ngmodels.ListAlertR
// Deserialize each rule separately in case any of them contain invalid JSON.
for rows.Next() {
rule := new(ngmodels.AlertRule)
rule := new(alertRule)
err = rows.Scan(rule)
if err != nil {
st.Logger.Error("Invalid rule found in DB store, ignoring it", "func", "ListAlertRules", "error", err)
continue
}
converted, err := alertRuleToModelsAlertRule(*rule, st.Logger)
if err != nil {
st.Logger.Error("Invalid rule found in DB store, cannot convert, ignoring it", "func", "ListAlertRules", "error", err)
continue
}
if query.ReceiverName != "" { // remove false-positive hits from the result
if !slices.ContainsFunc(rule.NotificationSettings, func(settings ngmodels.NotificationSettings) bool {
if !slices.ContainsFunc(converted.NotificationSettings, func(settings ngmodels.NotificationSettings) bool {
return settings.Receiver == query.ReceiverName
}) {
continue
}
}
if query.TimeIntervalName != "" {
if !slices.ContainsFunc(rule.NotificationSettings, func(settings ngmodels.NotificationSettings) bool {
if !slices.ContainsFunc(converted.NotificationSettings, func(settings ngmodels.NotificationSettings) bool {
return slices.Contains(settings.MuteTimeIntervals, query.TimeIntervalName)
}) {
continue
@ -459,11 +462,11 @@ func (st DBstore) ListAlertRules(ctx context.Context, query *ngmodels.ListAlertR
// MySQL (and potentially other databases) can use case-insensitive comparison.
// This code makes sure we return groups that only exactly match the filter.
if groupsMap != nil {
if _, ok := groupsMap[rule.RuleGroup]; !ok {
if _, ok := groupsMap[converted.RuleGroup]; !ok {
continue
}
}
alertRules = append(alertRules, rule)
alertRules = append(alertRules, &converted)
}
result = alertRules
@ -498,10 +501,10 @@ func (st DBstore) Count(ctx context.Context, orgID int64) (int64, error) {
func (st DBstore) GetRuleGroupInterval(ctx context.Context, orgID int64, namespaceUID string, ruleGroup string) (int64, error) {
var interval int64 = 0
return interval, st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error {
ruleGroups := make([]ngmodels.AlertRule, 0)
ruleGroups := make([]alertRule, 0)
err := sess.Find(
&ruleGroups,
ngmodels.AlertRule{OrgID: orgID, RuleGroup: ruleGroup, NamespaceUID: namespaceUID},
alertRule{OrgID: orgID, RuleGroup: ruleGroup, NamespaceUID: namespaceUID},
)
if len(ruleGroups) == 0 {
return ngmodels.ErrAlertRuleGroupNotFound.Errorf("")
@ -587,7 +590,7 @@ func (st DBstore) GetAlertRulesForScheduling(ctx context.Context, query *ngmodel
}
}
rule := new(ngmodels.AlertRule)
rule := new(alertRule)
rows, err := alertRulesSql.Rows(rule)
if err != nil {
return fmt.Errorf("failed to fetch alert rules: %w", err)
@ -599,27 +602,32 @@ func (st DBstore) GetAlertRulesForScheduling(ctx context.Context, query *ngmodel
}()
// Deserialize each rule separately in case any of them contain invalid JSON.
for rows.Next() {
rule := new(ngmodels.AlertRule)
rule := new(alertRule)
err = rows.Scan(rule)
if err != nil {
st.Logger.Error("Invalid rule found in DB store, ignoring it", "func", "GetAlertRulesForScheduling", "error", err)
continue
}
converted, err := alertRuleToModelsAlertRule(*rule, st.Logger)
if err != nil {
st.Logger.Error("Invalid rule found in DB store, cannot convert it", "func", "GetAlertRulesForScheduling", "error", err)
continue
}
// MySQL (and potentially other databases) uses case-insensitive comparison.
// This code makes sure we return groups that only exactly match the filter
if groupsMap != nil {
if _, ok := groupsMap[rule.RuleGroup]; !ok { // compare groups using case-sensitive logic.
if _, ok := groupsMap[converted.RuleGroup]; !ok { // compare groups using case-sensitive logic.
continue
}
}
if st.FeatureToggles.IsEnabled(ctx, featuremgmt.FlagAlertingQueryOptimization) {
if optimizations, err := OptimizeAlertQueries(rule.Data); err != nil {
if optimizations, err := OptimizeAlertQueries(converted.Data); err != nil {
st.Logger.Error("Could not migrate rule from range to instant query", "rule", rule.UID, "err", err)
} else if len(optimizations) > 0 {
st.Logger.Info("Migrated rule from range to instant query", "rule", rule.UID, "migrated_queries", len(optimizations))
}
}
rules = append(rules, rule)
rules = append(rules, &converted)
}
query.ResultRules = rules
@ -707,7 +715,7 @@ var GenerateNewAlertRuleUID = func(sess *db.Session, orgID int64, ruleTitle stri
for i := 0; i < 3; i++ {
uid := util.GenerateShortUID()
exists, err := sess.Where("org_id=? AND uid=?", orgID, uid).Get(&ngmodels.AlertRule{})
exists, err := sess.Where("org_id=? AND uid=?", orgID, uid).Get(&alertRule{})
if err != nil {
return "", err
}
@ -741,9 +749,9 @@ func (st DBstore) validateAlertRule(alertRule ngmodels.AlertRule) error {
// ListNotificationSettings fetches all notification settings for given organization
func (st DBstore) ListNotificationSettings(ctx context.Context, q ngmodels.ListNotificationSettingsQuery) (map[ngmodels.AlertRuleKey][]ngmodels.NotificationSettings, error) {
var rules []ngmodels.AlertRule
var rules []alertRule
err := st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error {
query := sess.Table(ngmodels.AlertRule{}).Select("uid, notification_settings").Where("org_id = ?", q.OrgID)
query := sess.Table(alertRule{}).Select("uid, notification_settings").Where("org_id = ?", q.OrgID)
hasFilter := false
if q.ReceiverName != "" {
var err error
@ -762,7 +770,7 @@ func (st DBstore) ListNotificationSettings(ctx context.Context, q ngmodels.ListN
hasFilter = true
}
if !hasFilter {
query = query.And("notification_settings IS NOT NULL AND notification_settings <> 'null'")
query = query.And("notification_settings IS NOT NULL AND notification_settings <> 'null' AND notification_settings <> ''")
}
return query.Find(&rules)
})
@ -771,8 +779,15 @@ func (st DBstore) ListNotificationSettings(ctx context.Context, q ngmodels.ListN
}
result := make(map[ngmodels.AlertRuleKey][]ngmodels.NotificationSettings, len(rules))
for _, rule := range rules {
if rule.NotificationSettings == "" {
continue
}
converted, err := parseNotificationSettings(rule.NotificationSettings)
if err != nil {
return nil, fmt.Errorf("failed to convert notification settings %s to models: %w", rule.UID, err)
}
ns := make([]ngmodels.NotificationSettings, 0, len(rule.NotificationSettings))
for _, setting := range rule.NotificationSettings {
for _, setting := range converted {
if q.ReceiverName != "" && q.ReceiverName != setting.Receiver { // currently, there can be only one setting. If in future there are more, we will return all settings of a rule that has a setting with receiver
continue
}
@ -786,7 +801,7 @@ func (st DBstore) ListNotificationSettings(ctx context.Context, q ngmodels.ListN
OrgID: q.OrgID,
UID: rule.UID,
}
result[key] = rule.NotificationSettings
result[key] = ns
}
}
return result, nil
@ -929,8 +944,8 @@ func ruleConstraintViolationToErr(rule ngmodels.AlertRule, err error) error {
func (st DBstore) GetNamespacesByRuleUID(ctx context.Context, orgID int64, uids ...string) (map[string]string, error) {
result := make(map[string]string)
err := st.SQLStore.WithDbSession(ctx, func(sess *db.Session) error {
var rules []ngmodels.AlertRule
err := sess.Table(ngmodels.AlertRule{}).Select("uid, namespace_uid").Where("org_id = ?", orgID).In("uid", uids).Find(&rules)
var rules []alertRule
err := sess.Table(alertRule{}).Select("uid, namespace_uid").Where("org_id = ?", orgID).In("uid", uids).Find(&rules)
if err != nil {
return err
}

View File

@ -63,9 +63,9 @@ func TestIntegrationUpdateAlertRules(t *testing.T) {
})
require.NoError(t, err)
dbrule := &models.AlertRule{}
dbrule := &alertRule{}
err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
exist, err := sess.Table(models.AlertRule{}).ID(rule.ID).Get(dbrule)
exist, err := sess.Table(alertRule{}).ID(rule.ID).Get(dbrule)
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule.ID))
return err
})
@ -86,9 +86,9 @@ func TestIntegrationUpdateAlertRules(t *testing.T) {
})
require.NoError(t, err)
dbrule := &models.AlertRule{}
dbrule := &alertRule{}
err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
exist, err := sess.Table(models.AlertRule{}).ID(rule.ID).Get(dbrule)
exist, err := sess.Table(alertRule{}).ID(rule.ID).Get(dbrule)
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule.ID))
return err
})
@ -157,16 +157,16 @@ func TestIntegrationUpdateAlertRulesWithUniqueConstraintViolation(t *testing.T)
})
require.NoError(t, err)
dbrule1 := &models.AlertRule{}
dbrule2 := &models.AlertRule{}
dbrule1 := &alertRule{}
dbrule2 := &alertRule{}
err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
exist, err := sess.Table(models.AlertRule{}).ID(rule1.ID).Get(dbrule1)
exist, err := sess.Table(alertRule{}).ID(rule1.ID).Get(dbrule1)
if err != nil {
return err
}
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule1.ID))
exist, err = sess.Table(models.AlertRule{}).ID(rule2.ID).Get(dbrule2)
exist, err = sess.Table(alertRule{}).ID(rule2.ID).Get(dbrule2)
if err != nil {
return err
}
@ -204,23 +204,23 @@ func TestIntegrationUpdateAlertRulesWithUniqueConstraintViolation(t *testing.T)
})
require.NoError(t, err)
dbrule1 := &models.AlertRule{}
dbrule2 := &models.AlertRule{}
dbrule3 := &models.AlertRule{}
dbrule1 := &alertRule{}
dbrule2 := &alertRule{}
dbrule3 := &alertRule{}
err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
exist, err := sess.Table(models.AlertRule{}).ID(rule1.ID).Get(dbrule1)
exist, err := sess.Table(alertRule{}).ID(rule1.ID).Get(dbrule1)
if err != nil {
return err
}
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule1.ID))
exist, err = sess.Table(models.AlertRule{}).ID(rule2.ID).Get(dbrule2)
exist, err = sess.Table(alertRule{}).ID(rule2.ID).Get(dbrule2)
if err != nil {
return err
}
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule2.ID))
exist, err = sess.Table(models.AlertRule{}).ID(rule3.ID).Get(dbrule3)
exist, err = sess.Table(alertRule{}).ID(rule3.ID).Get(dbrule3)
if err != nil {
return err
}
@ -253,16 +253,16 @@ func TestIntegrationUpdateAlertRulesWithUniqueConstraintViolation(t *testing.T)
})
require.NoError(t, err)
dbrule1 := &models.AlertRule{}
dbrule2 := &models.AlertRule{}
dbrule1 := &alertRule{}
dbrule2 := &alertRule{}
err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
exist, err := sess.Table(models.AlertRule{}).ID(rule1.ID).Get(dbrule1)
exist, err := sess.Table(alertRule{}).ID(rule1.ID).Get(dbrule1)
if err != nil {
return err
}
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule1.ID))
exist, err = sess.Table(models.AlertRule{}).ID(rule2.ID).Get(dbrule2)
exist, err = sess.Table(alertRule{}).ID(rule2.ID).Get(dbrule2)
if err != nil {
return err
}
@ -306,30 +306,30 @@ func TestIntegrationUpdateAlertRulesWithUniqueConstraintViolation(t *testing.T)
})
require.NoError(t, err)
dbrule1 := &models.AlertRule{}
dbrule2 := &models.AlertRule{}
dbrule3 := &models.AlertRule{}
dbrule4 := &models.AlertRule{}
dbrule1 := &alertRule{}
dbrule2 := &alertRule{}
dbrule3 := &alertRule{}
dbrule4 := &alertRule{}
err = sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
exist, err := sess.Table(models.AlertRule{}).ID(rule1.ID).Get(dbrule1)
exist, err := sess.Table(alertRule{}).ID(rule1.ID).Get(dbrule1)
if err != nil {
return err
}
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule1.ID))
exist, err = sess.Table(models.AlertRule{}).ID(rule2.ID).Get(dbrule2)
exist, err = sess.Table(alertRule{}).ID(rule2.ID).Get(dbrule2)
if err != nil {
return err
}
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule2.ID))
exist, err = sess.Table(models.AlertRule{}).ID(rule3.ID).Get(dbrule3)
exist, err = sess.Table(alertRule{}).ID(rule3.ID).Get(dbrule3)
if err != nil {
return err
}
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", rule3.ID))
exist, err = sess.Table(models.AlertRule{}).ID(rule4.ID).Get(dbrule4)
exist, err = sess.Table(alertRule{}).ID(rule4.ID).Get(dbrule4)
if err != nil {
return err
}
@ -379,6 +379,7 @@ func TestIntegration_GetAlertRulesForScheduling(t *testing.T) {
sqlStore := db.InitTestReplDB(t)
store := &DBstore{
Logger: &logtest.Fake{},
SQLStore: sqlStore,
Cfg: cfg.UnifiedAlerting,
FolderService: setupFolderService(t, sqlStore, cfg, featuremgmt.WithFeatures()),
@ -1299,7 +1300,7 @@ func TestIncreaseVersionForAllRulesInNamespaces(t *testing.T) {
gen := models.RuleGen
gen = gen.With(gen.WithIntervalMatching(store.Cfg.BaseInterval)).With(gen.WithOrgID(orgID))
alertRules := []*models.AlertRule{}
alertRules := make([]*models.AlertRule, 0, 5)
for i := 0; i < 5; i++ {
alertRules = append(alertRules, createRule(t, store, gen))
}
@ -1311,9 +1312,9 @@ func TestIncreaseVersionForAllRulesInNamespaces(t *testing.T) {
requireAlertRuleVersion := func(t *testing.T, ruleID int64, orgID int64, expectedVersion int64) {
t.Helper()
dbrule := &models.AlertRule{}
dbrule := &alertRule{}
err := sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
exist, err := sess.Table(models.AlertRule{}).ID(ruleID).Get(dbrule)
exist, err := sess.Table(alertRule{}).ID(ruleID).Get(dbrule)
require.Truef(t, exist, fmt.Sprintf("rule with ID %d does not exist", ruleID))
return err
})
@ -1342,24 +1343,25 @@ func createRule(t *testing.T, store *DBstore, generator *models.AlertRuleGenerat
generator = models.RuleGen.With(models.RuleMuts.WithIntervalMatching(store.Cfg.BaseInterval))
}
rule := generator.GenerateRef()
err := store.SQLStore.WithDbSession(context.Background(), func(sess *db.Session) error {
_, err := sess.Table(models.AlertRule{}).InsertOne(rule)
converted, err := alertRuleFromModelsAlertRule(*rule)
require.NoError(t, err)
err = store.SQLStore.WithDbSession(context.Background(), func(sess *db.Session) error {
converted.ID = 0
_, err := sess.Table(alertRule{}).InsertOne(&converted)
if err != nil {
return err
}
dbRule := &models.AlertRule{}
exist, err := sess.Table(models.AlertRule{}).ID(rule.ID).Get(dbRule)
dbRule := &alertRule{}
exist, err := sess.Table(alertRule{}).ID(converted.ID).Get(dbRule)
if err != nil {
return err
}
if !exist {
return errors.New("cannot read inserted record")
}
rule = dbRule
require.NoError(t, err)
return nil
r, err := alertRuleToModelsAlertRule(*dbRule, &logtest.Fake{})
rule = &r
return err
})
require.NoError(t, err)

View File

@ -0,0 +1,181 @@
package store
import (
"encoding/json"
"fmt"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/ngalert/models"
)
func alertRuleToModelsAlertRule(ar alertRule, l log.Logger) (models.AlertRule, error) {
var data []models.AlertQuery
err := json.Unmarshal([]byte(ar.Data), &data)
if err != nil {
return models.AlertRule{}, fmt.Errorf("failed to parse data: %w", err)
}
result := models.AlertRule{
ID: ar.ID,
OrgID: ar.OrgID,
Title: ar.Title,
Condition: ar.Condition,
Data: data,
Updated: ar.Updated,
IntervalSeconds: ar.IntervalSeconds,
Version: ar.Version,
UID: ar.UID,
NamespaceUID: ar.NamespaceUID,
DashboardUID: ar.DashboardUID,
PanelID: ar.PanelID,
RuleGroup: ar.RuleGroup,
RuleGroupIndex: ar.RuleGroupIndex,
For: ar.For,
IsPaused: ar.IsPaused,
}
if ar.NoDataState != "" {
result.NoDataState, err = models.NoDataStateFromString(ar.NoDataState)
if err != nil {
l.Warn("Unknown NoDataState value, defaulting to NoData", append(result.GetKey().LogContext(), "original", ar.NoDataState)...)
result.NoDataState = models.NoData
}
}
if ar.ExecErrState != "" {
result.ExecErrState, err = models.ErrStateFromString(ar.ExecErrState)
if err != nil {
l.Warn("Unknown ExecErrState value, defaulting to Error", append(result.GetKey().LogContext(), "original", ar.ExecErrState)...)
result.ExecErrState = models.ErrorErrState
}
}
if ar.Record != "" {
var record models.Record
err = json.Unmarshal([]byte(ar.Record), &record)
if err != nil {
return models.AlertRule{}, fmt.Errorf("failed to parse record: %w", err)
}
result.Record = &record
}
if ar.Labels != "" {
err = json.Unmarshal([]byte(ar.Labels), &result.Labels)
if err != nil {
return models.AlertRule{}, fmt.Errorf("failed to parse labels: %w", err)
}
}
if ar.Annotations != "" {
err = json.Unmarshal([]byte(ar.Annotations), &result.Annotations)
if err != nil {
return models.AlertRule{}, fmt.Errorf("failed to parse annotations: %w", err)
}
}
if ar.NotificationSettings != "" {
ns, err := parseNotificationSettings(ar.NotificationSettings)
if err != nil {
return models.AlertRule{}, fmt.Errorf("failed to parse notification settings: %w", err)
}
result.NotificationSettings = ns
}
return result, nil
}
func parseNotificationSettings(s string) ([]models.NotificationSettings, error) {
var result []models.NotificationSettings
if err := json.Unmarshal([]byte(s), &result); err != nil {
return nil, err
}
return result, nil
}
func alertRuleFromModelsAlertRule(ar models.AlertRule) (alertRule, error) {
result := alertRule{
ID: ar.ID,
OrgID: ar.OrgID,
Title: ar.Title,
Condition: ar.Condition,
Updated: ar.Updated,
IntervalSeconds: ar.IntervalSeconds,
Version: ar.Version,
UID: ar.UID,
NamespaceUID: ar.NamespaceUID,
DashboardUID: ar.DashboardUID,
PanelID: ar.PanelID,
RuleGroup: ar.RuleGroup,
RuleGroupIndex: ar.RuleGroupIndex,
NoDataState: ar.NoDataState.String(),
ExecErrState: ar.ExecErrState.String(),
For: ar.For,
IsPaused: ar.IsPaused,
}
// Serialize complex types to JSON strings
data, err := json.Marshal(ar.Data)
if err != nil {
return alertRule{}, fmt.Errorf("failed to marshal data: %w", err)
}
result.Data = string(data)
if ar.Record != nil {
recordData, err := json.Marshal(ar.Record)
if err != nil {
return alertRule{}, fmt.Errorf("failed to marshal record: %w", err)
}
result.Record = string(recordData)
}
if len(ar.Annotations) > 0 {
annotationsData, err := json.Marshal(ar.Annotations)
if err != nil {
return alertRule{}, fmt.Errorf("failed to marshal annotations: %w", err)
}
result.Annotations = string(annotationsData)
}
if len(ar.Labels) > 0 {
labelsData, err := json.Marshal(ar.Labels)
if err != nil {
return alertRule{}, fmt.Errorf("failed to marshal labels: %w", err)
}
result.Labels = string(labelsData)
}
if len(ar.NotificationSettings) > 0 {
notificationSettingsData, err := json.Marshal(ar.NotificationSettings)
if err != nil {
return alertRule{}, fmt.Errorf("failed to marshal notification settings: %w", err)
}
result.NotificationSettings = string(notificationSettingsData)
}
return result, nil
}
func alertRuleToAlertRuleVersion(rule alertRule) alertRuleVersion {
return alertRuleVersion{
RuleOrgID: rule.OrgID,
RuleUID: rule.UID,
RuleNamespaceUID: rule.NamespaceUID,
RuleGroup: rule.RuleGroup,
RuleGroupIndex: rule.RuleGroupIndex,
ParentVersion: 0,
RestoredFrom: 0,
Version: rule.Version,
Created: rule.Updated, // assuming the Updated time as the creation time
Title: rule.Title,
Condition: rule.Condition,
Data: rule.Data,
IntervalSeconds: rule.IntervalSeconds,
Record: rule.Record,
NoDataState: rule.NoDataState,
ExecErrState: rule.ExecErrState,
For: rule.For,
Annotations: rule.Annotations,
Labels: rule.Labels,
IsPaused: rule.IsPaused,
NotificationSettings: rule.NotificationSettings,
}
}

View File

@ -0,0 +1,45 @@
package store
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/log/logtest"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/util"
)
func TestAlertRuleToModelsAlertRule(t *testing.T) {
g := ngmodels.RuleGen
t.Run("make sure no data is lost between conversions", func(t *testing.T) {
for _, rule := range g.GenerateMany(100) {
r, err := alertRuleFromModelsAlertRule(rule)
require.NoError(t, err)
clone, err := alertRuleToModelsAlertRule(r, &logtest.Fake{})
require.NoError(t, err)
require.Empty(t, rule.Diff(&clone))
}
})
t.Run("should use NoData if NoDataState is not known", func(t *testing.T) {
rule, err := alertRuleFromModelsAlertRule(g.Generate())
require.NoError(t, err)
rule.NoDataState = util.GenerateShortUID()
converted, err := alertRuleToModelsAlertRule(rule, &logtest.Fake{})
require.NoError(t, err)
require.Equal(t, ngmodels.NoData, converted.NoDataState)
})
t.Run("should use Error if ExecErrState is not known", func(t *testing.T) {
rule, err := alertRuleFromModelsAlertRule(g.Generate())
require.NoError(t, err)
rule.ExecErrState = util.GenerateShortUID()
converted, err := alertRuleToModelsAlertRule(rule, &logtest.Fake{})
require.NoError(t, err)
require.Equal(t, ngmodels.ErrorErrState, converted.ExecErrState)
})
}

View File

@ -0,0 +1,66 @@
package store
import "time"
// alertRule represents a record in alert_rule table
type alertRule struct {
ID int64 `xorm:"pk autoincr 'id'"`
OrgID int64 `xorm:"org_id"`
Title string
Condition string
Data string
Updated time.Time
IntervalSeconds int64
Version int64 `xorm:"version"` // this tag makes xorm add optimistic lock (see https://xorm.io/docs/chapter-06/1.lock/)
UID string `xorm:"uid"`
NamespaceUID string `xorm:"namespace_uid"`
DashboardUID *string `xorm:"dashboard_uid"`
PanelID *int64 `xorm:"panel_id"`
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
Record string
NoDataState string
ExecErrState string
For time.Duration
Annotations string
Labels string
IsPaused bool
NotificationSettings string `xorm:"notification_settings"`
}
func (a alertRule) TableName() string {
return "alert_rule"
}
// alertRuleVersion represents a record in alert_rule_version table
type alertRuleVersion struct {
ID int64 `xorm:"pk autoincr 'id'"`
RuleOrgID int64 `xorm:"rule_org_id"`
RuleUID string `xorm:"rule_uid"`
RuleNamespaceUID string `xorm:"rule_namespace_uid"`
RuleGroup string
RuleGroupIndex int `xorm:"rule_group_idx"`
ParentVersion int64
RestoredFrom int64
Version int64
Created time.Time
Title string
Condition string
Data string
IntervalSeconds int64
Record string
NoDataState string
ExecErrState string
// ideally this field should have been apimodels.ApiDuration
// but this is currently not possible because of circular dependencies
For time.Duration
Annotations string
Labels string
IsPaused bool
NotificationSettings string `xorm:"notification_settings"`
}
func (a alertRuleVersion) TableName() string {
return "alert_rule_version"
}

View File

@ -5,8 +5,13 @@ import (
"strings"
"sync"
"testing"
"time"
"github.com/grafana/grafana/pkg/infra/log/logtest"
"github.com/grafana/grafana/pkg/services/folder/foldertest"
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
)
func NewFakeImageStore(t *testing.T, images ...*models.Image) *FakeImageStore {
@ -128,3 +133,19 @@ func (f *FakeAdminConfigStore) UpdateAdminConfiguration(cmd UpdateAdminConfigura
return nil
}
func SetupStoreForTesting(t *testing.T, db *sqlstore.ReplStore) *DBstore {
t.Helper()
cfg := setting.NewCfg()
cfg.UnifiedAlerting = setting.UnifiedAlertingSettings{BaseInterval: 1 * time.Second}
service := foldertest.NewFakeService()
store := &DBstore{
SQLStore: db,
Cfg: cfg.UnifiedAlerting,
FolderService: service,
Logger: &logtest.Fake{},
}
return store
}
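For reference, the annotations integration tests earlier in this diff wire this helper as follows (condensed from those tests; alertingStore is their import alias for this package):

sql, cfg := db.InitTestReplDBWithCfg(t)
ruleStore := alertingStore.SetupStoreForTesting(t, sql)
repo := ProvideService(sql, cfg, featuremgmt.WithFeatures(),
	tagimpl.ProvideService(sql), tracing.InitializeTracerForTest(), ruleStore)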

View File

@ -30,7 +30,7 @@ func newPublicDashboardServiceImpl(
db, cfg := db.InitTestReplDBWithCfg(t)
tagService := tagimpl.ProvideService(db)
if annotationsRepo == nil {
annotationsRepo = annotationsimpl.ProvideService(db, cfg, featuremgmt.WithFeatures(), tagService, tracing.InitializeTracerForTest())
annotationsRepo = annotationsimpl.ProvideService(db, cfg, featuremgmt.WithFeatures(), tagService, tracing.InitializeTracerForTest(), nil)
}
if publicDashboardStore == nil {