Alerting: State manager to use tick time to determine stale states (#50991)

* Use the correct stale timestamp
* Calculate staleness using the tick time instead of time.Now()

* Remove the unused dependency on the SQL store
Yuriy Tseretyan, 2022-06-21 18:16:53 -04:00 (committed by GitHub)
parent 3543e05f49
commit 157c12211d
8 changed files with 191 additions and 47 deletions
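For orientation before the per-file diffs: the heart of this change is that stale-state detection is now anchored to the evaluation tick (the scheduledAt time the scheduler hands to ProcessEvalResults) instead of time.Now() at processing time. The snippet below is a minimal standalone sketch of the new check; it mirrors the updated isItStale logic but is not the exact Grafana code path.

```go
package main

import (
	"fmt"
	"time"
)

// isStale mirrors the check introduced in this commit: a cached state is
// stale once the evaluation tick is at least two rule intervals past the
// state's last evaluation time.
func isStale(evaluatedAt, lastEval time.Time, intervalSeconds int64) bool {
	return !lastEval.Add(2 * time.Duration(intervalSeconds) * time.Second).After(evaluatedAt)
}

func main() {
	tick := time.Date(2022, 6, 21, 12, 0, 0, 0, time.UTC) // the tick being processed
	const interval int64 = 60                              // rule interval in seconds

	fmt.Println(isStale(tick, tick.Add(-1*time.Minute), interval)) // false: one interval old
	fmt.Println(isStale(tick, tick.Add(-2*time.Minute), interval)) // true: exactly two intervals old
	fmt.Println(isStale(tick, tick.Add(-3*time.Minute), interval)) // true: well past two intervals
}
```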


@ -48,6 +48,7 @@ Scopes must have an order to ensure consistency and ease of search, this helps u
- [CHANGE] Rule API to reject request to update rules that affects provisioned rules #50835
- [FEATURE] Add first Grafana reserved label, grafana_folder is created during runtime and stores an alert's folder/namespace title #50262
- [FEATURE] use optimistic lock by version field when updating alert rules #50274
- [BUGFIX] State manager to use tick time to determine stale states #50991
- [ENHANCEMENT] Scheduler: Drop ticks if rule evaluation is too slow and adds a metric grafana_alerting_schedule_rule_evaluations_missed_total to track missed evaluations per rule #48885
- [ENHANCEMENT] Ticker to tick at predictable time #50197


@ -151,7 +151,7 @@ func (ng *AlertNG) init() error {
appUrl = nil
}
stateManager := state.NewManager(ng.Log, ng.Metrics.GetStateMetrics(), appUrl, store, store, ng.SQLStore, ng.dashboardService, ng.imageService)
stateManager := state.NewManager(ng.Log, ng.Metrics.GetStateMetrics(), appUrl, store, store, ng.dashboardService, ng.imageService)
scheduler := schedule.NewScheduler(schedCfg, ng.ExpressionService, appUrl, stateManager, ng.bus)
ng.stateManager = stateManager


@ -640,7 +640,7 @@ func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key ngmodels.AlertR
}
logger.Debug("alert rule evaluated", "results", results, "duration", dur)
processedStates := sch.stateManager.ProcessEvalResults(ctx, r, results)
processedStates := sch.stateManager.ProcessEvalResults(ctx, e.scheduledAt, r, results)
sch.saveAlertStates(ctx, processedStates)
alerts := FromAlertStateToPostableAlerts(processedStates, sch.stateManager, sch.appURL)


@ -40,7 +40,7 @@ func TestWarmStateCache(t *testing.T) {
evaluationTime, err := time.Parse("2006-01-02", "2021-03-25")
require.NoError(t, err)
ctx := context.Background()
ng, dbstore := tests.SetupTestEnv(t, 1)
_, dbstore := tests.SetupTestEnv(t, 1)
const mainOrgID int64 = 1
rule := tests.CreateTestAlertRule(t, ctx, dbstore, 600, mainOrgID)
@ -108,7 +108,7 @@ func TestWarmStateCache(t *testing.T) {
Metrics: testMetrics.GetSchedulerMetrics(),
AdminConfigPollInterval: 10 * time.Minute, // do not poll in unit tests.
}
st := state.NewManager(schedCfg.Logger, testMetrics.GetStateMetrics(), nil, dbstore, dbstore, ng.SQLStore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
st := state.NewManager(schedCfg.Logger, testMetrics.GetStateMetrics(), nil, dbstore, dbstore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
st.Warm(ctx)
t.Run("instance cache has expected entries", func(t *testing.T) {
@ -126,7 +126,7 @@ func TestWarmStateCache(t *testing.T) {
func TestAlertingTicker(t *testing.T) {
ctx := context.Background()
ng, dbstore := tests.SetupTestEnv(t, 1)
_, dbstore := tests.SetupTestEnv(t, 1)
alerts := make([]*models.AlertRule, 0)
@ -160,7 +160,7 @@ func TestAlertingTicker(t *testing.T) {
disabledOrgID: {},
},
}
st := state.NewManager(schedCfg.Logger, testMetrics.GetStateMetrics(), nil, dbstore, dbstore, ng.SQLStore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
st := state.NewManager(schedCfg.Logger, testMetrics.GetStateMetrics(), nil, dbstore, dbstore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
appUrl := &url.URL{
Scheme: "http",
Host: "localhost",


@ -34,7 +34,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/secrets/fakes"
secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
"github.com/grafana/grafana/pkg/services/sqlstore/mockstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
@ -959,7 +958,7 @@ func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, ac
Metrics: m.GetSchedulerMetrics(),
AdminConfigPollInterval: 10 * time.Minute, // do not poll in unit tests.
}
st := state.NewManager(schedCfg.Logger, m.GetStateMetrics(), nil, rs, is, mockstore.NewSQLStoreMock(), &dashboards.FakeDashboardService{}, &image.NoopImageService{})
st := state.NewManager(schedCfg.Logger, m.GetStateMetrics(), nil, rs, is, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
appUrl := &url.URL{
Scheme: "http",
Host: "localhost",


@ -21,7 +21,6 @@ import (
ngModels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/screenshot"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
var ResendDelay = 30 * time.Second
@ -42,13 +41,12 @@ type Manager struct {
ruleStore store.RuleStore
instanceStore store.InstanceStore
sqlStore sqlstore.Store
dashboardService dashboards.DashboardService
imageService image.ImageService
}
func NewManager(logger log.Logger, metrics *metrics.State, externalURL *url.URL,
ruleStore store.RuleStore, instanceStore store.InstanceStore, sqlStore sqlstore.Store,
ruleStore store.RuleStore, instanceStore store.InstanceStore,
dashboardService dashboards.DashboardService, imageService image.ImageService) *Manager {
manager := &Manager{
cache: newCache(logger, metrics, externalURL),
@ -58,7 +56,6 @@ func NewManager(logger log.Logger, metrics *metrics.State, externalURL *url.URL,
metrics: metrics,
ruleStore: ruleStore,
instanceStore: instanceStore,
sqlStore: sqlStore,
dashboardService: dashboardService,
imageService: imageService,
}
@ -158,7 +155,7 @@ func (st *Manager) RemoveByRuleUID(orgID int64, ruleUID string) {
st.cache.removeByRuleUID(orgID, ruleUID)
}
func (st *Manager) ProcessEvalResults(ctx context.Context, alertRule *ngModels.AlertRule, results eval.Results) []*State {
func (st *Manager) ProcessEvalResults(ctx context.Context, evaluatedAt time.Time, alertRule *ngModels.AlertRule, results eval.Results) []*State {
st.log.Debug("state manager processing evaluation results", "uid", alertRule.UID, "resultCount", len(results))
var states []*State
processedResults := make(map[string]*State, len(results))
@ -167,7 +164,7 @@ func (st *Manager) ProcessEvalResults(ctx context.Context, alertRule *ngModels.A
states = append(states, s)
processedResults[s.CacheId] = s
}
st.staleResultsHandler(ctx, alertRule, processedResults)
st.staleResultsHandler(ctx, evaluatedAt, alertRule, processedResults)
return states
}
@ -368,11 +365,11 @@ func (st *Manager) annotateState(ctx context.Context, alertRule *ngModels.AlertR
}
}
func (st *Manager) staleResultsHandler(ctx context.Context, alertRule *ngModels.AlertRule, states map[string]*State) {
func (st *Manager) staleResultsHandler(ctx context.Context, evaluatedAt time.Time, alertRule *ngModels.AlertRule, states map[string]*State) {
allStates := st.GetStatesForRuleUID(alertRule.OrgID, alertRule.UID)
for _, s := range allStates {
_, ok := states[s.CacheId]
if !ok && isItStale(s.LastEvaluationTime, alertRule.IntervalSeconds) {
if !ok && isItStale(evaluatedAt, s.LastEvaluationTime, alertRule.IntervalSeconds) {
st.log.Debug("removing stale state entry", "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID, "cacheID", s.CacheId)
st.cache.deleteEntry(s.OrgID, s.AlertRuleUID, s.CacheId)
ilbs := ngModels.InstanceLabels(s.Labels)
@ -386,7 +383,7 @@ func (st *Manager) staleResultsHandler(ctx context.Context, alertRule *ngModels.
}
if s.State == eval.Alerting {
st.annotateState(ctx, alertRule, s.Labels, time.Now(),
st.annotateState(ctx, alertRule, s.Labels, evaluatedAt,
InstanceStateAndReason{State: eval.Normal, Reason: ""},
InstanceStateAndReason{State: s.State, Reason: s.StateReason})
}
@ -394,8 +391,8 @@ func (st *Manager) staleResultsHandler(ctx context.Context, alertRule *ngModels.
}
}
func isItStale(lastEval time.Time, intervalSeconds int64) bool {
return lastEval.Add(2 * time.Duration(intervalSeconds) * time.Second).Before(time.Now())
func isItStale(evaluatedAt time.Time, lastEval time.Time, intervalSeconds int64) bool {
return !lastEval.Add(2 * time.Duration(intervalSeconds) * time.Second).After(evaluatedAt)
}
func removePrivateLabels(labels data.Labels) data.Labels {
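To see why anchoring the check to the tick matters, here is a hedged side-by-side comparison (the inputs are assumed for illustration, not taken from this commit): when the scheduler is processing a tick that lies behind the wall clock, the old time.Now()-based check can flag a state as stale even though, relative to that tick, it is only one interval old; the tick-based check keeps it.

```go
package main

import (
	"fmt"
	"time"
)

const intervalSeconds int64 = 60 // example rule interval

// Old behaviour: staleness measured against the wall clock at processing time.
func isItStaleOld(now, lastEval time.Time) bool {
	return lastEval.Add(2 * time.Duration(intervalSeconds) * time.Second).Before(now)
}

// New behaviour: staleness measured against the tick being processed.
func isItStaleNew(evaluatedAt, lastEval time.Time) bool {
	return !lastEval.Add(2 * time.Duration(intervalSeconds) * time.Second).After(evaluatedAt)
}

func main() {
	// The scheduler is five minutes behind and is only now processing the 12:01 tick.
	wallClock := time.Date(2022, 6, 21, 12, 6, 0, 0, time.UTC)
	tick := time.Date(2022, 6, 21, 12, 1, 0, 0, time.UTC)
	lastEval := time.Date(2022, 6, 21, 12, 0, 0, 0, time.UTC) // evaluated one interval before the tick

	fmt.Println(isItStaleOld(wallClock, lastEval)) // true: flagged stale only because processing lagged
	fmt.Println(isItStaleNew(tick, lastEval))      // false: just one interval old relative to the tick
}
```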


@ -5,6 +5,9 @@ import (
"fmt"
"math/rand"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/dashboards"
@ -12,8 +15,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/metrics"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/sqlstore/mockstore"
"github.com/stretchr/testify/require"
)
// Not for parallel tests.
@ -91,7 +92,7 @@ func Test_maybeNewImage(t *testing.T) {
t.Run(test.description, func(t *testing.T) {
imageService := &CountingImageService{}
mgr := NewManager(log.NewNopLogger(), &metrics.State{}, nil,
&store.FakeRuleStore{}, &store.FakeInstanceStore{}, mockstore.NewSQLStoreMock(),
&store.FakeRuleStore{}, &store.FakeInstanceStore{},
&dashboards.FakeDashboardService{}, imageService)
err := mgr.maybeTakeScreenshot(context.Background(), &ngmodels.AlertRule{}, test.state, test.oldState)
require.NoError(t, err)
@ -104,3 +105,45 @@ func Test_maybeNewImage(t *testing.T) {
})
}
}
func TestIsItStale(t *testing.T) {
now := time.Now()
intervalSeconds := rand.Int63n(10) + 5
testCases := []struct {
name string
lastEvaluation time.Time
expectedResult bool
}{
{
name: "false if last evaluation is now",
lastEvaluation: now,
expectedResult: false,
},
{
name: "false if last evaluation is 1 interval before now",
lastEvaluation: now.Add(-time.Duration(intervalSeconds)),
expectedResult: false,
},
{
name: "false if last evaluation is little less than 2 interval before now",
lastEvaluation: now.Add(-time.Duration(intervalSeconds) * time.Second * 2).Add(100 * time.Millisecond),
expectedResult: false,
},
{
name: "true if last evaluation is 2 intervals from now",
lastEvaluation: now.Add(-time.Duration(intervalSeconds) * time.Second * 2),
expectedResult: true,
},
{
name: "true if last evaluation is 3 intervals from now",
lastEvaluation: now.Add(-time.Duration(intervalSeconds) * time.Second * 3),
expectedResult: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
require.Equal(t, tc.expectedResult, isItStale(now, tc.lastEvaluation, intervalSeconds))
})
}
}


@ -24,7 +24,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/state"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/ngalert/tests"
"github.com/grafana/grafana/pkg/services/sqlstore/mockstore"
)
var testMetrics = metrics.NewNGAlert(prometheus.NewPedanticRegistry())
@ -36,8 +35,7 @@ func TestDashboardAnnotations(t *testing.T) {
ctx := context.Background()
_, dbstore := tests.SetupTestEnv(t, 1)
sqlStore := mockstore.NewSQLStoreMock()
st := state.NewManager(log.New("test_stale_results_handler"), testMetrics.GetStateMetrics(), nil, dbstore, dbstore, sqlStore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
st := state.NewManager(log.New("test_stale_results_handler"), testMetrics.GetStateMetrics(), nil, dbstore, dbstore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
fakeAnnoRepo := store.NewFakeAnnotationsRepo()
annotations.SetRepository(fakeAnnoRepo)
@ -50,7 +48,7 @@ func TestDashboardAnnotations(t *testing.T) {
})
st.Warm(ctx)
_ = st.ProcessEvalResults(ctx, rule, eval.Results{{
_ = st.ProcessEvalResults(ctx, evaluationTime, rule, eval.Results{{
Instance: data.Labels{"instance_label": "testValue2"},
State: eval.Alerting,
EvaluatedAt: evaluationTime,
@ -875,6 +873,31 @@ func TestProcessEvalResults(t *testing.T) {
},
expectedAnnotations: 1,
expectedStates: map[string]*state.State{
`[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test"],["label","test"]]`: {
AlertRuleUID: "test_alert_rule_uid_2",
OrgID: 1,
CacheId: `[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test"],["label","test"]]`,
Labels: data.Labels{
"__alert_rule_namespace_uid__": "test_namespace_uid",
"__alert_rule_uid__": "test_alert_rule_uid_2",
"alertname": "test_title",
"label": "test",
"instance_label": "test",
},
State: eval.Normal,
Results: []state.Evaluation{
{
EvaluationTime: evaluationTime,
EvaluationState: eval.Normal,
Values: make(map[string]*float64),
},
},
StartsAt: time.Time{},
EndsAt: time.Time{},
LastEvaluationTime: evaluationTime,
EvaluationDuration: evaluationDuration,
Annotations: map[string]string{"annotation": "test"},
},
`[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["label","test"]]`: {
AlertRuleUID: "test_alert_rule_uid_2",
OrgID: 1,
@ -939,6 +962,56 @@ func TestProcessEvalResults(t *testing.T) {
},
expectedAnnotations: 1,
expectedStates: map[string]*state.State{
`[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test-1"],["label","test"]]`: {
AlertRuleUID: "test_alert_rule_uid_2",
OrgID: 1,
CacheId: `[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test-1"],["label","test"]]`,
Labels: data.Labels{
"__alert_rule_namespace_uid__": "test_namespace_uid",
"__alert_rule_uid__": "test_alert_rule_uid_2",
"alertname": "test_title",
"label": "test",
"instance_label": "test-1",
},
State: eval.Normal,
Results: []state.Evaluation{
{
EvaluationTime: evaluationTime,
EvaluationState: eval.Normal,
Values: make(map[string]*float64),
},
},
StartsAt: time.Time{},
EndsAt: time.Time{},
LastEvaluationTime: evaluationTime,
EvaluationDuration: evaluationDuration,
Annotations: map[string]string{"annotation": "test"},
},
`[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test-2"],["label","test"]]`: {
AlertRuleUID: "test_alert_rule_uid_2",
OrgID: 1,
CacheId: `[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test-2"],["label","test"]]`,
Labels: data.Labels{
"__alert_rule_namespace_uid__": "test_namespace_uid",
"__alert_rule_uid__": "test_alert_rule_uid_2",
"alertname": "test_title",
"label": "test",
"instance_label": "test-2",
},
State: eval.Normal,
Results: []state.Evaluation{
{
EvaluationTime: evaluationTime,
EvaluationState: eval.Normal,
Values: make(map[string]*float64),
},
},
StartsAt: time.Time{},
EndsAt: time.Time{},
LastEvaluationTime: evaluationTime,
EvaluationDuration: evaluationDuration,
Annotations: map[string]string{"annotation": "test"},
},
`[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["label","test"]]`: {
AlertRuleUID: "test_alert_rule_uid_2",
OrgID: 1,
@ -1018,6 +1091,11 @@ func TestProcessEvalResults(t *testing.T) {
},
State: eval.Normal,
Results: []state.Evaluation{
{
EvaluationTime: evaluationTime,
EvaluationState: eval.Normal,
Values: make(map[string]*float64),
},
{
EvaluationTime: evaluationTime.Add(20 * time.Second),
EvaluationState: eval.Normal,
@ -1030,6 +1108,30 @@ func TestProcessEvalResults(t *testing.T) {
EvaluationDuration: evaluationDuration,
Annotations: map[string]string{"annotation": "test"},
},
`[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["label","test"]]`: {
AlertRuleUID: "test_alert_rule_uid_2",
OrgID: 1,
CacheId: `[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["label","test"]]`,
Labels: data.Labels{
"__alert_rule_namespace_uid__": "test_namespace_uid",
"__alert_rule_uid__": "test_alert_rule_uid_2",
"alertname": "test_title",
"label": "test",
},
State: eval.NoData,
Results: []state.Evaluation{
{
EvaluationTime: evaluationTime.Add(10 * time.Second),
EvaluationState: eval.NoData,
Values: make(map[string]*float64),
},
},
StartsAt: evaluationTime.Add(10 * time.Second),
EndsAt: evaluationTime.Add(10 * time.Second).Add(state.ResendDelay * 3),
LastEvaluationTime: evaluationTime.Add(10 * time.Second),
EvaluationDuration: evaluationDuration,
Annotations: map[string]string{"annotation": "test"},
},
},
},
{
@ -1778,14 +1880,13 @@ func TestProcessEvalResults(t *testing.T) {
}
for _, tc := range testCases {
ss := mockstore.NewSQLStoreMock()
st := state.NewManager(log.New("test_state_manager"), testMetrics.GetStateMetrics(), nil, nil, &store.FakeInstanceStore{}, ss, &dashboards.FakeDashboardService{}, &image.NotAvailableImageService{})
st := state.NewManager(log.New("test_state_manager"), testMetrics.GetStateMetrics(), nil, nil, &store.FakeInstanceStore{}, &dashboards.FakeDashboardService{}, &image.NotAvailableImageService{})
t.Run(tc.desc, func(t *testing.T) {
fakeAnnoRepo := store.NewFakeAnnotationsRepo()
annotations.SetRepository(fakeAnnoRepo)
for _, res := range tc.evalResults {
_ = st.ProcessEvalResults(context.Background(), tc.alertRule, res)
_ = st.ProcessEvalResults(context.Background(), evaluationTime, tc.alertRule, res)
}
states := st.GetStatesForRuleUID(tc.alertRule.OrgID, tc.alertRule.UID)
@ -1815,25 +1916,23 @@ func printAllAnnotations(annos []*annotations.Item) string {
}
func TestStaleResultsHandler(t *testing.T) {
evaluationTime, err := time.Parse("2006-01-02", "2021-03-25")
if err != nil {
t.Fatalf("error parsing date format: %s", err.Error())
}
evaluationTime := time.Now()
interval := 60 * time.Second
ctx := context.Background()
_, dbstore := tests.SetupTestEnv(t, 1)
const mainOrgID int64 = 1
rule := tests.CreateTestAlertRule(t, ctx, dbstore, 600, mainOrgID)
rule := tests.CreateTestAlertRule(t, ctx, dbstore, int64(interval.Seconds()), mainOrgID)
lastEval := evaluationTime.Add(-2 * interval)
saveCmd1 := &models.SaveAlertInstanceCommand{
RuleOrgID: rule.OrgID,
RuleUID: rule.UID,
Labels: models.InstanceLabels{"test1": "testValue1"},
State: models.InstanceStateNormal,
LastEvalTime: evaluationTime,
CurrentStateSince: evaluationTime.Add(-1 * time.Minute),
CurrentStateEnd: evaluationTime.Add(1 * time.Minute),
LastEvalTime: lastEval,
CurrentStateSince: lastEval,
CurrentStateEnd: lastEval.Add(3 * interval),
}
_ = dbstore.SaveAlertInstance(ctx, saveCmd1)
@ -1843,9 +1942,9 @@ func TestStaleResultsHandler(t *testing.T) {
RuleUID: rule.UID,
Labels: models.InstanceLabels{"test2": "testValue2"},
State: models.InstanceStateFiring,
LastEvalTime: evaluationTime,
CurrentStateSince: evaluationTime.Add(-1 * time.Minute),
CurrentStateEnd: evaluationTime.Add(1 * time.Minute),
LastEvalTime: lastEval,
CurrentStateSince: lastEval,
CurrentStateEnd: lastEval.Add(3 * interval),
}
_ = dbstore.SaveAlertInstance(ctx, saveCmd2)
@ -1863,7 +1962,7 @@ func TestStaleResultsHandler(t *testing.T) {
eval.Result{
Instance: data.Labels{"test1": "testValue1"},
State: eval.Normal,
EvaluatedAt: evaluationTime.Add(3 * time.Minute),
EvaluatedAt: evaluationTime,
},
},
},
@ -1881,13 +1980,13 @@ func TestStaleResultsHandler(t *testing.T) {
State: eval.Normal,
Results: []state.Evaluation{
{
EvaluationTime: evaluationTime.Add(3 * time.Minute),
EvaluationTime: evaluationTime,
EvaluationState: eval.Normal,
Values: make(map[string]*float64),
Condition: "A",
},
},
LastEvaluationTime: evaluationTime.Add(3 * time.Minute),
LastEvaluationTime: evaluationTime,
EvaluationDuration: 0,
Annotations: map[string]string{"testAnnoKey": "testAnnoValue"},
},
@ -1899,15 +1998,20 @@ func TestStaleResultsHandler(t *testing.T) {
for _, tc := range testCases {
ctx := context.Background()
sqlStore := mockstore.NewSQLStoreMock()
st := state.NewManager(log.New("test_stale_results_handler"), testMetrics.GetStateMetrics(), nil, dbstore, dbstore, sqlStore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
st := state.NewManager(log.New("test_stale_results_handler"), testMetrics.GetStateMetrics(), nil, dbstore, dbstore, &dashboards.FakeDashboardService{}, &image.NoopImageService{})
st.Warm(ctx)
existingStatesForRule := st.GetStatesForRuleUID(rule.OrgID, rule.UID)
// We have loaded the expected number of entries from the db
assert.Equal(t, tc.startingStateCount, len(existingStatesForRule))
for _, res := range tc.evalResults {
st.ProcessEvalResults(context.Background(), rule, res)
evalTime := evaluationTime
for _, re := range res {
if re.EvaluatedAt.After(evalTime) {
evalTime = re.EvaluatedAt
}
}
st.ProcessEvalResults(context.Background(), evalTime, rule, res)
for _, s := range tc.expectedStates {
cachedState, err := st.Get(s.OrgID, s.AlertRuleUID, s.CacheId)
require.NoError(t, err)