package schedule

import (
	"context"
	"math"
	"math/rand"
	"runtime"
	"sync"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/services/ngalert/models"
	"github.com/grafana/grafana/pkg/util"
)
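
// TestSchedule_alertRuleInfo exercises the update, eval, and stop behaviour of
// alertRuleInfo, both while rule evaluation is running and after it has been stopped.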
func TestSchedule_alertRuleInfo(t *testing.T) {
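	// evalResponse captures the outcome of a single eval call: whether the
	// evaluation was accepted, and the previously pending evaluation (if any)
	// that it dropped.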
	type evalResponse struct {
		success     bool
		droppedEval *evaluation
	}
t.Run("when rule evaluation is not stopped", func(t *testing.T) {
|
2022-07-15 11:32:52 -05:00
|
|
|
t.Run("update should send to updateCh", func(t *testing.T) {
|
2022-05-11 09:04:50 -05:00
|
|
|
r := newAlertRuleInfo(context.Background())
|
|
|
|
resultCh := make(chan bool)
|
|
|
|
go func() {
|
2022-07-15 11:32:52 -05:00
|
|
|
resultCh <- r.update(ruleVersion(rand.Int63()))
|
2022-05-11 09:04:50 -05:00
|
|
|
}()
|
|
|
|
select {
|
|
|
|
case <-r.updateCh:
|
|
|
|
require.True(t, <-resultCh)
|
|
|
|
case <-time.After(5 * time.Second):
|
|
|
|
t.Fatal("No message was received on update channel")
|
|
|
|
}
|
|
|
|
})
		t.Run("update should drop any concurrent sending to updateCh", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			version1 := ruleVersion(rand.Int31())
			version2 := version1 + 1

			wg := sync.WaitGroup{}
			wg.Add(1)
			go func() {
				wg.Done()
				r.update(version1)
				wg.Done()
			}()
			wg.Wait()
			wg.Add(2) // one when version1 is sent, another when the goroutine for version2 has started
			go func() {
				wg.Done()
				r.update(version2)
			}()
			wg.Wait() // at this point version1 has already been dropped
			select {
			case version := <-r.updateCh:
				require.Equal(t, version2, version)
			case <-time.After(5 * time.Second):
				t.Fatal("No message was received on update channel")
			}
		})
		t.Run("update should drop any concurrent sending to updateCh and use greater version", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			version1 := ruleVersion(rand.Int31())
			version2 := version1 + 1

			wg := sync.WaitGroup{}
			wg.Add(1)
			go func() {
				wg.Done()
				r.update(version2)
				wg.Done()
			}()
			wg.Wait()
			wg.Add(2) // one when version2 is sent, another when the goroutine for version1 has started
			go func() {
				wg.Done()
				r.update(version1)
			}()
			wg.Wait() // at this point the first update has already been dropped in favour of the greater version
			select {
			case version := <-r.updateCh:
				require.Equal(t, version2, version)
			case <-time.After(5 * time.Second):
				t.Fatal("No message was received on update channel")
			}
		})
		t.Run("eval should send to evalCh", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			expected := time.Now()
			resultCh := make(chan evalResponse)
			data := &evaluation{
				scheduledAt: expected,
				rule:        models.AlertRuleGen()(),
				folderTitle: util.GenerateShortUID(),
			}
			go func() {
				result, dropped := r.eval(data)
				resultCh <- evalResponse{result, dropped}
			}()
			select {
			case ctx := <-r.evalCh:
				require.Equal(t, data, ctx)
				result := <-resultCh
				require.True(t, result.success)
				require.Nilf(t, result.droppedEval, "expected no dropped evaluations but got one")
			case <-time.After(5 * time.Second):
				t.Fatal("No message was received on eval channel")
			}
		})
		t.Run("eval should drop any concurrent sending to evalCh", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			time1 := time.UnixMilli(rand.Int63n(math.MaxInt64))
			time2 := time.UnixMilli(rand.Int63n(math.MaxInt64))
			resultCh1 := make(chan evalResponse)
			resultCh2 := make(chan evalResponse)
			data := &evaluation{
				scheduledAt: time1,
				rule:        models.AlertRuleGen()(),
				folderTitle: util.GenerateShortUID(),
			}
			data2 := &evaluation{
				scheduledAt: time2,
				rule:        data.rule,
				folderTitle: data.folderTitle,
			}
			wg := sync.WaitGroup{}
			wg.Add(1)
			go func() {
				wg.Done()
				result, dropped := r.eval(data)
				wg.Done()
				resultCh1 <- evalResponse{result, dropped}
			}()
			wg.Wait()
			wg.Add(2) // one when time1 is sent, another when the goroutine for time2 has started
			go func() {
				wg.Done()
				result, dropped := r.eval(data2)
				resultCh2 <- evalResponse{result, dropped}
			}()
			wg.Wait() // at this point the evaluation scheduled at time1 has already been dropped
			select {
			case ctx := <-r.evalCh:
				require.Equal(t, time2, ctx.scheduledAt)
				result := <-resultCh1
				require.True(t, result.success)
				require.Nilf(t, result.droppedEval, "expected no dropped evaluations but got one")
				result = <-resultCh2
				require.True(t, result.success)
				require.NotNil(t, result.droppedEval, "expected a dropped evaluation but got none")
				require.Equal(t, time1, result.droppedEval.scheduledAt)
			case <-time.After(5 * time.Second):
				t.Fatal("No message was received on eval channel")
			}
		})
		t.Run("eval should exit when context is cancelled", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			resultCh := make(chan evalResponse)
			data := &evaluation{
				scheduledAt: time.Now(),
				rule:        models.AlertRuleGen()(),
				folderTitle: util.GenerateShortUID(),
			}
			go func() {
				result, dropped := r.eval(data)
				resultCh <- evalResponse{result, dropped}
			}()
			runtime.Gosched()
			r.stop(nil)
			select {
			case result := <-resultCh:
				require.False(t, result.success)
				require.Nilf(t, result.droppedEval, "expected no dropped evaluations but got one")
			case <-time.After(5 * time.Second):
				t.Fatal("No message was received on eval channel")
			}
		})
	})
	t.Run("when rule evaluation is stopped", func(t *testing.T) {
		t.Run("Update should do nothing", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			r.stop(errRuleDeleted)
			require.ErrorIs(t, r.ctx.Err(), errRuleDeleted)
			require.False(t, r.update(ruleVersion(rand.Int63())))
		})
		t.Run("eval should do nothing", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			r.stop(nil)
			data := &evaluation{
				scheduledAt: time.Now(),
				rule:        models.AlertRuleGen()(),
				folderTitle: util.GenerateShortUID(),
			}
			success, dropped := r.eval(data)
			require.False(t, success)
			require.Nilf(t, dropped, "expected no dropped evaluations but got one")
		})
		t.Run("stop should do nothing", func(t *testing.T) {
			r := newAlertRuleInfo(context.Background())
			r.stop(nil)
			r.stop(nil)
		})
		t.Run("stop should do nothing if parent context stopped", func(t *testing.T) {
			ctx, cancelFn := context.WithCancel(context.Background())
			r := newAlertRuleInfo(ctx)
			cancelFn()
			r.stop(nil)
		})
	})
	t.Run("should be thread-safe", func(t *testing.T) {
		r := newAlertRuleInfo(context.Background())
		wg := sync.WaitGroup{}
		// Consumer: keep draining the eval and update channels until the rule info is stopped.
		go func() {
			for {
				select {
				case <-r.evalCh:
					time.Sleep(time.Microsecond)
				case <-r.updateCh:
					time.Sleep(time.Microsecond)
				case <-r.ctx.Done():
					return
				}
			}
		}()

		// Producers: concurrently send updates, evaluations and, after a few iterations, stops.
		for i := 0; i < 10; i++ {
			wg.Add(1)
			go func() {
				for i := 0; i < 20; i++ {
					max := 3
					if i <= 10 {
						max = 2
					}
					switch rand.Intn(max) + 1 {
					case 1:
						r.update(ruleVersion(rand.Int63()))
					case 2:
						r.eval(&evaluation{
							scheduledAt: time.Now(),
							rule:        models.AlertRuleGen()(),
							folderTitle: util.GenerateShortUID(),
						})
					case 3:
						r.stop(nil)
					}
				}
				wg.Done()
			}()
		}

		wg.Wait()
	})
}
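
// TestSchedulableAlertRulesRegistry covers setting, updating, and deleting rules
// in alertRulesRegistry, together with the folder titles returned by all().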
func TestSchedulableAlertRulesRegistry(t *testing.T) {
	r := alertRulesRegistry{rules: make(map[models.AlertRuleKey]*models.AlertRule)}
	rules, folders := r.all()
	assert.Len(t, rules, 0)
	assert.Len(t, folders, 0)

	expectedFolders := map[string]string{"test-uid": "test-title"}
	// replace all rules in the registry with foo
	r.set([]*models.AlertRule{{OrgID: 1, UID: "foo", Version: 1}}, expectedFolders)
	rules, folders = r.all()
	assert.Len(t, rules, 1)
	assert.Equal(t, expectedFolders, folders)

	foo := r.get(models.AlertRuleKey{OrgID: 1, UID: "foo"})
	require.NotNil(t, foo)
	assert.Equal(t, models.AlertRule{OrgID: 1, UID: "foo", Version: 1}, *foo)

	// update foo to a newer version
	r.update(&models.AlertRule{OrgID: 1, UID: "foo", Version: 2})
	rules, _ = r.all()
	assert.Len(t, rules, 1)
	foo = r.get(models.AlertRuleKey{OrgID: 1, UID: "foo"})
	require.NotNil(t, foo)
	assert.Equal(t, models.AlertRule{OrgID: 1, UID: "foo", Version: 2}, *foo)

	// update bar which does not exist in the registry
	r.update(&models.AlertRule{OrgID: 1, UID: "bar", Version: 1})
	rules, _ = r.all()
	assert.Len(t, rules, 2)
	foo = r.get(models.AlertRuleKey{OrgID: 1, UID: "foo"})
	require.NotNil(t, foo)
	assert.Equal(t, models.AlertRule{OrgID: 1, UID: "foo", Version: 2}, *foo)
	bar := r.get(models.AlertRuleKey{OrgID: 1, UID: "bar"})
	require.NotNil(t, bar)
	assert.Equal(t, models.AlertRule{OrgID: 1, UID: "bar", Version: 1}, *bar)

	// replace all rules in the registry with baz
	r.set([]*models.AlertRule{{OrgID: 1, UID: "baz", Version: 1}}, nil)
	rules, folders = r.all()
	assert.Len(t, rules, 1)
	assert.Nil(t, folders)
	baz := r.get(models.AlertRuleKey{OrgID: 1, UID: "baz"})
	require.NotNil(t, baz)
	assert.Equal(t, models.AlertRule{OrgID: 1, UID: "baz", Version: 1}, *baz)
	assert.Nil(t, r.get(models.AlertRuleKey{OrgID: 1, UID: "foo"}))
	assert.Nil(t, r.get(models.AlertRuleKey{OrgID: 1, UID: "bar"}))

	// delete baz
	deleted, ok := r.del(models.AlertRuleKey{OrgID: 1, UID: "baz"})
	assert.True(t, ok)
	require.NotNil(t, deleted)
	assert.Equal(t, *deleted, *baz)
	rules, folders = r.all()
	assert.Len(t, rules, 0)
	assert.Len(t, folders, 0)
	assert.Nil(t, r.get(models.AlertRuleKey{OrgID: 1, UID: "baz"}))

	// baz cannot be deleted twice
	deleted, ok = r.del(models.AlertRuleKey{OrgID: 1, UID: "baz"})
	assert.False(t, ok)
	assert.Nil(t, deleted)
}