Alerting: Persist silence state immediately on Create/Delete (#84705)

* Alerting: Persist silence state immediately on Create/Delete

Persists the silence state to the kvstore immediately instead of waiting for the
next maintenance run. The state is now written after Create/Delete to prevent
silences from being lost when a new Alertmanager is started before the state has
been persisted. This can happen, for example, in a rolling deployment scenario.

* Fix test that requires real data

* Don't error if persisting the silence state fails; the maintenance run will correct it
Matthew Jacobson 2024-04-09 13:39:34 -04:00 committed by GitHub
parent c7e4baff75
commit f79dd7c7f9
14 changed files with 449 additions and 142 deletions
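
For orientation before the diff: below is a minimal, self-contained sketch of the flow this change introduces, using simplified stand-in types rather than the real Grafana ones (the actual implementation is MultiOrgAlertmanager.CreateSilence / DeleteSilence plus updateSilenceState further down). The silence is created in the org's Alertmanager first so its ID is known, the silence state is then written to the kvstore immediately, and a failed write is only logged because the periodic maintenance run will persist the state again.

package sketch

import (
	"context"
	"errors"
	"log"
)

// silenceState stands in for alertingNotify.SilenceState.
type silenceState map[string]struct{}

// orgAlertmanager is a cut-down stand-in for the per-org Alertmanager interface in the diff.
type orgAlertmanager interface {
	Ready() bool
	CreateSilence(ctx context.Context, comment string) (string, error)
	SilenceState(ctx context.Context) (silenceState, error)
}

// silenceStore stands in for the kvstore-backed FileStore.
type silenceStore interface {
	SaveSilences(ctx context.Context, st silenceState) (int64, error)
}

var errAlertmanagerNotReady = errors.New("alertmanager is not ready")

// createSilence creates the silence first (to obtain its ID), then immediately
// persists the silence state. A failed persist is logged, not returned, since
// the next maintenance run will write the state anyway.
func createSilence(ctx context.Context, am orgAlertmanager, fs silenceStore, comment string) (string, error) {
	if !am.Ready() {
		return "", errAlertmanagerNotReady
	}
	id, err := am.CreateSilence(ctx, comment)
	if err != nil {
		return "", err
	}
	st, err := am.SilenceState(ctx)
	if err == nil {
		_, err = fs.SaveSilences(ctx, st)
	}
	if err != nil {
		log.Printf("failed to persist silence state, maintenance will correct it: %v", err)
	}
	return id, nil
}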

go.mod

@ -47,7 +47,7 @@ require (
github.com/google/uuid v1.6.0 // @grafana/backend-platform
github.com/google/wire v0.5.0 // @grafana/backend-platform
github.com/gorilla/websocket v1.5.0 // @grafana/grafana-app-platform-squad
github.com/grafana/alerting v0.0.0-20240405171255-923446b51c2d // @grafana/alerting-squad-backend
github.com/grafana/alerting v0.0.0-20240409171830-e039a7f57a92 // @grafana/alerting-squad-backend
github.com/grafana/cuetsy v0.1.11 // @grafana/grafana-as-code
github.com/grafana/grafana-aws-sdk v0.25.0 // @grafana/aws-datasources
github.com/grafana/grafana-azure-sdk-go/v2 v2.0.1 // @grafana/partner-datasources

go.sum

@ -2159,8 +2159,8 @@ github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/ad
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grafana/alerting v0.0.0-20240405171255-923446b51c2d h1:wu7A06T+xFTDGUfrR6oVdM3k+vp1MLRjsXd7dckHfGA=
github.com/grafana/alerting v0.0.0-20240405171255-923446b51c2d/go.mod h1:TfSmMRify3r/yFbVM7tJGT44qUkBQ3ZWka8jwSUHQL4=
github.com/grafana/alerting v0.0.0-20240409171830-e039a7f57a92 h1:DfzT76QLAyvJZiQMEnHTRRkQsvOF0S2jH9bO1OIX5yw=
github.com/grafana/alerting v0.0.0-20240409171830-e039a7f57a92/go.mod h1:jriidrGFnZWvAn3xw/BOoCkJbGWnncuojMi9jq6xxwE=
github.com/grafana/authlib v0.0.0-20240328140636-a7388d0bac72 h1:lGEuhD/KhhN1OiPrvwQejl9Lg8MvaHdj3lHZNref4is=
github.com/grafana/authlib v0.0.0-20240328140636-a7388d0bac72/go.mod h1:86rRD5P6u2JPWtNWTMOlqlU+YMv2fUvVz/DomA6L7w4=
github.com/grafana/codejen v0.0.3 h1:tAWxoTUuhgmEqxJPOLtJoxlPBbMULFwKFOcRsPRPXDw=


@ -65,11 +65,6 @@ func (srv AlertmanagerSrv) RouteCreateSilence(c *contextmodel.ReqContext, postab
return ErrResp(http.StatusBadRequest, err, "silence failed validation")
}
am, errResp := srv.AlertmanagerFor(c.SignedInUser.GetOrgID())
if errResp != nil {
return errResp
}
action := accesscontrol.ActionAlertingInstanceUpdate
if postableSilence.ID == "" {
action = accesscontrol.ActionAlertingInstanceCreate
@ -83,8 +78,15 @@ func (srv AlertmanagerSrv) RouteCreateSilence(c *contextmodel.ReqContext, postab
return response.Err(authz.NewAuthorizationErrorWithPermissions(fmt.Sprintf("%s silences", errAction), evaluator))
}
silenceID, err := am.CreateSilence(c.Req.Context(), &postableSilence)
silenceID, err := srv.mam.CreateSilence(c.Req.Context(), c.SignedInUser.GetOrgID(), &postableSilence)
if err != nil {
if errors.Is(err, notifier.ErrNoAlertmanagerForOrg) {
return ErrResp(http.StatusNotFound, err, "")
}
if errors.Is(err, notifier.ErrAlertmanagerNotReady) {
return ErrResp(http.StatusConflict, err, "")
}
if errors.Is(err, alertingNotify.ErrSilenceNotFound) {
return ErrResp(http.StatusNotFound, err, "")
}
@ -115,12 +117,13 @@ func (srv AlertmanagerSrv) RouteDeleteAlertingConfig(c *contextmodel.ReqContext)
}
func (srv AlertmanagerSrv) RouteDeleteSilence(c *contextmodel.ReqContext, silenceID string) response.Response {
am, errResp := srv.AlertmanagerFor(c.SignedInUser.GetOrgID())
if errResp != nil {
return errResp
}
if err := am.DeleteSilence(c.Req.Context(), silenceID); err != nil {
if err := srv.mam.DeleteSilence(c.Req.Context(), c.SignedInUser.GetOrgID(), silenceID); err != nil {
if errors.Is(err, notifier.ErrNoAlertmanagerForOrg) {
return ErrResp(http.StatusNotFound, err, "")
}
if errors.Is(err, notifier.ErrAlertmanagerNotReady) {
return ErrResp(http.StatusConflict, err, "")
}
if errors.Is(err, alertingNotify.ErrSilenceNotFound) {
return ErrResp(http.StatusNotFound, err, "")
}


@ -417,6 +417,11 @@ func (am *alertmanager) PutAlerts(_ context.Context, postableAlerts apimodels.Po
return am.Base.PutAlerts(alerts)
}
// SilenceState returns the current internal state of silences.
func (am *alertmanager) SilenceState(_ context.Context) (alertingNotify.SilenceState, error) {
return am.Base.SilenceState()
}
// AlertValidationError is the error capturing the validation errors
// faced on the alerts.
type AlertValidationError struct {


@ -711,6 +711,60 @@ func (_c *AlertmanagerMock_SaveAndApplyDefaultConfig_Call) RunAndReturn(run func
return _c
}
// SilenceState provides a mock function with given fields: _a0
func (_m *AlertmanagerMock) SilenceState(_a0 context.Context) (notify.SilenceState, error) {
ret := _m.Called(_a0)
var r0 notify.SilenceState
var r1 error
if rf, ok := ret.Get(0).(func(context.Context) (notify.SilenceState, error)); ok {
return rf(_a0)
}
if rf, ok := ret.Get(0).(func(context.Context) notify.SilenceState); ok {
r0 = rf(_a0)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(notify.SilenceState)
}
}
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(_a0)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// AlertmanagerMock_SilenceState_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SilenceState'
type AlertmanagerMock_SilenceState_Call struct {
*mock.Call
}
// SilenceState is a helper method to define mock.On call
// - _a0 context.Context
func (_e *AlertmanagerMock_Expecter) SilenceState(_a0 interface{}) *AlertmanagerMock_SilenceState_Call {
return &AlertmanagerMock_SilenceState_Call{Call: _e.mock.On("SilenceState", _a0)}
}
func (_c *AlertmanagerMock_SilenceState_Call) Run(run func(_a0 context.Context)) *AlertmanagerMock_SilenceState_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context))
})
return _c
}
func (_c *AlertmanagerMock_SilenceState_Call) Return(_a0 notify.SilenceState, _a1 error) *AlertmanagerMock_SilenceState_Call {
_c.Call.Return(_a0, _a1)
return _c
}
func (_c *AlertmanagerMock_SilenceState_Call) RunAndReturn(run func(context.Context) (notify.SilenceState, error)) *AlertmanagerMock_SilenceState_Call {
_c.Call.Return(run)
return _c
}
// StopAndWait provides a mock function with given fields:
func (_m *AlertmanagerMock) StopAndWait() {
_m.Called()


@ -6,9 +6,10 @@ import (
"sync"
"time"
alertingCluster "github.com/grafana/alerting/cluster"
"github.com/prometheus/client_golang/prometheus"
alertingCluster "github.com/grafana/alerting/cluster"
alertingNotify "github.com/grafana/alerting/notify"
"github.com/grafana/grafana/pkg/infra/kvstore"
@ -42,6 +43,10 @@ type Alertmanager interface {
GetSilence(context.Context, string) (apimodels.GettableSilence, error)
ListSilences(context.Context, []string) (apimodels.GettableSilences, error)
// SilenceState returns the current state of silences in the Alertmanager. This is used to persist the state
// to the kvstore.
SilenceState(context.Context) (alertingNotify.SilenceState, error)
// Alerts
GetAlerts(ctx context.Context, active, silenced, inhibited bool, filter []string, receiver string) (apimodels.GettableAlerts, error)
GetAlertGroups(ctx context.Context, active, silenced, inhibited bool, filter []string, receiver string) (apimodels.AlertGroups, error)
@ -395,6 +400,83 @@ func (moa *MultiOrgAlertmanager) AlertmanagerFor(orgID int64) (Alertmanager, err
return orgAM, nil
}
// CreateSilence creates a silence in the Alertmanager for the organization provided, returning the silence ID. It will
// also persist the silence state to the kvstore immediately after creating the silence.
func (moa *MultiOrgAlertmanager) CreateSilence(ctx context.Context, orgID int64, ps *alertingNotify.PostableSilence) (string, error) {
moa.alertmanagersMtx.RLock()
defer moa.alertmanagersMtx.RUnlock()
orgAM, existing := moa.alertmanagers[orgID]
if !existing {
return "", ErrNoAlertmanagerForOrg
}
if !orgAM.Ready() {
return "", ErrAlertmanagerNotReady
}
// Need to create the silence in the AM first to get the silence ID.
silenceID, err := orgAM.CreateSilence(ctx, ps)
if err != nil {
return "", err
}
err = moa.updateSilenceState(ctx, orgAM, orgID)
if err != nil {
moa.logger.Warn("Failed to persist silence state on create, will be corrected by next maintenance run", "orgID", orgID, "silenceID", silenceID, "error", err)
}
return silenceID, nil
}
// DeleteSilence deletes a silence in the Alertmanager for the organization provided. It will also persist the silence
// state to the kvstore immediately after deleting the silence.
func (moa *MultiOrgAlertmanager) DeleteSilence(ctx context.Context, orgID int64, silenceID string) error {
moa.alertmanagersMtx.RLock()
defer moa.alertmanagersMtx.RUnlock()
orgAM, existing := moa.alertmanagers[orgID]
if !existing {
return ErrNoAlertmanagerForOrg
}
if !orgAM.Ready() {
return ErrAlertmanagerNotReady
}
err := orgAM.DeleteSilence(ctx, silenceID)
if err != nil {
return err
}
err = moa.updateSilenceState(ctx, orgAM, orgID)
if err != nil {
moa.logger.Warn("Failed to persist silence state on delete, will be corrected by next maintenance run", "orgID", orgID, "silenceID", silenceID, "error", err)
}
return nil
}
// updateSilenceState persists the silence state to the kvstore immediately instead of waiting for the next maintenance
// run. This is used after Create/Delete to prevent silences from being lost when a new Alertmanager is started before
// the state has persisted. This can happen, for example, in a rolling deployment scenario.
func (moa *MultiOrgAlertmanager) updateSilenceState(ctx context.Context, orgAM Alertmanager, orgID int64) error {
// Collect the internal silence state from the AM.
// TODO: Currently, we rely on the AM itself for the persisted silence state representation. Preferably, we would
// define the state ourselves and persist it in a format that is easy to guarantee consistency for writes to
// individual silences. In addition to the consistency benefits, this would also allow us to avoid the need for
// a network request to the AM to get the state in the case of remote alertmanagers.
silences, err := orgAM.SilenceState(ctx)
if err != nil {
return err
}
// Persist to kvstore.
fs := NewFileStore(orgID, moa.kvStore)
_, err = fs.SaveSilences(ctx, silences)
return err
}
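SaveSilences itself is not part of this diff; judging by how the tests below read the value back (base64-decode the kvstore entry, then alertingNotify.DecodeState), the write path presumably looks roughly like the sketch below. The kvStore interface mirrors the Set call used in the tests; the function name and everything else are illustrative, not the actual FileStore code.

package sketch

import (
	"context"
	"encoding"
	"encoding/base64"
	"fmt"
)

// kvStore mirrors the Set signature used by the fake kvstore in the tests.
type kvStore interface {
	Set(ctx context.Context, orgID int64, namespace, key, value string) error
}

// saveSilenceState marshals the silence state and stores it base64-encoded
// under the silences key, which is how the stored value is decoded in tests.
func saveSilenceState(ctx context.Context, kv kvStore, orgID int64, state encoding.BinaryMarshaler) error {
	b, err := state.MarshalBinary() // alertingNotify.SilenceState implements MarshalBinary (see getFullState below)
	if err != nil {
		return fmt.Errorf("marshal silence state: %w", err)
	}
	return kv.Set(ctx, orgID, "alertmanager", "silences", base64.StdEncoding.EncodeToString(b))
}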
// NilPeer and NilChannel implements the Alertmanager clustering interface.
type NilPeer struct{}


@ -126,8 +126,8 @@ func TestMultiorgAlertmanager_RemoteSecondaryMode(t *testing.T) {
OrgID: 1,
LastApplied: time.Now().Unix(),
}))
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.SilencesFilename, "dGVzdAo=")) // base64-encoded string "test"
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.NotificationLogFilename, "dGVzdAo=")) // base64-encoded string "test"
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.SilencesFilename, "lwEKhgEKATISFxIJYWxlcnRuYW1lGgp0ZXN0X2FsZXJ0EiMSDmdyYWZhbmFfZm9sZGVyGhF0ZXN0X2FsZXJ0X2ZvbGRlchoMCN2CkbAGEJbKrMsDIgwI7Z6RsAYQlsqsywMqCwiAkrjDmP7///8BQgxHcmFmYW5hIFRlc3RKDFRlc3QgU2lsZW5jZRIMCO2ekbAGEJbKrMsDlwEKhgEKATESFxIJYWxlcnRuYW1lGgp0ZXN0X2FsZXJ0EiMSDmdyYWZhbmFfZm9sZGVyGhF0ZXN0X2FsZXJ0X2ZvbGRlchoMCN2CkbAGEJbKrMsDIgwI7Z6RsAYQlsqsywMqCwiAkrjDmP7///8BQgxHcmFmYW5hIFRlc3RKDFRlc3QgU2lsZW5jZRIMCO2ekbAGEJbKrMsD"))
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.NotificationLogFilename, "OgoqCgZncm91cDISEgoJcmVjZWl2ZXIyEgV0ZXN0MyoMCLSDkbAGEMvaofYCEgwIxJ+RsAYQy9qh9gI6CioKBmdyb3VwMRISCglyZWNlaXZlcjESBXRlc3QzKgwItIORsAYQy9qh9gISDAjEn5GwBhDL2qH2Ag=="))
// The sync interval (10s) has not elapsed yet, syncing should have no effect.
require.NoError(t, moa.LoadAndSyncAlertmanagersForOrgs(ctx))
@ -151,8 +151,8 @@ func TestMultiorgAlertmanager_RemoteSecondaryMode(t *testing.T) {
OrgID: 1,
LastApplied: time.Now().Unix(),
}))
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.SilencesFilename, "dGVzdC0yCg==")) // base64-encoded string "test-2"
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.NotificationLogFilename, "dGVzdC0yCg==")) // base64-encoded string "test-2"
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.SilencesFilename, "lwEKhgEKAWESFxIJYWxlcnRuYW1lGgp0ZXN0X2FsZXJ0EiMSDmdyYWZhbmFfZm9sZGVyGhF0ZXN0X2FsZXJ0X2ZvbGRlchoMCPuEkbAGEK3AhM8CIgwIi6GRsAYQrcCEzwIqCwiAkrjDmP7///8BQgxHcmFmYW5hIFRlc3RKDFRlc3QgU2lsZW5jZRIMCIuhkbAGEK3AhM8ClwEKhgEKAWISFxIJYWxlcnRuYW1lGgp0ZXN0X2FsZXJ0EiMSDmdyYWZhbmFfZm9sZGVyGhF0ZXN0X2FsZXJ0X2ZvbGRlchoMCPuEkbAGEK3AhM8CIgwIi6GRsAYQrcCEzwIqCwiAkrjDmP7///8BQgxHcmFmYW5hIFRlc3RKDFRlc3QgU2lsZW5jZRIMCIuhkbAGEK3AhM8C"))
require.NoError(t, kvStore.Set(ctx, 1, "alertmanager", notifier.NotificationLogFilename, "OAopCgZncm91cEESEgoJcmVjZWl2ZXJBEgV0ZXN0MyoLCNmEkbAGEOzO0BUSCwjpoJGwBhDsztAVOAopCgZncm91cEISEgoJcmVjZWl2ZXJCEgV0ZXN0MyoLCNmEkbAGEOzO0BUSCwjpoJGwBhDsztAV"))
// Both state and config should be updated when shutting the Alertmanager down.
moa.StopAndWait()


@ -6,6 +6,8 @@ import (
"testing"
"time"
alertingNotify "github.com/grafana/alerting/notify"
"github.com/prometheus/alertmanager/types"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/testutil"
"github.com/stretchr/testify/require"
@ -22,18 +24,7 @@ import (
)
func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgs(t *testing.T) {
configStore := NewFakeConfigStore(t, map[int64]*models.AlertConfiguration{})
orgStore := &FakeOrgStore{
orgs: []int64{1, 2, 3},
}
tmpDir := t.TempDir()
kvStore := ngfakes.NewFakeKVStore(t)
provStore := ngfakes.NewFakeProvisioningStore()
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
decryptFn := secretsService.GetDecryptedValue
reg := prometheus.NewPedanticRegistry()
m := metrics.NewNGAlert(reg)
cfg := &setting.Cfg{
DataPath: tmpDir,
UnifiedAlerting: setting.UnifiedAlertingSettings{
@ -42,8 +33,9 @@ func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgs(t *testing.T) {
DisabledOrgs: map[int64]struct{}{5: {}},
}, // do not poll in tests.
}
mam, err := NewMultiOrgAlertmanager(cfg, configStore, orgStore, kvStore, provStore, decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService, featuremgmt.WithFeatures())
require.NoError(t, err)
mam := setupMam(t, cfg)
reg := mam.metrics.Registerer.(*prometheus.Registry)
orgStore := mam.orgStore.(*FakeOrgStore)
ctx := context.Background()
// Ensure that one Alertmanager is created per org.
@ -61,7 +53,7 @@ grafana_alerting_discovered_configurations 3
// Configurations should be marked as successfully applied.
for _, org := range orgStore.orgs {
configs, err := configStore.GetAppliedConfigurations(ctx, org, 10)
configs, err := mam.configStore.GetAppliedConfigurations(ctx, org, 10)
require.NoError(t, err)
require.Len(t, configs, 1)
}
@ -106,58 +98,40 @@ grafana_alerting_discovered_configurations 4
orgID := int64(6)
// Populate the kvstore with orphaned records.
err = kvStore.Set(ctx, orgID, KVNamespace, SilencesFilename, "file_1")
err := mam.kvStore.Set(ctx, orgID, KVNamespace, SilencesFilename, "file_1")
require.NoError(t, err)
err = kvStore.Set(ctx, orgID, KVNamespace, NotificationLogFilename, "file_1")
err = mam.kvStore.Set(ctx, orgID, KVNamespace, NotificationLogFilename, "file_1")
require.NoError(t, err)
// Now re run the sync job once.
require.NoError(t, mam.LoadAndSyncAlertmanagersForOrgs(ctx))
// The organization kvstore records should be gone by now.
_, exists, _ := kvStore.Get(ctx, orgID, KVNamespace, SilencesFilename)
_, exists, _ := mam.kvStore.Get(ctx, orgID, KVNamespace, SilencesFilename)
require.False(t, exists)
_, exists, _ = kvStore.Get(ctx, orgID, KVNamespace, NotificationLogFilename)
_, exists, _ = mam.kvStore.Get(ctx, orgID, KVNamespace, NotificationLogFilename)
require.False(t, exists)
}
}
func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgsWithFailures(t *testing.T) {
mam := setupMam(t, nil)
ctx := context.Background()
// Include a broken configuration for organization 2.
var orgWithBadConfig int64 = 2
configStore := NewFakeConfigStore(t, map[int64]*models.AlertConfiguration{
mam.configStore = NewFakeConfigStore(t, map[int64]*models.AlertConfiguration{
2: {AlertmanagerConfiguration: brokenConfig, OrgID: orgWithBadConfig},
})
orgs := []int64{1, 2, 3}
orgStore := &FakeOrgStore{
orgs: orgs,
}
tmpDir := t.TempDir()
kvStore := ngfakes.NewFakeKVStore(t)
provStore := ngfakes.NewFakeProvisioningStore()
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
decryptFn := secretsService.GetDecryptedValue
reg := prometheus.NewPedanticRegistry()
m := metrics.NewNGAlert(reg)
cfg := &setting.Cfg{
DataPath: tmpDir,
UnifiedAlerting: setting.UnifiedAlertingSettings{
AlertmanagerConfigPollInterval: 10 * time.Minute,
DefaultConfiguration: setting.GetAlertmanagerDefaultConfiguration(),
}, // do not poll in tests.
}
mam, err := NewMultiOrgAlertmanager(cfg, configStore, orgStore, kvStore, provStore, decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService, featuremgmt.WithFeatures())
orgs, err := mam.orgStore.GetOrgs(ctx)
require.NoError(t, err)
ctx := context.Background()
// No successfully applied configurations should be found at first.
{
for _, org := range orgs {
configs, err := configStore.GetAppliedConfigurations(ctx, org, 10)
configs, err := mam.configStore.GetAppliedConfigurations(ctx, org, 10)
require.NoError(t, err)
require.Len(t, configs, 0)
}
@ -173,7 +147,7 @@ func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgsWithFailures(t *testing.T)
// Configurations should be marked as successfully applied for all orgs except for org 2.
for _, org := range orgs {
configs, err := configStore.GetAppliedConfigurations(ctx, org, 10)
configs, err := mam.configStore.GetAppliedConfigurations(ctx, org, 10)
require.NoError(t, err)
if org == orgWithBadConfig {
require.Len(t, configs, 0)
@ -193,7 +167,7 @@ func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgsWithFailures(t *testing.T)
// The configuration should still be marked as successfully applied for all orgs except for org 2.
for _, org := range orgs {
configs, err := configStore.GetAppliedConfigurations(ctx, org, 10)
configs, err := mam.configStore.GetAppliedConfigurations(ctx, org, 10)
require.NoError(t, err)
if org == orgWithBadConfig {
require.Len(t, configs, 0)
@ -205,7 +179,7 @@ func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgsWithFailures(t *testing.T)
// If we fix the configuration, it becomes ready.
{
configStore.configs = map[int64]*models.AlertConfiguration{} // It'll apply the default config.
mam.configStore.(*fakeConfigStore).configs = map[int64]*models.AlertConfiguration{} // It'll apply the default config.
require.NoError(t, mam.LoadAndSyncAlertmanagersForOrgs(ctx))
require.Len(t, mam.alertmanagers, 3)
require.True(t, mam.alertmanagers[1].Ready())
@ -214,7 +188,7 @@ func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgsWithFailures(t *testing.T)
// All configurations should be marked as successfully applied.
for _, org := range orgs {
configs, err := configStore.GetAppliedConfigurations(ctx, org, 10)
configs, err := mam.configStore.GetAppliedConfigurations(ctx, org, 10)
require.NoError(t, err)
require.NotEqual(t, 0, len(configs))
}
@ -222,23 +196,7 @@ func TestMultiOrgAlertmanager_SyncAlertmanagersForOrgsWithFailures(t *testing.T)
}
func TestMultiOrgAlertmanager_AlertmanagerFor(t *testing.T) {
configStore := NewFakeConfigStore(t, map[int64]*models.AlertConfiguration{})
orgStore := &FakeOrgStore{
orgs: []int64{1, 2, 3},
}
tmpDir := t.TempDir()
cfg := &setting.Cfg{
DataPath: tmpDir,
UnifiedAlerting: setting.UnifiedAlertingSettings{AlertmanagerConfigPollInterval: 3 * time.Minute, DefaultConfiguration: setting.GetAlertmanagerDefaultConfiguration()}, // do not poll in tests.
}
kvStore := ngfakes.NewFakeKVStore(t)
provStore := ngfakes.NewFakeProvisioningStore()
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
decryptFn := secretsService.GetDecryptedValue
reg := prometheus.NewPedanticRegistry()
m := metrics.NewNGAlert(reg)
mam, err := NewMultiOrgAlertmanager(cfg, configStore, orgStore, kvStore, provStore, decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService, featuremgmt.WithFeatures())
require.NoError(t, err)
mam := setupMam(t, nil)
ctx := context.Background()
// Ensure that one Alertmanagers is created per org.
@ -264,7 +222,7 @@ func TestMultiOrgAlertmanager_AlertmanagerFor(t *testing.T) {
}
// Let's now remove the previous queried organization.
orgStore.orgs = []int64{1, 3}
mam.orgStore.(*FakeOrgStore).orgs = []int64{1, 3}
require.NoError(t, mam.LoadAndSyncAlertmanagersForOrgs(ctx))
{
_, err := mam.AlertmanagerFor(2)
@ -273,24 +231,7 @@ func TestMultiOrgAlertmanager_AlertmanagerFor(t *testing.T) {
}
func TestMultiOrgAlertmanager_ActivateHistoricalConfiguration(t *testing.T) {
configStore := NewFakeConfigStore(t, map[int64]*models.AlertConfiguration{})
orgStore := &FakeOrgStore{
orgs: []int64{1, 2, 3},
}
tmpDir := t.TempDir()
defaultConfig := `{"template_files":null,"alertmanager_config":{"route":{"receiver":"grafana-default-email","group_by":["grafana_folder","alertname"]},"templates":null,"receivers":[{"name":"grafana-default-email","grafana_managed_receiver_configs":[{"uid":"","name":"email receiver","type":"email","disableResolveMessage":false,"settings":{"addresses":"\u003cexample@email.com\u003e"},"secureSettings":null}]}]}}`
cfg := &setting.Cfg{
DataPath: tmpDir,
UnifiedAlerting: setting.UnifiedAlertingSettings{AlertmanagerConfigPollInterval: 3 * time.Minute, DefaultConfiguration: defaultConfig}, // do not poll in tests.
}
kvStore := ngfakes.NewFakeKVStore(t)
provStore := ngfakes.NewFakeProvisioningStore()
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
decryptFn := secretsService.GetDecryptedValue
reg := prometheus.NewPedanticRegistry()
m := metrics.NewNGAlert(reg)
mam, err := NewMultiOrgAlertmanager(cfg, configStore, orgStore, kvStore, provStore, decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService, featuremgmt.WithFeatures())
require.NoError(t, err)
mam := setupMam(t, nil)
ctx := context.Background()
// Ensure that one Alertmanager is created per org.
@ -339,9 +280,141 @@ func TestMultiOrgAlertmanager_ActivateHistoricalConfiguration(t *testing.T) {
// Verify that the org has the old default config.
cfgs, err = mam.getLatestConfigs(ctx)
require.NoError(t, err)
require.Equal(t, defaultConfig, cfgs[2].AlertmanagerConfiguration)
require.JSONEq(t, defaultConfig, cfgs[2].AlertmanagerConfiguration)
}
func TestMultiOrgAlertmanager_Silences(t *testing.T) {
mam := setupMam(t, nil)
ctx := context.Background()
// Ensure that one Alertmanager is created per org.
{
require.NoError(t, mam.LoadAndSyncAlertmanagersForOrgs(ctx))
require.Len(t, mam.alertmanagers, 3)
}
am, err := mam.AlertmanagerFor(1)
require.NoError(t, err)
// Confirm no silences.
silences, err := am.ListSilences(ctx, []string{})
require.NoError(t, err)
require.Len(t, silences, 0)
// Confirm empty state.
state, err := am.SilenceState(ctx)
require.NoError(t, err)
require.Len(t, state, 0)
// Confirm empty kvstore.
v, ok, err := mam.kvStore.Get(ctx, 1, KVNamespace, SilencesFilename)
require.NoError(t, err)
require.False(t, ok)
require.Empty(t, v)
// Create 2 silences.
sid, err := mam.CreateSilence(ctx, 1, GenSilence("test"))
require.NoError(t, err)
require.NotEmpty(t, sid)
sid2, err := mam.CreateSilence(ctx, 1, GenSilence("test"))
require.NoError(t, err)
require.NotEmpty(t, sid2)
// Confirm 2 silences.
silences, err = am.ListSilences(ctx, []string{})
require.NoError(t, err)
require.Len(t, silences, 2)
// Confirm 2 states.
state, err = am.SilenceState(ctx)
require.NoError(t, err)
require.Len(t, state, 2)
// Confirm 2 silences in the kvstore.
v, ok, err = mam.kvStore.Get(ctx, 1, KVNamespace, SilencesFilename)
require.NoError(t, err)
require.True(t, ok)
decoded, err := decode(v)
require.NoError(t, err)
state, err = alertingNotify.DecodeState(bytes.NewReader(decoded))
require.NoError(t, err)
require.Len(t, state, 2)
// Delete silence.
err = mam.DeleteSilence(ctx, 1, sid)
require.NoError(t, err)
// Confirm silence is expired in memory.
silence, err := am.GetSilence(ctx, sid)
require.NoError(t, err)
require.EqualValues(t, types.SilenceStateExpired, *silence.Status.State)
// Confirm silence is expired in kvstore.
v, ok, err = mam.kvStore.Get(ctx, 1, KVNamespace, SilencesFilename)
require.NoError(t, err)
require.True(t, ok)
decoded, err = decode(v)
require.NoError(t, err)
state, err = alertingNotify.DecodeState(bytes.NewReader(decoded))
require.NoError(t, err)
require.True(t, time.Now().After(state[sid].Silence.EndsAt)) // Expired.
}
func setupMam(t *testing.T, cfg *setting.Cfg) *MultiOrgAlertmanager {
if cfg == nil {
tmpDir := t.TempDir()
cfg = &setting.Cfg{
DataPath: tmpDir,
UnifiedAlerting: setting.UnifiedAlertingSettings{AlertmanagerConfigPollInterval: 3 * time.Minute, DefaultConfiguration: defaultConfig}, // do not poll in tests.
}
}
cs := NewFakeConfigStore(t, map[int64]*models.AlertConfiguration{})
orgStore := &FakeOrgStore{
orgs: []int64{1, 2, 3},
}
kvStore := ngfakes.NewFakeKVStore(t)
provStore := ngfakes.NewFakeProvisioningStore()
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
decryptFn := secretsService.GetDecryptedValue
reg := prometheus.NewPedanticRegistry()
m := metrics.NewNGAlert(reg)
mam, err := NewMultiOrgAlertmanager(cfg, cs, orgStore, kvStore, provStore, decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService, featuremgmt.WithFeatures())
require.NoError(t, err)
return mam
}
var defaultConfig = `
{
"template_files": null,
"alertmanager_config": {
"route": {
"receiver": "grafana-default-email",
"group_by": [
"grafana_folder",
"alertname"
]
},
"templates": null,
"receivers": [
{
"name": "grafana-default-email",
"grafana_managed_receiver_configs": [
{
"uid": "",
"name": "email receiver",
"type": "email",
"disableResolveMessage": false,
"settings": {
"addresses": "\u003cexample@email.com\u003e"},
"secureSettings": null
}
]
}
]
}
}`
var brokenConfig = `
"alertmanager_config": {
"route": {


@ -7,14 +7,18 @@ import (
"errors"
"fmt"
"io"
"math/rand"
"testing"
"time"
"github.com/go-openapi/strfmt"
"github.com/matttproud/golang_protobuf_extensions/pbutil"
amv2 "github.com/prometheus/alertmanager/api/v2/models"
"github.com/prometheus/alertmanager/nflog/nflogpb"
"github.com/prometheus/alertmanager/silence/silencepb"
"github.com/prometheus/common/model"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
@ -357,3 +361,24 @@ func createNotificationLog(groupKey string, receiverName string, sentAt, expires
ExpiresAt: expiresAt,
}
}
func GenSilence(createdBy string) *apimodels.PostableSilence {
starts := strfmt.DateTime(time.Now().Add(time.Duration(rand.Int63n(9)+1) * time.Second))
ends := strfmt.DateTime(time.Now().Add(time.Duration(rand.Int63n(9)+10) * time.Second))
comment := "test comment"
isEqual := true
name := "test"
value := "test"
isRegex := false
matchers := amv2.Matchers{&amv2.Matcher{IsEqual: &isEqual, Name: &name, Value: &value, IsRegex: &isRegex}}
return &apimodels.PostableSilence{
Silence: amv2.Silence{
Comment: &comment,
CreatedBy: &createdBy,
Matchers: matchers,
StartsAt: &starts,
EndsAt: &ends,
},
}
}


@ -8,6 +8,7 @@ import (
"fmt"
"net/http"
"net/url"
"strings"
"github.com/go-openapi/strfmt"
amalert "github.com/prometheus/alertmanager/api/v2/client/alert"
@ -15,6 +16,9 @@ import (
amreceiver "github.com/prometheus/alertmanager/api/v2/client/receiver"
amsilence "github.com/prometheus/alertmanager/api/v2/client/silence"
alertingClusterPB "github.com/grafana/alerting/cluster/clusterpb"
alertingNotify "github.com/grafana/alerting/notify"
"github.com/grafana/grafana/pkg/infra/log"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/metrics"
@ -22,8 +26,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/notifier"
remoteClient "github.com/grafana/grafana/pkg/services/ngalert/remote/client"
"github.com/grafana/grafana/pkg/services/ngalert/sender"
alertingClusterPB "github.com/grafana/alerting/cluster/clusterpb"
)
type stateStore interface {
@ -401,15 +403,30 @@ func (am *Alertmanager) Ready() bool {
return am.ready
}
// SilenceState returns the Alertmanager's silence state as a SilenceState. Currently, does not retrieve the state
// remotely and instead uses the value from the state store.
func (am *Alertmanager) SilenceState(ctx context.Context) (alertingNotify.SilenceState, error) {
silences, err := am.state.GetSilences(ctx)
if err != nil {
return nil, fmt.Errorf("error getting silences: %w", err)
}
return alertingNotify.DecodeState(strings.NewReader(silences))
}
// getFullState returns a base64-encoded protobuf message representing the Alertmanager's internal state.
func (am *Alertmanager) getFullState(ctx context.Context) (string, error) {
var parts []alertingClusterPB.Part
silences, err := am.state.GetSilences(ctx)
state, err := am.SilenceState(ctx)
if err != nil {
return "", fmt.Errorf("error getting silences: %w", err)
}
parts = append(parts, alertingClusterPB.Part{Key: notifier.SilencesFilename, Data: []byte(silences)})
b, err := state.MarshalBinary()
if err != nil {
return "", fmt.Errorf("error marshalling silences: %w", err)
}
parts = append(parts, alertingClusterPB.Part{Key: notifier.SilencesFilename, Data: b})
notificationLog, err := am.state.GetNotificationLog(ctx)
if err != nil {
@ -420,7 +437,7 @@ func (am *Alertmanager) getFullState(ctx context.Context) (string, error) {
fs := alertingClusterPB.FullState{
Parts: parts,
}
b, err := fs.Marshal()
b, err = fs.Marshal()
if err != nil {
return "", fmt.Errorf("error marshaling full state: %w", err)
}


@ -8,7 +8,6 @@ import (
"errors"
"fmt"
"io"
"math/rand"
"net/http"
"net/http/httptest"
"os"
@ -40,6 +39,12 @@ import (
const testGrafanaConfig = `{"template_files":{},"alertmanager_config":{"route":{"receiver":"grafana-default-email","group_by":["grafana_folder","alertname"]},"templates":null,"receivers":[{"name":"grafana-default-email","grafana_managed_receiver_configs":[{"uid":"","name":"some other name","type":"email","disableResolveMessage":false,"settings":{"addresses":"\u003cexample@email.com\u003e"},"secureSettings":null}]}]}}`
const testGrafanaConfigWithSecret = `{"template_files":{},"alertmanager_config":{"route":{"receiver":"grafana-default-email","group_by":["grafana_folder","alertname"]},"templates":null,"receivers":[{"name":"grafana-default-email","grafana_managed_receiver_configs":[{"uid":"dde6ntuob69dtf","name":"WH","type":"webhook","disableResolveMessage":false,"settings":{"url":"http://localhost:8080","username":"test"},"secureSettings":{"password":"test"}}]}]}}`
// Valid Alertmanager state base64 encoded.
const testSilence1 = "lwEKhgEKATESFxIJYWxlcnRuYW1lGgp0ZXN0X2FsZXJ0EiMSDmdyYWZhbmFfZm9sZGVyGhF0ZXN0X2FsZXJ0X2ZvbGRlchoMCN2CkbAGEJbKrMsDIgwI7Z6RsAYQlsqsywMqCwiAkrjDmP7///8BQgxHcmFmYW5hIFRlc3RKDFRlc3QgU2lsZW5jZRIMCO2ekbAGEJbKrMsD"
const testSilence2 = "lwEKhgEKATISFxIJYWxlcnRuYW1lGgp0ZXN0X2FsZXJ0EiMSDmdyYWZhbmFfZm9sZGVyGhF0ZXN0X2FsZXJ0X2ZvbGRlchoMCN2CkbAGEJbKrMsDIgwI7Z6RsAYQlsqsywMqCwiAkrjDmP7///8BQgxHcmFmYW5hIFRlc3RKDFRlc3QgU2lsZW5jZRIMCO2ekbAGEJbKrMsDlwEKhgEKATESFxIJYWxlcnRuYW1lGgp0ZXN0X2FsZXJ0EiMSDmdyYWZhbmFfZm9sZGVyGhF0ZXN0X2FsZXJ0X2ZvbGRlchoMCN2CkbAGEJbKrMsDIgwI7Z6RsAYQlsqsywMqCwiAkrjDmP7///8BQgxHcmFmYW5hIFRlc3RKDFRlc3QgU2lsZW5jZRIMCO2ekbAGEJbKrMsD"
const testNflog1 = "OgoqCgZncm91cDESEgoJcmVjZWl2ZXIxEgV0ZXN0MyoMCIzm1bAGEPqx5uEBEgwInILWsAYQ+rHm4QE="
const testNflog2 = "OgoqCgZncm91cDISEgoJcmVjZWl2ZXIyEgV0ZXN0MyoMCLSDkbAGEMvaofYCEgwIxJ+RsAYQy9qh9gI6CioKBmdyb3VwMRISCglyZWNlaXZlcjESBXRlc3QzKgwItIORsAYQy9qh9gISDAjEn5GwBhDL2qH2Ag=="
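For reference, these opaque constants can be decoded back into individual silences the same way TestMultiOrgAlertmanager_Silences does it. A small helper along those lines (not part of the diff; it assumes the blob is standard base64, which is how the file store writes it, and the bytes, encoding/base64, and testing imports) could look like:

// decodeSilenceState is an illustrative helper, not part of this change:
// base64-decode a stored silence state blob and parse it with the alerting
// library so individual silences can be asserted on.
// Usage: state := decodeSilenceState(t, testSilence1)
func decodeSilenceState(t *testing.T, blob string) alertingNotify.SilenceState {
	t.Helper()
	raw, err := base64.StdEncoding.DecodeString(blob)
	require.NoError(t, err)
	state, err := alertingNotify.DecodeState(bytes.NewReader(raw))
	require.NoError(t, err)
	return state
}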
func TestMain(m *testing.M) {
testsuite.Run(m)
}
@ -145,8 +150,8 @@ func TestApplyConfig(t *testing.T) {
ctx := context.Background()
store := ngfakes.NewFakeKVStore(t)
fstore := notifier.NewFileStore(1, store)
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.SilencesFilename, "test"))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.NotificationLogFilename, "test"))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.SilencesFilename, testSilence1))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.NotificationLogFilename, testNflog1))
// An error response from the remote Alertmanager should result in the readiness check failing.
m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
@ -293,14 +298,12 @@ func TestIntegrationRemoteAlertmanagerApplyConfigOnlyUploadsOnce(t *testing.T) {
OrgID: 1,
}
silences := []byte("test-silences")
nflog := []byte("test-notifications")
store := ngfakes.NewFakeKVStore(t)
fstore := notifier.NewFileStore(cfg.OrgID, store)
ctx := context.Background()
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.SilencesFilename, base64.StdEncoding.EncodeToString(silences)))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.NotificationLogFilename, base64.StdEncoding.EncodeToString(nflog)))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.SilencesFilename, testSilence1))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", notifier.NotificationLogFilename, testNflog1))
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
m := metrics.NewRemoteAlertmanagerMetrics(prometheus.NewRegistry())
@ -344,8 +347,8 @@ func TestIntegrationRemoteAlertmanagerApplyConfigOnlyUploadsOnce(t *testing.T) {
// Calling `ApplyConfig` again with a changed configuration and state yields no effect.
{
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", "silences", base64.StdEncoding.EncodeToString([]byte("abc123"))))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", "notifications", base64.StdEncoding.EncodeToString([]byte("abc123"))))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", "silences", testSilence2))
require.NoError(t, store.Set(ctx, cfg.OrgID, "alertmanager", "notifications", testNflog2))
fakeConfig.ID = 30000000000000000
require.NoError(t, am.ApplyConfig(ctx, fakeConfig))
@ -401,8 +404,8 @@ func TestIntegrationRemoteAlertmanagerSilences(t *testing.T) {
require.Equal(t, 0, len(silences))
// Creating a silence should succeed.
testSilence := genSilence("test")
id, err := am.CreateSilence(context.Background(), &testSilence)
testSilence := notifier.GenSilence("test")
id, err := am.CreateSilence(context.Background(), testSilence)
require.NoError(t, err)
require.NotEmpty(t, id)
testSilence.ID = id
@ -417,8 +420,8 @@ func TestIntegrationRemoteAlertmanagerSilences(t *testing.T) {
require.Error(t, err)
// After creating another silence, the total amount should be 2.
testSilence2 := genSilence("test")
id, err = am.CreateSilence(context.Background(), &testSilence2)
testSilence2 := notifier.GenSilence("test")
id, err = am.CreateSilence(context.Background(), testSilence2)
require.NoError(t, err)
require.NotEmpty(t, id)
testSilence2.ID = id
@ -552,27 +555,6 @@ func TestIntegrationRemoteAlertmanagerReceivers(t *testing.T) {
require.Equal(t, "empty-receiver", *rcvs[0].Name)
}
func genSilence(createdBy string) apimodels.PostableSilence {
starts := strfmt.DateTime(time.Now().Add(time.Duration(rand.Int63n(9)+1) * time.Second))
ends := strfmt.DateTime(time.Now().Add(time.Duration(rand.Int63n(9)+10) * time.Second))
comment := "test comment"
isEqual := true
name := "test"
value := "test"
isRegex := false
matchers := amv2.Matchers{&amv2.Matcher{IsEqual: &isEqual, Name: &name, Value: &value, IsRegex: &isRegex}}
return apimodels.PostableSilence{
Silence: amv2.Silence{
Comment: &comment,
CreatedBy: &createdBy,
Matchers: matchers,
StartsAt: &starts,
EndsAt: &ends,
},
}
}
func genAlert(active bool, labels map[string]string) amv2.PostableAlert {
endsAt := time.Now()
if active {


@ -804,6 +804,60 @@ func (_c *RemoteAlertmanagerMock_SaveAndApplyDefaultConfig_Call) RunAndReturn(ru
return _c
}
// SilenceState provides a mock function with given fields: _a0
func (_m *RemoteAlertmanagerMock) SilenceState(_a0 context.Context) (notify.SilenceState, error) {
ret := _m.Called(_a0)
var r0 notify.SilenceState
var r1 error
if rf, ok := ret.Get(0).(func(context.Context) (notify.SilenceState, error)); ok {
return rf(_a0)
}
if rf, ok := ret.Get(0).(func(context.Context) notify.SilenceState); ok {
r0 = rf(_a0)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(notify.SilenceState)
}
}
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(_a0)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// RemoteAlertmanagerMock_SilenceState_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SilenceState'
type RemoteAlertmanagerMock_SilenceState_Call struct {
*mock.Call
}
// SilenceState is a helper method to define mock.On call
// - _a0 context.Context
func (_e *RemoteAlertmanagerMock_Expecter) SilenceState(_a0 interface{}) *RemoteAlertmanagerMock_SilenceState_Call {
return &RemoteAlertmanagerMock_SilenceState_Call{Call: _e.mock.On("SilenceState", _a0)}
}
func (_c *RemoteAlertmanagerMock_SilenceState_Call) Run(run func(_a0 context.Context)) *RemoteAlertmanagerMock_SilenceState_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context))
})
return _c
}
func (_c *RemoteAlertmanagerMock_SilenceState_Call) Return(_a0 notify.SilenceState, _a1 error) *RemoteAlertmanagerMock_SilenceState_Call {
_c.Call.Return(_a0, _a1)
return _c
}
func (_c *RemoteAlertmanagerMock_SilenceState_Call) RunAndReturn(run func(context.Context) (notify.SilenceState, error)) *RemoteAlertmanagerMock_SilenceState_Call {
_c.Call.Return(run)
return _c
}
// StopAndWait provides a mock function with given fields:
func (_m *RemoteAlertmanagerMock) StopAndWait() {
_m.Called()


@ -4,6 +4,8 @@ import (
"context"
"fmt"
alertingNotify "github.com/grafana/alerting/notify"
"github.com/grafana/grafana/pkg/infra/log"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/models"
@ -91,6 +93,10 @@ func (fam *RemotePrimaryForkedAlertmanager) TestTemplate(ctx context.Context, c
return fam.remote.TestTemplate(ctx, c)
}
func (fam *RemotePrimaryForkedAlertmanager) SilenceState(ctx context.Context) (alertingNotify.SilenceState, error) {
return fam.remote.SilenceState(ctx)
}
func (fam *RemotePrimaryForkedAlertmanager) StopAndWait() {
fam.internal.StopAndWait()
fam.remote.StopAndWait()


@ -6,6 +6,8 @@ import (
"sync"
"time"
alertingNotify "github.com/grafana/alerting/notify"
"github.com/grafana/grafana/pkg/infra/log"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/models"
@ -165,6 +167,10 @@ func (fam *RemoteSecondaryForkedAlertmanager) TestTemplate(ctx context.Context,
return fam.internal.TestTemplate(ctx, c)
}
func (fam *RemoteSecondaryForkedAlertmanager) SilenceState(ctx context.Context) (alertingNotify.SilenceState, error) {
return fam.internal.SilenceState(ctx)
}
func (fam *RemoteSecondaryForkedAlertmanager) StopAndWait() {
// Stop the internal Alertmanager.
fam.internal.StopAndWait()