package sender

import (
	"context"
	"fmt"
	"math/rand"
	"net/url"
	"testing"
	"time"

	"github.com/benbjohnson/clock"
	"github.com/go-openapi/strfmt"
	models2 "github.com/prometheus/alertmanager/api/v2/models"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/infra/log/logtest"
	"github.com/grafana/grafana/pkg/services/datasources"
	fake_ds "github.com/grafana/grafana/pkg/services/datasources/fakes"
	"github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
	"github.com/grafana/grafana/pkg/services/ngalert/models"
	"github.com/grafana/grafana/pkg/services/ngalert/notifier"
	"github.com/grafana/grafana/pkg/services/ngalert/provisioning"
	"github.com/grafana/grafana/pkg/services/ngalert/store"
	fake_secrets "github.com/grafana/grafana/pkg/services/secrets/fakes"
	secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/util"
)
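
// TestSendingToExternalAlertmanager verifies that alerts for a single org are
// forwarded to a configured external Alertmanager and that the sender is torn
// down once the admin configuration is removed.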
func TestSendingToExternalAlertmanager(t *testing.T) {
	ruleKey := models.GenerateRuleKey(1)

	fakeAM := NewFakeExternalAlertmanager(t)
	defer fakeAM.Close()
	fakeAdminConfigStore := &store.AdminConfigurationStoreMock{}
	mockedGetAdminConfigurations := fakeAdminConfigStore.EXPECT().GetAdminConfigurations()
	mockedClock := clock.NewMock()
	moa := createMultiOrgAlertmanager(t, []int64{1})

	appUrl := &url.URL{
		Scheme: "http",
		Host:   "localhost",
	}

	ds1 := datasources.DataSource{
		Url:   fakeAM.Server.URL,
		OrgId: ruleKey.OrgID,
		Type:  datasources.DS_ALERTMANAGER,
		JsonData: simplejson.NewFromAny(map[string]interface{}{
			"handleGrafanaManagedAlerts": true,
			"implementation":             "prometheus",
		}),
	}

	alertsRouter := NewAlertsRouter(moa, fakeAdminConfigStore, mockedClock, appUrl, map[int64]struct{}{}, 10*time.Minute,
		&fake_ds.FakeDataSourceService{DataSources: []*datasources.DataSource{&ds1}}, fake_secrets.NewFakeSecretsService())

	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey.OrgID, SendAlertsTo: models.AllAlertmanagers},
	}, nil)

	// Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running
	// when the first alert triggers.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagersCfgHash))

	// Then, ensure we've discovered the Alertmanager.
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0)

	var expected []*models2.PostableAlert
	alerts := definitions.PostableAlerts{}
	for i := 0; i < rand.Intn(5)+1; i++ {
		alert := generatePostableAlert(t, mockedClock)
		expected = append(expected, &alert)
		alerts.PostableAlerts = append(alerts.PostableAlerts, alert)
	}

	alertsRouter.Send(ruleKey, alerts)

	// Eventually, our Alertmanager should have received at least one alert.
	assertAlertsDelivered(t, fakeAM, expected)

	// Now, let's remove the Alertmanager from the admin configuration.
	mockedGetAdminConfigurations.Return(nil, nil)

	// Again, make sure we sync and verify the externalAlertmanagers.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 0, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 0, len(alertsRouter.externalAlertmanagersCfgHash))

	// Then, ensure we've dropped the Alertmanager.
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 0, 0)
}
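
// TestSendingToExternalAlertmanager_WithMultipleOrgs verifies that senders are
// created, reconfigured, and torn down per org as admin configurations and
// Alertmanager datasources change.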
func TestSendingToExternalAlertmanager_WithMultipleOrgs(t *testing.T) {
	ruleKey1 := models.GenerateRuleKey(1)
	ruleKey2 := models.GenerateRuleKey(2)

	fakeAM := NewFakeExternalAlertmanager(t)
	defer fakeAM.Close()
	fakeAdminConfigStore := &store.AdminConfigurationStoreMock{}
	mockedGetAdminConfigurations := fakeAdminConfigStore.EXPECT().GetAdminConfigurations()
	mockedClock := clock.NewMock()
	moa := createMultiOrgAlertmanager(t, []int64{1, 2})

	appUrl := &url.URL{
		Scheme: "http",
		Host:   "localhost",
	}

	ds1 := datasources.DataSource{
		Url:   fakeAM.Server.URL,
		OrgId: ruleKey1.OrgID,
		Type:  datasources.DS_ALERTMANAGER,
		JsonData: simplejson.NewFromAny(map[string]interface{}{
			"handleGrafanaManagedAlerts": true,
			"implementation":             "prometheus",
		}),
	}
	fakeDs := &fake_ds.FakeDataSourceService{DataSources: []*datasources.DataSource{&ds1}}

	alertsRouter := NewAlertsRouter(moa, fakeAdminConfigStore, mockedClock, appUrl, map[int64]struct{}{}, 10*time.Minute,
		fakeDs, fake_secrets.NewFakeSecretsService())

	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey1.OrgID, SendAlertsTo: models.AllAlertmanagers},
	}, nil)

	// Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running
	// when the first alert triggers.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagersCfgHash))

	// Then, ensure we've discovered the Alertmanager.
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey1.OrgID, 1, 0)

	// 1. Now, let's assume a new org comes along.
	ds2 := datasources.DataSource{
		Url:   fakeAM.Server.URL,
		OrgId: ruleKey2.OrgID,
		Type:  datasources.DS_ALERTMANAGER,
		JsonData: simplejson.NewFromAny(map[string]interface{}{
			"handleGrafanaManagedAlerts": true,
			"implementation":             "prometheus",
		}),
	}
	fakeDs.DataSources = append(fakeDs.DataSources, &ds2)

	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey1.OrgID, SendAlertsTo: models.AllAlertmanagers},
		{OrgID: ruleKey2.OrgID},
	}, nil)

	// If we sync again, new externalAlertmanagers must have spawned.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 2, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 2, len(alertsRouter.externalAlertmanagersCfgHash))

	// Then, ensure we've discovered the Alertmanager for the new organization.
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey1.OrgID, 1, 0)
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey2.OrgID, 1, 0)

	var expected []*models2.PostableAlert
	alerts1 := definitions.PostableAlerts{}
	for i := 0; i < rand.Intn(5)+1; i++ {
		alert := generatePostableAlert(t, mockedClock)
		expected = append(expected, &alert)
		alerts1.PostableAlerts = append(alerts1.PostableAlerts, alert)
	}

	alerts2 := definitions.PostableAlerts{}
	for i := 0; i < rand.Intn(5)+1; i++ {
		alert := generatePostableAlert(t, mockedClock)
		expected = append(expected, &alert)
		alerts2.PostableAlerts = append(alerts2.PostableAlerts, alert)
	}

	alertsRouter.Send(ruleKey1, alerts1)
	alertsRouter.Send(ruleKey2, alerts2)
	assertAlertsDelivered(t, fakeAM, expected)

	// 2. Next, let's modify the configuration of an organization by adding an extra alertmanager.
	fakeAM2 := NewFakeExternalAlertmanager(t)
	ds3 := datasources.DataSource{
		Url:   fakeAM2.Server.URL,
		OrgId: ruleKey2.OrgID,
		Type:  datasources.DS_ALERTMANAGER,
		JsonData: simplejson.NewFromAny(map[string]interface{}{
			"handleGrafanaManagedAlerts": true,
			"implementation":             "prometheus",
		}),
	}
	fakeDs.DataSources = append(fakeDs.DataSources, &ds3)

	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey1.OrgID, SendAlertsTo: models.AllAlertmanagers},
		{OrgID: ruleKey2.OrgID},
	}, nil)

	// Before we sync, let's grab the existing hash of this particular org.
	currentHash := alertsRouter.externalAlertmanagersCfgHash[ruleKey2.OrgID]

	// Now, sync again.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())

	// The hash for org two should not be the same and we should still have two externalAlertmanagers.
	require.NotEqual(t, alertsRouter.externalAlertmanagersCfgHash[ruleKey2.OrgID], currentHash)
	require.Equal(t, 2, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 2, len(alertsRouter.externalAlertmanagersCfgHash))
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey2.OrgID, 2, 0)

	// 3. Now, let's provide a configuration that fails for OrgID = 1.
	fakeDs.DataSources[0].Url = "123://invalid.org"
	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey1.OrgID, SendAlertsTo: models.AllAlertmanagers},
		{OrgID: ruleKey2.OrgID},
	}, nil)

	// Before we sync, let's get the current config hash.
	currentHash = alertsRouter.externalAlertmanagersCfgHash[ruleKey1.OrgID]

	// Now, sync again.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())

	// The old configuration should not be running.
	require.NotEqual(t, alertsRouter.externalAlertmanagersCfgHash[ruleKey1.OrgID], currentHash)
	require.Equal(t, 0, len(alertsRouter.AlertmanagersFor(ruleKey1.OrgID)))

	// If we fix it - it should be applied.
	fakeDs.DataSources[0].Url = "notarealalertmanager:3030"
	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey1.OrgID, SendAlertsTo: models.AllAlertmanagers},
		{OrgID: ruleKey2.OrgID},
	}, nil)
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.NotEqual(t, alertsRouter.externalAlertmanagersCfgHash[ruleKey1.OrgID], currentHash)

	// Finally, remove everything.
	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{}, nil)
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 0, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 0, len(alertsRouter.externalAlertmanagersCfgHash))
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey1.OrgID, 0, 0)
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey2.OrgID, 0, 0)
}
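
// TestChangingAlertmanagersChoice verifies that the router honors the
// SendAlertsTo setting as it changes between all, external-only, and
// internal-only Alertmanagers.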
func TestChangingAlertmanagersChoice(t *testing.T) {
	ruleKey := models.GenerateRuleKey(1)

	fakeAM := NewFakeExternalAlertmanager(t)
	defer fakeAM.Close()
	fakeAdminConfigStore := &store.AdminConfigurationStoreMock{}
	mockedGetAdminConfigurations := fakeAdminConfigStore.EXPECT().GetAdminConfigurations()
	mockedClock := clock.NewMock()
	mockedClock.Set(time.Now())
	moa := createMultiOrgAlertmanager(t, []int64{1})

	appUrl := &url.URL{
		Scheme: "http",
		Host:   "localhost",
	}

	ds := datasources.DataSource{
		Url:   fakeAM.Server.URL,
		OrgId: ruleKey.OrgID,
		Type:  datasources.DS_ALERTMANAGER,
		JsonData: simplejson.NewFromAny(map[string]interface{}{
			"handleGrafanaManagedAlerts": true,
			"implementation":             "prometheus",
		}),
	}

	alertsRouter := NewAlertsRouter(moa, fakeAdminConfigStore, mockedClock, appUrl, map[int64]struct{}{},
		10*time.Minute, &fake_ds.FakeDataSourceService{DataSources: []*datasources.DataSource{&ds}}, fake_secrets.NewFakeSecretsService())

	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey.OrgID, SendAlertsTo: models.AllAlertmanagers},
	}, nil)

	// Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running
	// when the first alert triggers.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagersCfgHash))
	require.Equal(t, models.AllAlertmanagers, alertsRouter.sendAlertsTo[ruleKey.OrgID])

	// Then, ensure we've discovered the Alertmanager.
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0)

	var expected []*models2.PostableAlert
	alerts := definitions.PostableAlerts{}
	for i := 0; i < rand.Intn(5)+1; i++ {
		alert := generatePostableAlert(t, mockedClock)
		expected = append(expected, &alert)
		alerts.PostableAlerts = append(alerts.PostableAlerts, alert)
	}

	alertsRouter.Send(ruleKey, alerts)

	// Eventually, our Alertmanager should have received at least one alert.
	assertAlertsDelivered(t, fakeAM, expected)

	// Now, let's change the Alertmanagers choice to send only to the external Alertmanager.
	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey.OrgID, SendAlertsTo: models.ExternalAlertmanagers},
	}, nil)

	// Again, make sure we sync and verify the externalAlertmanagers.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagersCfgHash))
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0)
	require.Equal(t, models.ExternalAlertmanagers, alertsRouter.sendAlertsTo[ruleKey.OrgID])

	// Finally, let's change the Alertmanagers choice to send only to the internal Alertmanager.
	mockedGetAdminConfigurations.Return([]*models.AdminConfiguration{
		{OrgID: ruleKey.OrgID, SendAlertsTo: models.InternalAlertmanager},
	}, nil)

	// Again, make sure we sync and verify the externalAlertmanagers.
	// The externalAlertmanagers should keep running even though alerts are now handled internally.
	require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagers))
	require.Equal(t, 1, len(alertsRouter.externalAlertmanagersCfgHash))

	// Then, ensure the Alertmanager is still listed and the Alertmanagers choice has changed.
	assertAlertmanagersStatusForOrg(t, alertsRouter, ruleKey.OrgID, 1, 0)
	require.Equal(t, models.InternalAlertmanager, alertsRouter.sendAlertsTo[ruleKey.OrgID])

	alertsRouter.Send(ruleKey, alerts)

	am, err := moa.AlertmanagerFor(ruleKey.OrgID)
	require.NoError(t, err)
	actualAlerts, err := am.GetAlerts(true, true, true, nil, "")
	require.NoError(t, err)
	require.Len(t, actualAlerts, len(expected))
}
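
// assertAlertmanagersStatusForOrg waits until the router reports the expected
// number of active and dropped external Alertmanagers for the given org.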
func assertAlertmanagersStatusForOrg(t *testing.T, alertsRouter *AlertsRouter, orgID int64, active, dropped int) {
	t.Helper()
	require.Eventuallyf(t, func() bool {
		return len(alertsRouter.AlertmanagersFor(orgID)) == active && len(alertsRouter.DroppedAlertmanagersFor(orgID)) == dropped
	}, 10*time.Second, 200*time.Millisecond,
		fmt.Sprintf("expected %d active Alertmanagers and %d dropped ones but got %d active and %d dropped", active, dropped, len(alertsRouter.AlertmanagersFor(orgID)), len(alertsRouter.DroppedAlertmanagersFor(orgID))))
}
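
// assertAlertsDelivered waits until the fake external Alertmanager has received
// exactly as many alerts as expected.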
func assertAlertsDelivered(t *testing.T, fakeAM *FakeExternalAlertmanager, expectedAlerts []*models2.PostableAlert) {
	t.Helper()
	require.Eventuallyf(t, func() bool {
		return fakeAM.AlertsCount() == len(expectedAlerts)
	}, 10*time.Second, 200*time.Millisecond, fmt.Sprintf("expected %d alerts to be delivered to the remote Alertmanager but %d were delivered", len(expectedAlerts), fakeAM.AlertsCount()))
	require.Len(t, fakeAM.Alerts(), len(expectedAlerts))
}
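
// generatePostableAlert builds a PostableAlert with random labels and
// annotations that starts now on the given clock and ends one minute later.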
func generatePostableAlert(t *testing.T, clk clock.Clock) models2.PostableAlert {
	t.Helper()
	u := url.URL{
		Scheme:  "http",
		Host:    "localhost",
		RawPath: "/" + util.GenerateShortUID(),
	}
	return models2.PostableAlert{
		Annotations: models2.LabelSet(models.GenerateAlertLabels(5, "ann-")),
		EndsAt:      strfmt.DateTime(clk.Now().Add(1 * time.Minute)),
		StartsAt:    strfmt.DateTime(clk.Now()),
		Alert: models2.Alert{
			GeneratorURL: strfmt.URI(u.String()),
			Labels:       models2.LabelSet(models.GenerateAlertLabels(5, "lbl-")),
		},
	}
}
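
// createMultiOrgAlertmanager builds a MultiOrgAlertmanager backed by fake
// stores for the given orgs and waits until an Alertmanager is ready for each.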
func createMultiOrgAlertmanager(t *testing.T, orgs []int64) *notifier.MultiOrgAlertmanager {
	t.Helper()

	tmpDir := t.TempDir()
	orgStore := notifier.NewFakeOrgStore(t, orgs)
	cfg := &setting.Cfg{
		DataPath: tmpDir,
		UnifiedAlerting: setting.UnifiedAlertingSettings{
			AlertmanagerConfigPollInterval: 3 * time.Minute, // do not poll in tests.
			DefaultConfiguration:           setting.GetAlertmanagerDefaultConfiguration(),
			DisabledOrgs:                   map[int64]struct{}{},
		},
	}

	cfgStore := notifier.NewFakeConfigStore(t, make(map[int64]*models.AlertConfiguration))
	kvStore := notifier.NewFakeKVStore(t)
	registry := prometheus.NewPedanticRegistry()
	m := metrics.NewNGAlert(registry)
	secretsService := secretsManager.SetupTestService(t, fake_secrets.NewFakeSecretsStore())
	decryptFn := secretsService.GetDecryptedValue
	moa, err := notifier.NewMultiOrgAlertmanager(cfg, &cfgStore, &orgStore, kvStore, provisioning.NewFakeProvisioningStore(), decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService)
	require.NoError(t, err)
	require.NoError(t, moa.LoadAndSyncAlertmanagersForOrgs(context.Background()))
	require.Eventually(t, func() bool {
		for _, org := range orgs {
			_, err := moa.AlertmanagerFor(org)
			if err != nil {
				return false
			}
		}
		return true
	}, 10*time.Second, 100*time.Millisecond)
	return moa
}
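
// TestBuildExternalURL checks that buildExternalURL injects basic-auth
// credentials into the datasource URL and defaults to the http scheme when
// none is specified.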
func TestBuildExternalURL(t *testing.T) {
	sch := AlertsRouter{
		secretService: fake_secrets.NewFakeSecretsService(),
	}
	tests := []struct {
		name        string
		ds          *datasources.DataSource
		expectedURL string
	}{
		{
			name: "datasource without auth",
			ds: &datasources.DataSource{
				Url: "https://localhost:9000",
			},
			expectedURL: "https://localhost:9000",
		},
		{
			name: "datasource without auth and with path",
			ds: &datasources.DataSource{
				Url: "https://localhost:9000/path/to/am",
			},
			expectedURL: "https://localhost:9000/path/to/am",
		},
		{
			name: "datasource with auth",
			ds: &datasources.DataSource{
				Url:           "https://localhost:9000",
				BasicAuth:     true,
				BasicAuthUser: "johndoe",
				SecureJsonData: map[string][]byte{
					"basicAuthPassword": []byte("123"),
				},
			},
			expectedURL: "https://johndoe:123@localhost:9000",
		},
		{
			name: "datasource with auth and path",
			ds: &datasources.DataSource{
				Url:           "https://localhost:9000/path/to/am",
				BasicAuth:     true,
				BasicAuthUser: "johndoe",
				SecureJsonData: map[string][]byte{
					"basicAuthPassword": []byte("123"),
				},
			},
			expectedURL: "https://johndoe:123@localhost:9000/path/to/am",
		},
		{
			name: "with no scheme specified in the datasource",
			ds: &datasources.DataSource{
				Url:           "localhost:9000/path/to/am",
				BasicAuth:     true,
				BasicAuthUser: "johndoe",
				SecureJsonData: map[string][]byte{
					"basicAuthPassword": []byte("123"),
				},
			},
			expectedURL: "http://johndoe:123@localhost:9000/path/to/am",
		},
		{
			name: "with no scheme and no auth specified in the datasource",
			ds: &datasources.DataSource{
				Url: "localhost:9000/path/to/am",
			},
			expectedURL: "http://localhost:9000/path/to/am",
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			got, err := sch.buildExternalURL(test.ds)
			require.NoError(t, err)
			require.Equal(t, test.expectedURL, got)
		})
	}
}
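
// TestAlertManagers_asSHA256 checks that a list of Alertmanager URLs hashes to
// a stable SHA-256 hex digest.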
func TestAlertManagers_asSHA256(t *testing.T) {
	tc := []struct {
		name       string
		amUrls     []string
		ciphertext string
	}{
		{
			name:       "asSHA256",
			amUrls:     []string{"http://localhost:9093"},
			ciphertext: "3ec9db375a5ba12f7c7b704922cf4b8e21a31e30d85be2386803829f0ee24410",
		},
	}
	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			require.Equal(t, tt.ciphertext, asSHA256(tt.amUrls))
		})
	}
}
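
// TestAlertManagers_buildRedactedAMs checks that credentials in Alertmanager
// URLs are redacted and that URLs that fail to parse are logged as errors.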
func TestAlertManagers_buildRedactedAMs(t *testing.T) {
	fakeLogger := logtest.Fake{}
	tc := []struct {
		name     string
		orgId    int64
		amUrls   []string
		errCalls int
		errLog   string
		errCtx   []interface{}
		expected []string
	}{
		{
			name:     "buildRedactedAMs",
			orgId:    1,
			amUrls:   []string{"http://user:password@localhost:9093"},
			errCalls: 0,
			errLog:   "",
			expected: []string{"http://user:xxxxx@localhost:9093"},
		},
		{
			name:     "Error building redacted AM URLs",
			orgId:    2,
			amUrls:   []string{"1234://user:password@localhost:9094"},
			errCalls: 1,
			errLog:   "Failed to parse alertmanager string",
			expected: nil,
		},
	}
	for _, tt := range tc {
		t.Run(tt.name, func(t *testing.T) {
			require.Equal(t, tt.expected, buildRedactedAMs(&fakeLogger, tt.amUrls, tt.orgId))
			require.Equal(t, tt.errCalls, fakeLogger.ErrorLogs.Calls)
			require.Equal(t, tt.errLog, fakeLogger.ErrorLogs.Message)
		})
	}
}