diff --git a/pkg/services/provisioning/provisioning.go b/pkg/services/provisioning/provisioning.go
index 21f61977620..7cc4b993324 100644
--- a/pkg/services/provisioning/provisioning.go
+++ b/pkg/services/provisioning/provisioning.go
@@ -2,11 +2,12 @@ package provisioning
 
 import (
 	"context"
-	"github.com/grafana/grafana/pkg/log"
-	"github.com/pkg/errors"
 	"path"
 	"sync"
 
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/pkg/errors"
+
 	"github.com/grafana/grafana/pkg/registry"
 	"github.com/grafana/grafana/pkg/services/provisioning/dashboards"
 	"github.com/grafana/grafana/pkg/services/provisioning/datasources"
@@ -78,7 +79,9 @@ func (ps *provisioningServiceImpl) Run(ctx context.Context) error {
 
 		// Wait for unlock. This is tied to new dashboardProvisioner to be instantiated before we start polling.
 		ps.mutex.Lock()
-		pollingContext, cancelFun := context.WithCancel(ctx)
+		// Using a background context here because otherwise, if the root context was canceled, the select
+		// later on would non-deterministically take one of the routes, possibly going into one more polling loop before exiting.
+		pollingContext, cancelFun := context.WithCancel(context.Background())
 		ps.pollingCtxCancel = cancelFun
 		ps.dashboardProvisioner.PollChanges(pollingContext)
 		ps.mutex.Unlock()
@@ -88,7 +91,8 @@ func (ps *provisioningServiceImpl) Run(ctx context.Context) error {
 			// Polling was canceled.
 			continue
 		case <-ctx.Done():
-			// Root server context was cancelled so just leave.
+			// Root server context was cancelled, so cancel polling and leave.
+			ps.cancelPolling()
 			return ctx.Err()
 		}
 	}
diff --git a/pkg/services/provisioning/provisioning_test.go b/pkg/services/provisioning/provisioning_test.go
index 73f207122f7..b959aa66087 100644
--- a/pkg/services/provisioning/provisioning_test.go
+++ b/pkg/services/provisioning/provisioning_test.go
@@ -3,88 +3,130 @@ package provisioning
 import (
 	"context"
 	"errors"
+	"testing"
+	"time"
+
 	"github.com/grafana/grafana/pkg/services/provisioning/dashboards"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/stretchr/testify/assert"
-	"testing"
-	"time"
 )
 
 func TestProvisioningServiceImpl(t *testing.T) {
 	t.Run("Restart dashboard provisioning and stop service", func(t *testing.T) {
-		service, mock := setup()
-		ctx, cancel := context.WithCancel(context.Background())
-		var serviceRunning bool
-		var serviceError error
-
-		err := service.ProvisionDashboards()
+		serviceTest := setup()
+		err := serviceTest.service.ProvisionDashboards()
 		assert.Nil(t, err)
-		go func() {
-			serviceRunning = true
-			serviceError = service.Run(ctx)
-			serviceRunning = false
-		}()
-		time.Sleep(time.Millisecond)
-		assert.Equal(t, 1, len(mock.Calls.PollChanges), "PollChanges should have been called")
+		serviceTest.startService()
+		serviceTest.waitForPollChanges()
 
-		err = service.ProvisionDashboards()
+		assert.Equal(t, 1, len(serviceTest.mock.Calls.PollChanges), "PollChanges should have been called")
+
+		err = serviceTest.service.ProvisionDashboards()
 		assert.Nil(t, err)
-		time.Sleep(time.Millisecond)
-		assert.Equal(t, 2, len(mock.Calls.PollChanges), "PollChanges should have been called 2 times")
-		pollingCtx := mock.Calls.PollChanges[0].(context.Context)
+		serviceTest.waitForPollChanges()
+		assert.Equal(t, 2, len(serviceTest.mock.Calls.PollChanges), "PollChanges should have been called 2 times")
+
+		pollingCtx := serviceTest.mock.Calls.PollChanges[0].(context.Context)
 		assert.Equal(t, context.Canceled, pollingCtx.Err(), "Polling context from first call should have been cancelled")
-		assert.True(t, serviceRunning, "Service should be still running")
+		assert.True(t, serviceTest.serviceRunning, "Service should be still running")
 
 		// Cancelling the root context and stopping the service
-		cancel()
-		time.Sleep(time.Millisecond)
+		serviceTest.cancel()
+		serviceTest.waitForStop()
 
-		assert.False(t, serviceRunning, "Service should not be running")
-		assert.Equal(t, context.Canceled, serviceError, "Service should have returned canceled error")
+		assert.False(t, serviceTest.serviceRunning, "Service should not be running")
+		assert.Equal(t, context.Canceled, serviceTest.serviceError, "Service should have returned canceled error")
 	})
 
 	t.Run("Failed reloading does not stop polling with old provisioned", func(t *testing.T) {
-		service, mock := setup()
-		ctx, cancel := context.WithCancel(context.Background())
-		var serviceRunning bool
-
-		err := service.ProvisionDashboards()
+		serviceTest := setup()
+		err := serviceTest.service.ProvisionDashboards()
 		assert.Nil(t, err)
-		go func() {
-			serviceRunning = true
-			_ = service.Run(ctx)
-			serviceRunning = false
-		}()
-		time.Sleep(time.Millisecond)
-		assert.Equal(t, 1, len(mock.Calls.PollChanges), "PollChanges should have been called")
+		serviceTest.startService()
+		serviceTest.waitForPollChanges()
+		assert.Equal(t, 1, len(serviceTest.mock.Calls.PollChanges), "PollChanges should have been called")
 
-		mock.ProvisionFunc = func() error {
+		serviceTest.mock.ProvisionFunc = func() error {
 			return errors.New("Test error")
 		}
-		err = service.ProvisionDashboards()
+		err = serviceTest.service.ProvisionDashboards()
 		assert.NotNil(t, err)
-		time.Sleep(time.Millisecond)
+		serviceTest.waitForPollChanges()
+
 		// This should have been called with the old provisioner, after the last one failed.
-		assert.Equal(t, 2, len(mock.Calls.PollChanges), "PollChanges should have been called 2 times")
-		assert.True(t, serviceRunning, "Service should be still running")
+		assert.Equal(t, 2, len(serviceTest.mock.Calls.PollChanges), "PollChanges should have been called 2 times")
+		assert.True(t, serviceTest.serviceRunning, "Service should be still running")
 
 		// Cancelling the root context and stopping the service
-		cancel()
-
+		serviceTest.cancel()
 	})
 }
 
-func setup() (*provisioningServiceImpl, *dashboards.DashboardProvisionerMock) {
-	dashMock := dashboards.NewDashboardProvisionerMock()
-	service := NewProvisioningServiceImpl(
+type serviceTestStruct struct {
+	waitForPollChanges func()
+	waitForStop        func()
+	waitTimeout        time.Duration
+
+	serviceRunning bool
+	serviceError   error
+
+	startService func()
+	cancel       func()
+
+	mock    *dashboards.DashboardProvisionerMock
+	service *provisioningServiceImpl
+}
+
+func setup() *serviceTestStruct {
+	serviceTest := &serviceTestStruct{}
+	serviceTest.waitTimeout = time.Second
+
+	pollChangesChannel := make(chan context.Context)
+	serviceStopped := make(chan interface{})
+
+	serviceTest.mock = dashboards.NewDashboardProvisionerMock()
+	serviceTest.mock.PollChangesFunc = func(ctx context.Context) {
+		pollChangesChannel <- ctx
+	}
+
+	serviceTest.service = NewProvisioningServiceImpl(
 		func(path string) (dashboards.DashboardProvisioner, error) {
-			return dashMock, nil
+			return serviceTest.mock, nil
 		},
 		nil,
 		nil,
 	)
-	service.Cfg = setting.NewCfg()
-	return service, dashMock
+	serviceTest.service.Cfg = setting.NewCfg()
+
+	ctx, cancel := context.WithCancel(context.Background())
+	serviceTest.cancel = cancel
+
+	serviceTest.startService = func() {
+		go func() {
+			serviceTest.serviceRunning = true
+			serviceTest.serviceError = serviceTest.service.Run(ctx)
+			serviceTest.serviceRunning = false
+			serviceStopped <- true
+		}()
+	}
+
+	serviceTest.waitForPollChanges = func() {
+		timeoutChan := time.After(serviceTest.waitTimeout)
+		select {
+		case <-pollChangesChannel:
+		case <-timeoutChan:
+		}
+	}
+
+	serviceTest.waitForStop = func() {
+		timeoutChan := time.After(serviceTest.waitTimeout)
+		select {
+		case <-serviceStopped:
+		case <-timeoutChan:
+		}
+	}
+
+	return serviceTest
 }
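
Not part of the patch, but for reviewers: a minimal standalone sketch of the behaviour the new comment in Run refers to. When several cases of a select are ready at once, Go picks one pseudo-randomly, so while pollingContext was derived from the root ctx, cancelling ctx made both cases ready and the loop could take either branch before exiting. Deriving pollingContext from context.Background() leaves only the ctx.Done() case ready on shutdown. The package main wrapper and print strings below are illustrative only.

package main

import (
	"context"
	"fmt"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	// Old behaviour: the polling context is a child of the root ctx.
	pollingCtx, cancelPolling := context.WithCancel(ctx)
	defer cancelPolling()

	// Cancelling the root context also cancels its child.
	cancel()

	// Both Done channels are now closed, so the select chooses a case
	// pseudo-randomly (https://go.dev/ref/spec#Select_statements).
	// Run this a few times to observe both outcomes.
	select {
	case <-pollingCtx.Done():
		fmt.Println("polling case won: the loop would run one more iteration before exiting")
	case <-ctx.Done():
		fmt.Println("shutdown case won: the loop would exit immediately")
	}
}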