Provisioning: Fix duplicate validation when multiple organizations have been configured (#44151)

Fix duplicate validation when multiple organizations have been configured.
This ensures that duplicate UID and title checks are performed per organization, so dashboards provisioned for different organizations are no longer flagged as duplicates of each other (a simplified sketch of the per-org grouping is included below, just before the diff).

Fixes #44126
Marcus Efraimsson 2022-01-26 11:33:31 +01:00 committed by GitHub
parent 55e1c53e36
commit 7593fc0a20
2 changed files with 142 additions and 45 deletions
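
Before the diff, here is the gist of the change as a small, self-contained sketch. The providerCfg type and countByOrg function are hypothetical simplifications, not the actual Grafana types; they only illustrate how keying the duplicate counts by org ID keeps identical UIDs in different organizations from being counted against each other.

package main

import "fmt"

// providerCfg is a hypothetical, stripped-down stand-in for a dashboard
// provisioning provider: a name, a target org, and the dashboard UIDs it
// would provision.
type providerCfg struct {
    name  string
    orgID int64
    uids  []string
}

// countByOrg illustrates the fix: UID usage is tracked per organization,
// so the same UID provisioned into two different orgs is not a duplicate.
func countByOrg(providers []providerCfg) map[int64]map[string]int {
    byOrg := map[int64]map[string]int{}
    for _, p := range providers {
        if _, ok := byOrg[p.orgID]; !ok {
            byOrg[p.orgID] = map[string]int{}
        }
        for _, uid := range p.uids {
            byOrg[p.orgID][uid]++
        }
    }
    return byOrg
}

func main() {
    providers := []providerCfg{
        {name: "first", orgID: 1, uids: []string{"Z-phNqGmz"}},
        {name: "second", orgID: 2, uids: []string{"Z-phNqGmz"}},
    }
    // Previously both providers were counted in one global map and the shared
    // UID looked like a duplicate; per-org counting sees it once in each org.
    for orgID, counts := range countByOrg(providers) {
        for uid, n := range counts {
            fmt.Printf("org %d: uid %q used %d time(s)\n", orgID, uid, n)
        }
    }
}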


@@ -56,61 +56,75 @@ func newDuplicateValidator(logger log.Logger, readers []*FileReader) duplicateValidator {
 	return duplicateValidator{logger: logger, readers: readers}
 }
 
-func (c *duplicateValidator) getDuplicates() *duplicateEntries {
-	duplicates := duplicateEntries{
-		Titles: make(map[dashboardIdentity]*duplicate),
-		UIDs:   make(map[string]*duplicate),
-	}
+func (c *duplicateValidator) getDuplicates() map[int64]duplicateEntries {
+	duplicatesByOrg := map[int64]duplicateEntries{}
 
 	for _, reader := range c.readers {
 		readerName := reader.Cfg.Name
+		orgID := reader.Cfg.OrgID
 		tracker := reader.getUsageTracker()
 
-		for uid, times := range tracker.uidUsage {
-			if _, ok := duplicates.UIDs[uid]; !ok {
-				duplicates.UIDs[uid] = newDuplicate()
+		if _, exists := duplicatesByOrg[orgID]; !exists {
+			duplicatesByOrg[orgID] = duplicateEntries{
+				Titles: make(map[dashboardIdentity]*duplicate),
+				UIDs:   make(map[string]*duplicate),
 			}
-			duplicates.UIDs[uid].Sum += times
-			duplicates.UIDs[uid].InvolvedReaders[readerName] = struct{}{}
+		}
+
+		for uid, times := range tracker.uidUsage {
+			if _, ok := duplicatesByOrg[orgID].UIDs[uid]; !ok {
+				duplicatesByOrg[orgID].UIDs[uid] = newDuplicate()
+			}
+			duplicatesByOrg[orgID].UIDs[uid].Sum += times
+			duplicatesByOrg[orgID].UIDs[uid].InvolvedReaders[readerName] = struct{}{}
 		}
 
 		for id, times := range tracker.titleUsage {
-			if _, ok := duplicates.Titles[id]; !ok {
-				duplicates.Titles[id] = newDuplicate()
+			if _, ok := duplicatesByOrg[orgID].Titles[id]; !ok {
+				duplicatesByOrg[orgID].Titles[id] = newDuplicate()
 			}
-			duplicates.Titles[id].Sum += times
-			duplicates.Titles[id].InvolvedReaders[readerName] = struct{}{}
+			duplicatesByOrg[orgID].Titles[id].Sum += times
+			duplicatesByOrg[orgID].Titles[id].InvolvedReaders[readerName] = struct{}{}
 		}
 	}
 
-	return &duplicates
+	return duplicatesByOrg
 }
 
-func (c *duplicateValidator) logWarnings(duplicates *duplicateEntries) {
-	for uid, usage := range duplicates.UIDs {
-		if usage.Sum > 1 {
-			c.logger.Warn("the same UID is used more than once", "uid", uid, "times", usage.Sum, "providers",
-				keysToSlice(usage.InvolvedReaders))
+func (c *duplicateValidator) logWarnings(duplicatesByOrg map[int64]duplicateEntries) {
+	for orgID, duplicates := range duplicatesByOrg {
+		for uid, usage := range duplicates.UIDs {
+			if usage.Sum > 1 {
+				c.logger.Warn("the same UID is used more than once", "orgId", orgID, "uid", uid, "times", usage.Sum, "providers",
+					keysToSlice(usage.InvolvedReaders))
+			}
 		}
-	}
 
-	for id, usage := range duplicates.Titles {
-		if usage.Sum > 1 {
-			c.logger.Warn("dashboard title is not unique in folder", "title", id.title, "folderID", id.folderID, "times",
-				usage.Sum, "providers", keysToSlice(usage.InvolvedReaders))
+		for id, usage := range duplicates.Titles {
+			if usage.Sum > 1 {
+				c.logger.Warn("dashboard title is not unique in folder", "orgId", orgID, "title", id.title, "folderID", id.folderID, "times",
+					usage.Sum, "providers", keysToSlice(usage.InvolvedReaders))
+			}
 		}
 	}
 }
 
-func (c *duplicateValidator) takeAwayWritePermissions(duplicates *duplicateEntries) {
-	involvedReaders := duplicates.InvolvedReaders()
+func (c *duplicateValidator) takeAwayWritePermissions(duplicatesByOrg map[int64]duplicateEntries) {
+	// reset write permissions for all readers
 	for _, reader := range c.readers {
-		_, isReaderWithDuplicates := involvedReaders[reader.Cfg.Name]
-		// We restrict reader permissions to write to the database here to prevent overloading
-		reader.changeWritePermissions(isReaderWithDuplicates)
+		reader.changeWritePermissions(false)
+	}
 
-		if isReaderWithDuplicates {
-			c.logger.Warn("dashboards provisioning provider has no database write permissions because of duplicates", "provider", reader.Cfg.Name)
+	for orgID, duplicates := range duplicatesByOrg {
+		involvedReaders := duplicates.InvolvedReaders()
+		for _, reader := range c.readers {
+			_, exists := involvedReaders[reader.Cfg.Name]
+			if exists {
+				// We restrict reader permissions to write to the database here to prevent overloading
+				reader.changeWritePermissions(true)
+				c.logger.Warn("dashboards provisioning provider has no database write permissions because of duplicates", "provider", reader.Cfg.Name, "orgId", orgID)
+			}
 		}
 	}
 }
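
The validate() method the tests call is not part of this hunk. Assuming the signatures shown above, it presumably just chains the three steps; a sketch of what such a method could look like (not taken from the actual file):

// Sketch only, based on the signatures in this diff: collect duplicates per
// org, log warnings, then restrict write permissions for involved readers.
func (c *duplicateValidator) validate() {
    duplicatesByOrg := c.getDuplicates()
    c.logWarnings(duplicatesByOrg)
    c.takeAwayWritePermissions(duplicatesByOrg)
}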


@@ -62,13 +62,13 @@ func TestDuplicatesValidator(t *testing.T) {
 		duplicates := duplicateValidator.getDuplicates()
 
-		require.Equal(t, uint8(2), duplicates.UIDs["Z-phNqGmz"].Sum)
-		uidUsageReaders := keysToSlice(duplicates.UIDs["Z-phNqGmz"].InvolvedReaders)
+		require.Equal(t, uint8(2), duplicates[1].UIDs["Z-phNqGmz"].Sum)
+		uidUsageReaders := keysToSlice(duplicates[1].UIDs["Z-phNqGmz"].InvolvedReaders)
 		sort.Strings(uidUsageReaders)
 		require.Equal(t, []string{"first", "second"}, uidUsageReaders)
 
-		require.Equal(t, uint8(2), duplicates.Titles[identity].Sum)
-		titleUsageReaders := keysToSlice(duplicates.Titles[identity].InvolvedReaders)
+		require.Equal(t, uint8(2), duplicates[1].Titles[identity].Sum)
+		titleUsageReaders := keysToSlice(duplicates[1].Titles[identity].InvolvedReaders)
 		sort.Strings(titleUsageReaders)
 		require.Equal(t, []string{"first", "second"}, titleUsageReaders)
@@ -77,14 +77,20 @@ func TestDuplicatesValidator(t *testing.T) {
 		require.True(t, reader2.isDatabaseAccessRestricted())
 	})
 
-	t.Run("Duplicates validator should restrict write access only for readers with duplicates", func(t *testing.T) {
+	t.Run("Duplicates validator should not collect info about duplicate UIDs and titles within folders for different orgs", func(t *testing.T) {
+		const folderName = "duplicates-validator-folder"
+		folderID, err := getOrCreateFolderID(context.Background(), cfg, fakeService, folderName)
+		require.NoError(t, err)
+		identity := dashboardIdentity{folderID: folderID, title: "Grafana"}
+
 		cfg1 := &config{
-			Name: "first", Type: "file", OrgID: 1, Folder: "duplicates-validator-folder",
-			Options: map[string]interface{}{"path": twoDashboardsWithUID},
+			Name: "first", Type: "file", OrgID: 1, Folder: folderName,
+			Options: map[string]interface{}{"path": dashboardContainingUID},
 		}
 		cfg2 := &config{
-			Name: "second", Type: "file", OrgID: 1, Folder: "root",
-			Options: map[string]interface{}{"path": defaultDashboards},
+			Name: "second", Type: "file", OrgID: 2, Folder: folderName,
+			Options: map[string]interface{}{"path": dashboardContainingUID},
 		}
 
 		reader1, err := NewDashboardFileReader(cfg1, logger, nil)
@@ -103,23 +109,100 @@ func TestDuplicatesValidator(t *testing.T) {
 		duplicates := duplicateValidator.getDuplicates()
 
+		require.Equal(t, uint8(1), duplicates[1].UIDs["Z-phNqGmz"].Sum)
+		uidUsageReaders := keysToSlice(duplicates[1].UIDs["Z-phNqGmz"].InvolvedReaders)
+		sort.Strings(uidUsageReaders)
+		require.Equal(t, []string{"first"}, uidUsageReaders)
+
+		require.Equal(t, uint8(1), duplicates[2].UIDs["Z-phNqGmz"].Sum)
+		uidUsageReaders = keysToSlice(duplicates[2].UIDs["Z-phNqGmz"].InvolvedReaders)
+		sort.Strings(uidUsageReaders)
+		require.Equal(t, []string{"second"}, uidUsageReaders)
+
+		require.Equal(t, uint8(1), duplicates[1].Titles[identity].Sum)
+		titleUsageReaders := keysToSlice(duplicates[1].Titles[identity].InvolvedReaders)
+		sort.Strings(titleUsageReaders)
+		require.Equal(t, []string{"first"}, titleUsageReaders)
+
+		require.Equal(t, uint8(1), duplicates[2].Titles[identity].Sum)
+		titleUsageReaders = keysToSlice(duplicates[2].Titles[identity].InvolvedReaders)
+		sort.Strings(titleUsageReaders)
+		require.Equal(t, []string{"second"}, titleUsageReaders)
+
+		duplicateValidator.validate()
+
+		require.False(t, reader1.isDatabaseAccessRestricted())
+		require.False(t, reader2.isDatabaseAccessRestricted())
+	})
+
+	t.Run("Duplicates validator should restrict write access only for readers with duplicates", func(t *testing.T) {
+		cfg1 := &config{
+			Name: "first", Type: "file", OrgID: 1, Folder: "duplicates-validator-folder",
+			Options: map[string]interface{}{"path": twoDashboardsWithUID},
+		}
+		cfg2 := &config{
+			Name: "second", Type: "file", OrgID: 1, Folder: "root",
+			Options: map[string]interface{}{"path": defaultDashboards},
+		}
+		cfg3 := &config{
+			Name: "third", Type: "file", OrgID: 2, Folder: "duplicates-validator-folder",
+			Options: map[string]interface{}{"path": twoDashboardsWithUID},
+		}
+
+		reader1, err := NewDashboardFileReader(cfg1, logger, nil)
+		require.NoError(t, err)
+
+		reader2, err := NewDashboardFileReader(cfg2, logger, nil)
+		require.NoError(t, err)
+
+		reader3, err := NewDashboardFileReader(cfg3, logger, nil)
+		require.NoError(t, err)
+
+		duplicateValidator := newDuplicateValidator(logger, []*FileReader{reader1, reader2, reader3})
+
+		err = reader1.walkDisk(context.Background())
+		require.NoError(t, err)
+
+		err = reader2.walkDisk(context.Background())
+		require.NoError(t, err)
+
+		err = reader3.walkDisk(context.Background())
+		require.NoError(t, err)
+
+		duplicates := duplicateValidator.getDuplicates()
+
 		folderID, err := getOrCreateFolderID(context.Background(), cfg, fakeService, cfg1.Folder)
 		require.NoError(t, err)
 		identity := dashboardIdentity{folderID: folderID, title: "Grafana"}
 
-		require.Equal(t, uint8(2), duplicates.UIDs["Z-phNqGmz"].Sum)
-		uidUsageReaders := keysToSlice(duplicates.UIDs["Z-phNqGmz"].InvolvedReaders)
+		require.Equal(t, uint8(2), duplicates[1].UIDs["Z-phNqGmz"].Sum)
+		uidUsageReaders := keysToSlice(duplicates[1].UIDs["Z-phNqGmz"].InvolvedReaders)
 		sort.Strings(uidUsageReaders)
 		require.Equal(t, []string{"first"}, uidUsageReaders)
 
-		require.Equal(t, uint8(2), duplicates.Titles[identity].Sum)
-		titleUsageReaders := keysToSlice(duplicates.Titles[identity].InvolvedReaders)
+		require.Equal(t, uint8(2), duplicates[1].Titles[identity].Sum)
+		titleUsageReaders := keysToSlice(duplicates[1].Titles[identity].InvolvedReaders)
 		sort.Strings(titleUsageReaders)
 		require.Equal(t, []string{"first"}, titleUsageReaders)
 
+		folderID, err = getOrCreateFolderID(context.Background(), cfg3, fakeService, cfg3.Folder)
+		require.NoError(t, err)
+		identity = dashboardIdentity{folderID: folderID, title: "Grafana"}
+
+		require.Equal(t, uint8(2), duplicates[2].UIDs["Z-phNqGmz"].Sum)
+		uidUsageReaders = keysToSlice(duplicates[2].UIDs["Z-phNqGmz"].InvolvedReaders)
+		sort.Strings(uidUsageReaders)
+		require.Equal(t, []string{"third"}, uidUsageReaders)
+
+		require.Equal(t, uint8(2), duplicates[2].Titles[identity].Sum)
+		titleUsageReaders = keysToSlice(duplicates[2].Titles[identity].InvolvedReaders)
+		sort.Strings(titleUsageReaders)
+		require.Equal(t, []string{"third"}, titleUsageReaders)
+
 		duplicateValidator.validate()
 
 		require.True(t, reader1.isDatabaseAccessRestricted())
 		require.False(t, reader2.isDatabaseAccessRestricted())
+		require.True(t, reader3.isDatabaseAccessRestricted())
 	})
 }