MM-57084: Use cache for GetAllProfiles (#26391)
UserStore.GetAllProfiles is a very frequent call. We cache the result when no options are passed, which is the default case. https://mattermost.atlassian.net/browse/MM-57084

```release-note
NONE
```
parent 4fda7e6f34
commit bca5ab9a1f
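The diff below boils down to a read-through, single-entry cache: when a GetAllProfiles request matches the webapp's default paging (no filter options, Page 0, PerPage 100), the local cache layer serves it from a one-slot "AllUserProfiles" cache keyed by a fixed allUserKey, falls back to the database on a miss, and drops the entry whenever a profile is invalidated. A minimal, self-contained sketch of that pattern follows; the singleEntryCache type and the string-slice payload are simplified stand-ins, not the real cache.Cache and []*model.User used in the change.

```go
package main

import (
	"fmt"
	"sync"
)

// singleEntryCache is a simplified stand-in for the one-slot "AllUserProfiles"
// cache added in this change: a single value guarded by a mutex.
type singleEntryCache struct {
	mu    sync.Mutex
	valid bool
	users []string // stand-in for []*model.User
}

func (c *singleEntryCache) read() ([]string, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if !c.valid {
		return nil, false
	}
	return c.users, true
}

func (c *singleEntryCache) write(users []string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.users, c.valid = users, true
}

func (c *singleEntryCache) invalidate() {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.valid = false
}

// getAllProfiles mimics the cached path: try the cache first, fall back to the
// underlying store on a miss, then populate the cache for the next caller.
func getAllProfiles(c *singleEntryCache, fromStore func() []string) []string {
	if users, ok := c.read(); ok {
		return users // cache hit: no store call
	}
	users := fromStore()
	c.write(users)
	return users
}

func main() {
	calls := 0
	store := func() []string {
		calls++
		return []string{"alice", "bob"}
	}

	c := &singleEntryCache{}
	getAllProfiles(c, store)           // miss: hits the store
	getAllProfiles(c, store)           // hit: served from cache
	c.invalidate()                     // e.g. a profile changed
	getAllProfiles(c, store)           // miss again after invalidation
	fmt.Println("store calls:", calls) // store calls: 2
}
```

The actual change keys the single entry with allUserKey = "ALL" and wires invalidation through the new ClusterEventInvalidateCacheForAllProfiles event, so every node in the cluster drops its copy when any profile changes.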
@@ -263,9 +263,15 @@ func (a *App) createUserOrGuest(c request.CTX, user *model.User, guest bool) (*m
 		}
 	}
 
-	if user.EmailVerified {
-		a.InvalidateCacheForUser(ruser.Id)
+	// We always invalidate the user cache because we need to invalidate it
+	// when the user's EmailVerified is true, but we also always need to invalidate
+	// the GetAllProfiles cache.
+	// A proper fix would mean duplicating the invalidation of GetAllProfiles
+	// everywhere else, so to keep things simple we always invalidate both caches here.
+	// The performance penalty of invalidating the UserById cache is nil because the user was just created.
+	a.InvalidateCacheForUser(ruser.Id)
+
+	if user.EmailVerified {
 		nUser, err := a.ch.srv.userService.GetUser(ruser.Id)
 		if err != nil {
 			var nfErr *store.ErrNotFound

@@ -112,6 +112,7 @@ type LocalCacheStore struct {
 	postsUsageCache cache.Cache
 
 	user                   *LocalCacheUserStore
+	allUserCache           cache.Cache
 	userProfileByIdsCache  cache.Cache
 	profilesInChannelCache cache.Cache

@@ -315,6 +316,14 @@ func NewLocalCacheLayer(baseStore store.Store, metrics einterfaces.MetricsInterf
 	localCacheStore.termsOfService = LocalCacheTermsOfServiceStore{TermsOfServiceStore: baseStore.TermsOfService(), rootStore: &localCacheStore}
 
 	// Users
+	if localCacheStore.allUserCache, err = cacheProvider.NewCache(&cache.CacheOptions{
+		Size:                   1,
+		Name:                   "AllUserProfiles",
+		DefaultExpiry:          UserProfileByIDSec * time.Second,
+		InvalidateClusterEvent: model.ClusterEventInvalidateCacheForAllProfiles,
+	}); err != nil {
+		return
+	}
 	if localCacheStore.userProfileByIdsCache, err = cacheProvider.NewCache(&cache.CacheOptions{
 		Size: UserProfileByIDCacheSize,
 		Name: "UserProfileByIds",

@@ -372,6 +381,7 @@ func NewLocalCacheLayer(baseStore store.Store, metrics einterfaces.MetricsInterf
 		cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForTermsOfService, localCacheStore.termsOfService.handleClusterInvalidateTermsOfService)
 		cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForProfileByIds, localCacheStore.user.handleClusterInvalidateScheme)
 		cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForProfileInChannel, localCacheStore.user.handleClusterInvalidateProfilesInChannel)
+		cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForAllProfiles, localCacheStore.user.handleClusterInvalidateAllProfiles)
 		cluster.RegisterClusterMessageHandler(model.ClusterEventInvalidateCacheForTeams, localCacheStore.team.handleClusterInvalidateTeam)
 	}
 	return

@@ -497,6 +507,7 @@ func (s *LocalCacheStore) Invalidate() {
 	s.doClearCacheCluster(s.termsOfServiceCache)
 	s.doClearCacheCluster(s.lastPostTimeCache)
 	s.doClearCacheCluster(s.userProfileByIdsCache)
+	s.doClearCacheCluster(s.allUserCache)
 	s.doClearCacheCluster(s.profilesInChannelCache)
 	s.doClearCacheCluster(s.teamAllTeamIdsForUserCache)
 	s.doClearCacheCluster(s.rolePermissionsCache)

@@ -157,6 +157,7 @@ func getMockStore(t *testing.T) *mocks.Store {
 	}
 	mockUserStore.On("GetAllProfilesInChannel", mock.Anything, "123", true).Return(fakeProfilesInChannelMap, nil)
 	mockUserStore.On("GetAllProfilesInChannel", mock.Anything, "123", false).Return(fakeProfilesInChannelMap, nil)
+	mockUserStore.On("GetAllProfiles", mock.AnythingOfType("*model.UserGetOptions")).Return(fakeUser, nil)
 
 	mockUserStore.On("Get", mock.Anything, "123").Return(fakeUser[0], nil)
 	users := []*model.User{

@@ -21,6 +21,8 @@ type LocalCacheUserStore struct {
 	userProfileByIdsInvalidations map[string]bool
 }
 
+const allUserKey = "ALL"
+
 func (s *LocalCacheUserStore) handleClusterInvalidateScheme(msg *model.ClusterMessage) {
 	if bytes.Equal(msg.Data, clearCacheMessageData) {
 		s.rootStore.userProfileByIdsCache.Purge()

@@ -40,8 +42,17 @@ func (s *LocalCacheUserStore) handleClusterInvalidateProfilesInChannel(msg *mode
 	}
 }
 
+func (s *LocalCacheUserStore) handleClusterInvalidateAllProfiles(msg *model.ClusterMessage) {
+	if bytes.Equal(msg.Data, clearCacheMessageData) {
+		s.rootStore.allUserCache.Purge()
+	} else {
+		s.rootStore.allUserCache.Remove(string(msg.Data))
+	}
+}
+
 func (s *LocalCacheUserStore) ClearCaches() {
 	s.rootStore.userProfileByIdsCache.Purge()
+	s.rootStore.allUserCache.Purge()
 	s.rootStore.profilesInChannelCache.Purge()
 
 	if s.rootStore.metrics != nil {

@@ -55,6 +66,7 @@ func (s *LocalCacheUserStore) InvalidateProfileCacheForUser(userId string) {
 	s.userProfileByIdsInvalidations[userId] = true
 	s.userProfileByIdsMut.Unlock()
 	s.rootStore.doInvalidateCacheCluster(s.rootStore.userProfileByIdsCache, userId, nil)
+	s.rootStore.doInvalidateCacheCluster(s.rootStore.allUserCache, allUserKey, nil)
 
 	if s.rootStore.metrics != nil {
 		s.rootStore.metrics.IncrementMemCacheInvalidationCounter("Profile By Ids - Remove")

@@ -85,6 +97,30 @@ func (s *LocalCacheUserStore) InvalidateProfilesInChannelCache(channelID string)
 	}
 }
 
+func (s *LocalCacheUserStore) GetAllProfiles(options *model.UserGetOptions) ([]*model.User, error) {
+	if isEmptyOptions(options) &&
+		options.Page == 0 && options.PerPage == 100 { // This is hardcoded to the webapp call.
+		// read from cache
+		var users []*model.User
+		if err := s.rootStore.doStandardReadCache(s.rootStore.allUserCache, allUserKey, &users); err == nil {
+			return users, nil
+		}
+
+		users, err := s.UserStore.GetAllProfiles(options)
+		if err != nil {
+			return nil, err
+		}
+
+		// populate the cache only for those options.
+		s.rootStore.doStandardAddToCache(s.rootStore.allUserCache, allUserKey, users)
+
+		return users, nil
+	}
+
+	// For any other case, simply use the store.
+	return s.UserStore.GetAllProfiles(options)
+}
+
 func (s *LocalCacheUserStore) GetAllProfilesInChannel(ctx context.Context, channelId string, allowFromCache bool) (map[string]*model.User, error) {
 	if allowFromCache {
 		var cachedMap map[string]*model.User

@@ -261,3 +297,26 @@ func dedup(elements []string) []string {
 
 	return elements[:j+1]
 }
+
+func isEmptyOptions(options *model.UserGetOptions) bool {
+	// We check whether any of the options are set, and use the cache
+	// only if none are set, which is the most common case.
+	// options.WithoutTeam and options.Sort are unused.
+	if options.InTeamId == "" &&
+		options.NotInTeamId == "" &&
+		options.InChannelId == "" &&
+		options.NotInChannelId == "" &&
+		options.InGroupId == "" &&
+		options.NotInGroupId == "" &&
+		!options.GroupConstrained &&
+		!options.Inactive &&
+		!options.Active &&
+		options.Role == "" &&
+		len(options.Roles) == 0 &&
+		len(options.ChannelRoles) == 0 &&
+		len(options.TeamRoles) == 0 &&
+		options.ViewRestrictions == nil {
+		return true
+	}
+	return false
+}

@@ -118,6 +118,43 @@ func TestUserStoreCache(t *testing.T) {
 	})
 }
 
+func TestUserStoreGetAllProfiles(t *testing.T) {
+	t.Run("first call not cached, second cached and returning same data", func(t *testing.T) {
+		mockStore := getMockStore(t)
+		mockCacheProvider := getMockCacheProvider()
+		cachedStore, err := NewLocalCacheLayer(mockStore, nil, nil, mockCacheProvider)
+		require.NoError(t, err)
+
+		users, err := cachedStore.User().GetAllProfiles(&model.UserGetOptions{Page: 0, PerPage: 100})
+		require.NoError(t, err)
+		mockStore.User().(*mocks.UserStore).AssertNumberOfCalls(t, "GetAllProfiles", 1)
+
+		users2, _ := cachedStore.User().GetAllProfiles(&model.UserGetOptions{Page: 0, PerPage: 100})
+		mockStore.User().(*mocks.UserStore).AssertNumberOfCalls(t, "GetAllProfiles", 1)
+		assert.Equal(t, users, users2)
+
+		_, _ = cachedStore.User().GetAllProfiles(&model.UserGetOptions{Page: 2, PerPage: 1})
+		mockStore.User().(*mocks.UserStore).AssertNumberOfCalls(t, "GetAllProfiles", 2)
+		assert.Equal(t, users, users2)
+	})
+
+	t.Run("different page sizes aren't cached", func(t *testing.T) {
+		mockStore := getMockStore(t)
+		mockCacheProvider := getMockCacheProvider()
+		cachedStore, err := NewLocalCacheLayer(mockStore, nil, nil, mockCacheProvider)
+		require.NoError(t, err)
+
+		_, _ = cachedStore.User().GetAllProfiles(&model.UserGetOptions{Page: 0, PerPage: 100})
+		mockStore.User().(*mocks.UserStore).AssertNumberOfCalls(t, "GetAllProfiles", 1)
+
+		_, _ = cachedStore.User().GetAllProfiles(&model.UserGetOptions{Page: 2, PerPage: 1})
+		mockStore.User().(*mocks.UserStore).AssertNumberOfCalls(t, "GetAllProfiles", 2)
+
+		_, _ = cachedStore.User().GetAllProfiles(&model.UserGetOptions{Page: 1, PerPage: 2})
+		mockStore.User().(*mocks.UserStore).AssertNumberOfCalls(t, "GetAllProfiles", 3)
+	})
+}
+
 func TestUserStoreProfilesInChannelCache(t *testing.T) {
 	fakeChannelId := "123"
 	fakeUserId := "456"

@@ -20,6 +20,7 @@ const (
 	ClusterEventInvalidateCacheForRoles            ClusterEvent = "inv_roles"
 	ClusterEventInvalidateCacheForRolePermissions  ClusterEvent = "inv_role_permissions"
 	ClusterEventInvalidateCacheForProfileByIds     ClusterEvent = "inv_profile_ids"
+	ClusterEventInvalidateCacheForAllProfiles      ClusterEvent = "inv_all_profiles"
 	ClusterEventInvalidateCacheForProfileInChannel ClusterEvent = "inv_profile_in_channel"
 	ClusterEventInvalidateCacheForSchemes          ClusterEvent = "inv_schemes"
 	ClusterEventInvalidateCacheForFileInfos        ClusterEvent = "inv_file_infos"

@@ -8,7 +8,7 @@ export default {
     PAGE_SIZE_MAXIMUM: 200,
     LOGS_PAGE_SIZE_DEFAULT: 10000,
     AUDITS_CHUNK_SIZE: 100,
-    PROFILE_CHUNK_SIZE: 100,
+    PROFILE_CHUNK_SIZE: 100, // WARN: Do not change this without changing the cache key on server side as well. See https://github.com/mattermost/mattermost/pull/26391.
     CHANNELS_CHUNK_SIZE: 50,
     TEAMS_CHUNK_SIZE: 50,
     JOBS_CHUNK_SIZE: 50,