MM-59271 Add DataLoader classes and batch loading missing users and statuses (#27804)

* Change addUserIdsForStatusAndProfileFetchingPoll to only ever receive arrays of user IDs

* Remove bufferCounter in favour of measuring array length

* Split addUserIdsForStatusAndProfileFetchingPoll into two separate actions

* Slightly simplify getPendingStatusesById

* Add DataLoaderInterval and use in actions/status_profile_polling

* Add tests for IntervalDataLoader

* Start adding DelayedDataLoader

* Updated DelayedDataLoader to split requested data into batches and made addIdsToLoad not return a promise by default

* Modify getMissingProfilesForIds to use a DataLoader and attach DataLoaders to the store

* Add tests for getMissingProfilesByIds loading statuses

* Revert change that defined getMissingProfilesByIds in a closure

* Change getMissingProfilesByUsernames to also use a DelayedDataLoader and add tests

* Pass extra argument into mocked version of Thunk

* Rename DataLoader.addIdsToLoad to DataLoader.queueForLoading

* Rename things and address feedback
This commit is contained in:
Harrison Healey 2024-08-06 15:39:10 -04:00 committed by GitHub
parent e1f0c8ee57
commit 56c84df916
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
12 changed files with 1119 additions and 201 deletions

View File

@ -1,6 +1,7 @@
/.github/workflows/channels-ci.yml @mattermost/web-platform
/webapp/package.json @mattermost/web-platform
/webapp/channels/package.json @mattermost/web-platform
/webapp/channels/src/packages/mattermost-redux/src/store/configureStore.ts @hmhealey
/webapp/Makefile @mattermost/web-platform
/webapp/package-lock.json @mattermost/web-platform
/webapp/platform/*/package.json @mattermost/web-platform

View File

@ -5,7 +5,7 @@ import cloneDeep from 'lodash/cloneDeep';
import type {UserProfile} from '@mattermost/types/users';
import {addUserIdsForStatusAndProfileFetchingPoll} from 'mattermost-redux/actions/status_profile_polling';
import {addUserIdsForStatusFetchingPoll} from 'mattermost-redux/actions/status_profile_polling';
import {getStatusesByIds} from 'mattermost-redux/actions/users';
import {Preferences} from 'mattermost-redux/constants';
@ -20,7 +20,7 @@ jest.mock('mattermost-redux/actions/users', () => ({
}));
jest.mock('mattermost-redux/actions/status_profile_polling', () => ({
addUserIdsForStatusAndProfileFetchingPoll: jest.fn(() => {
addUserIdsForStatusFetchingPoll: jest.fn(() => {
return {type: ''};
}),
}));
@ -88,8 +88,8 @@ describe('actions/status_actions', () => {
const state = cloneDeep(initialState);
const testStore = mockStore(state);
testStore.dispatch(Actions.addVisibleUsersInCurrentChannelToStatusPoll());
expect(addUserIdsForStatusAndProfileFetchingPoll).toHaveBeenCalled();
expect(addUserIdsForStatusAndProfileFetchingPoll).toHaveBeenCalledWith({userIdsForStatus: ['user_id2', 'user_id3']});
expect(addUserIdsForStatusFetchingPoll).toHaveBeenCalled();
expect(addUserIdsForStatusFetchingPoll).toHaveBeenCalledWith(['user_id2', 'user_id3']);
});
test('load statuses with empty channel and user in sidebar', () => {
@ -97,7 +97,7 @@ describe('actions/status_actions', () => {
state.entities.channels.currentChannelId = 'channel_id2';
const testStore = mockStore(state);
testStore.dispatch(Actions.addVisibleUsersInCurrentChannelToStatusPoll());
expect(addUserIdsForStatusAndProfileFetchingPoll).toHaveBeenCalledWith({userIdsForStatus: ['user_id3']});
expect(addUserIdsForStatusFetchingPoll).toHaveBeenCalledWith(['user_id3']);
});
test('load statuses with empty channel and no users in sidebar', () => {
@ -106,7 +106,7 @@ describe('actions/status_actions', () => {
state.entities.preferences.myPreferences = {};
const testStore = mockStore(state);
testStore.dispatch(Actions.addVisibleUsersInCurrentChannelToStatusPoll());
expect(addUserIdsForStatusAndProfileFetchingPoll).not.toHaveBeenCalled();
expect(addUserIdsForStatusFetchingPoll).not.toHaveBeenCalled();
});
});

View File

@ -3,7 +3,7 @@
import type {UserProfile} from '@mattermost/types/users';
import {addUserIdsForStatusAndProfileFetchingPoll} from 'mattermost-redux/actions/status_profile_polling';
import {addUserIdsForStatusFetchingPoll} from 'mattermost-redux/actions/status_profile_polling';
import {getStatusesByIds} from 'mattermost-redux/actions/users';
import {getCurrentChannelId} from 'mattermost-redux/selectors/entities/channels';
import {getIsUserStatusesConfigEnabled} from 'mattermost-redux/selectors/entities/common';
@ -52,7 +52,7 @@ export function addVisibleUsersInCurrentChannelToStatusPoll(): ActionFunc<boolea
// Both the users in the DM list and recent posts constitute for all the visible users in the current channel
const userIdsForStatus = Array.from(userIdsToFetchStatusFor);
if (userIdsForStatus.length > 0) {
dispatch(addUserIdsForStatusAndProfileFetchingPoll({userIdsForStatus}));
dispatch(addUserIdsForStatusFetchingPoll(userIdsForStatus));
}
return {data: true};

View File

@ -45,7 +45,7 @@ export default function ChannelController(props: Props) {
// This cleans up the status and profile setInterval of fetching poll we use to batch requests
// when fetching statuses and profiles for a list of users.
cleanUpStatusAndProfileFetchingPoll();
dispatch(cleanUpStatusAndProfileFetchingPoll());
};
}, []);

View File

@ -7,148 +7,42 @@ import type {UserProfile} from '@mattermost/types/users';
import {searchGroups} from 'mattermost-redux/actions/groups';
import {getNeededAtMentionedUsernamesAndGroups} from 'mattermost-redux/actions/posts';
import {getProfilesByIds, getProfilesByUsernames, getStatusesByIds} from 'mattermost-redux/actions/users';
import {
getProfilesByIds,
getProfilesByUsernames,
getStatusesByIds,
maxUserIdsPerProfilesRequest,
maxUserIdsPerStatusesRequest,
} from 'mattermost-redux/actions/users';
import {getCurrentUser, getCurrentUserId, getIsUserStatusesConfigEnabled, getUsers} from 'mattermost-redux/selectors/entities/common';
import {getUsersStatusAndProfileFetchingPollInterval} from 'mattermost-redux/selectors/entities/general';
import {getUserStatuses} from 'mattermost-redux/selectors/entities/users';
import type {ActionFunc, ActionFuncAsync} from 'mattermost-redux/types/actions';
const MAX_USER_IDS_PER_STATUS_REQUEST = 200; // users ids per 'users/status/ids'request
const MAX_USER_IDS_PER_PROFILES_REQUEST = 100; // users ids per 'users/ids' request
const pendingUserIdsForStatuses = new Set<string>();
const pendingUserIdsForProfiles = new Set<string>();
let intervalIdForFetchingPoll: NodeJS.Timeout | null = null;
type UserIdsSingleOrArray = Array<UserProfile['id']> | UserProfile['id'];
type AddUserIdsForStatusAndProfileFetchingPoll = {
userIdsForStatus?: UserIdsSingleOrArray;
userIdsForProfile?: UserIdsSingleOrArray;
}
import type {ActionFunc, ActionFuncAsync, ThunkActionFunc} from 'mattermost-redux/types/actions';
import {BackgroundDataLoader} from 'mattermost-redux/utils/data_loader';
/**
* Adds list(s) of user id(s) to the status and profile fetching poll. Which gets fetched based on user interval polling duration
* Do not use if status or profile is required immediately.
* Adds list(s) of user id(s) to the status fetching poll. Which gets fetched based on user interval polling duration
* Do not use if status is required immediately.
*/
export function addUserIdsForStatusAndProfileFetchingPoll({userIdsForStatus, userIdsForProfile}: AddUserIdsForStatusAndProfileFetchingPoll): ActionFunc<boolean> {
return (dispatch, getState) => {
function getPendingStatusesById() {
// Since we can only fetch a defined number of user statuses at a time, we need to batch the requests
if (pendingUserIdsForStatuses.size >= MAX_USER_IDS_PER_STATUS_REQUEST) {
// We use temp buffer here to store up until max buffer size
// and clear out processed user ids
const bufferedUserIds: string[] = [];
let bufferCounter = 0;
for (const pendingUserId of pendingUserIdsForStatuses) {
if (pendingUserId.length === 0) {
continue;
}
bufferedUserIds.push(pendingUserId);
pendingUserIdsForStatuses.delete(pendingUserId);
bufferCounter++;
if (bufferCounter >= MAX_USER_IDS_PER_STATUS_REQUEST) {
break;
}
}
if (bufferedUserIds.length > 0) {
dispatch(getStatusesByIds(bufferedUserIds));
}
} else {
// If we have less than max buffer size, we can directly fetch the statuses
const lessThanBufferUserIds = Array.from(pendingUserIdsForStatuses);
if (lessThanBufferUserIds.length > 0) {
dispatch(getStatusesByIds(lessThanBufferUserIds));
pendingUserIdsForStatuses.clear();
}
}
export function addUserIdsForStatusFetchingPoll(userIdsForStatus: Array<UserProfile['id']>): ActionFunc<boolean> {
return (dispatch, getState, {loaders}: any) => {
if (!loaders.pollingStatusLoader) {
loaders.pollingStatusLoader = new BackgroundDataLoader<UserProfile['id']>({
fetchBatch: (userIds) => dispatch(getStatusesByIds(userIds)),
maxBatchSize: maxUserIdsPerStatusesRequest,
});
}
function getPendingProfilesById() {
if (pendingUserIdsForProfiles.size >= MAX_USER_IDS_PER_PROFILES_REQUEST) {
const bufferedUserIds: Array<UserProfile['id']> = [];
let bufferCounter = 0;
for (const pendingUserId of pendingUserIdsForProfiles) {
if (pendingUserId.length === 0) {
continue;
}
bufferedUserIds.push(pendingUserId);
pendingUserIdsForProfiles.delete(pendingUserId);
bufferCounter++;
// We can only fetch a defined number of user profiles at a time
// So we break out of the loop if we reach the max batch size
if (bufferCounter >= MAX_USER_IDS_PER_PROFILES_REQUEST) {
break;
}
}
if (bufferedUserIds.length > 0) {
dispatch(getProfilesByIds(bufferedUserIds));
}
} else {
const lessThanBufferUserIds = Array.from(pendingUserIdsForProfiles);
if (lessThanBufferUserIds.length > 0) {
dispatch(getProfilesByIds(lessThanBufferUserIds));
pendingUserIdsForProfiles.clear();
}
}
}
loaders.pollingStatusLoader.queue(userIdsForStatus);
const pollingInterval = getUsersStatusAndProfileFetchingPollInterval(getState());
if (userIdsForStatus) {
if (Array.isArray(userIdsForStatus)) {
userIdsForStatus.forEach((userId) => {
if (userId.length > 0) {
pendingUserIdsForStatuses.add(userId);
}
});
} else {
pendingUserIdsForStatuses.add(userIdsForStatus);
}
}
if (userIdsForProfile) {
if (Array.isArray(userIdsForProfile)) {
userIdsForProfile.forEach((userId) => {
if (userId.length > 0) {
pendingUserIdsForProfiles.add(userId);
}
});
} else {
pendingUserIdsForProfiles.add(userIdsForProfile);
}
}
// Escape hatch to fetch immediately or when we haven't received the polling interval from config yet
if (!pollingInterval || pollingInterval <= 0) {
if (pendingUserIdsForStatuses.size > 0) {
getPendingStatusesById();
}
if (pendingUserIdsForProfiles.size > 0) {
getPendingProfilesById();
}
} else if (intervalIdForFetchingPoll === null) {
loaders.pollingStatusLoader.fetchBatchNow();
} else {
// Start the interval if it is not already running
intervalIdForFetchingPoll = setInterval(() => {
if (pendingUserIdsForStatuses.size > 0) {
getPendingStatusesById();
}
if (pendingUserIdsForProfiles.size > 0) {
getPendingProfilesById();
}
}, pollingInterval);
loaders.pollingStatusLoader.startIntervalIfNeeded(pollingInterval);
}
// Now here the interval is already running and we have added the user ids to the poll so we don't need to do anything
@ -156,11 +50,42 @@ export function addUserIdsForStatusAndProfileFetchingPoll({userIdsForStatus, use
};
}
export function cleanUpStatusAndProfileFetchingPoll() {
if (intervalIdForFetchingPoll !== null) {
clearInterval(intervalIdForFetchingPoll);
intervalIdForFetchingPoll = null;
}
/**
* Adds list(s) of user id(s) to the profile fetching poll. Which gets fetched based on user interval polling duration
* Do not use if profile is required immediately.
*/
export function addUserIdsForProfileFetchingPoll(userIdsForProfile: Array<UserProfile['id']>): ActionFunc<boolean> {
return (dispatch, getState, {loaders}: any) => {
if (!loaders.pollingProfileLoader) {
loaders.pollingProfileLoader = new BackgroundDataLoader<UserProfile['id']>({
fetchBatch: (userIds) => dispatch(getProfilesByIds(userIds)),
maxBatchSize: maxUserIdsPerProfilesRequest,
});
}
loaders.pollingProfileLoader.queue(userIdsForProfile);
const pollingInterval = getUsersStatusAndProfileFetchingPollInterval(getState());
// Escape hatch to fetch immediately or when we haven't received the polling interval from config yet
if (!pollingInterval || pollingInterval <= 0) {
loaders.pollingProfileLoader.fetchBatchNow();
} else {
// Start the interval if it is not already running
loaders.pollingProfileLoader.startIntervalIfNeeded(pollingInterval);
}
// Now here the interval is already running and we have added the user ids to the poll so we don't need to do anything
return {data: true};
};
}
export function cleanUpStatusAndProfileFetchingPoll(): ThunkActionFunc<void> {
return (dispatch, getState, {loaders}: any) => {
loaders.pollingStatusLoader?.stopInterval();
loaders.pollingProfileLoader?.stopInterval();
};
}
/**
@ -204,10 +129,10 @@ export function batchFetchStatusesProfilesGroupsFromPosts(postsArrayOrMap: Post[
const permalinkPostPreviewMetaData = embed.data as PostPreviewMetadata;
if (permalinkPostPreviewMetaData.post?.user_id && !users[permalinkPostPreviewMetaData.post.user_id] && permalinkPostPreviewMetaData.post.user_id !== currentUserId) {
dispatch(addUserIdsForStatusAndProfileFetchingPoll({userIdsForProfile: permalinkPostPreviewMetaData.post.user_id}));
dispatch(addUserIdsForProfileFetchingPoll([permalinkPostPreviewMetaData.post.user_id]));
}
if (permalinkPostPreviewMetaData.post?.user_id && !userStatuses[permalinkPostPreviewMetaData.post.user_id] && permalinkPostPreviewMetaData.post.user_id !== currentUserId && isUserStatusesConfigEnabled) {
dispatch(addUserIdsForStatusAndProfileFetchingPoll({userIdsForStatus: permalinkPostPreviewMetaData.post.user_id}));
dispatch(addUserIdsForStatusFetchingPoll([permalinkPostPreviewMetaData.post.user_id]));
}
}
});
@ -217,7 +142,7 @@ export function batchFetchStatusesProfilesGroupsFromPosts(postsArrayOrMap: Post[
if (post.metadata.acknowledgements) {
post.metadata.acknowledgements.forEach((ack: PostAcknowledgement) => {
if (ack.acknowledged_at > 0 && ack.user_id && !users[ack.user_id] && ack.user_id !== currentUserId) {
dispatch(addUserIdsForStatusAndProfileFetchingPoll({userIdsForProfile: ack.user_id}));
dispatch(addUserIdsForProfileFetchingPoll([ack.user_id]));
}
});
}
@ -226,13 +151,13 @@ export function batchFetchStatusesProfilesGroupsFromPosts(postsArrayOrMap: Post[
// This is sufficient to check if the profile is already fetched
// as we receive the websocket events for the profiles changes
if (!users[post.user_id] && post.user_id !== currentUserId) {
dispatch(addUserIdsForStatusAndProfileFetchingPoll({userIdsForProfile: post.user_id}));
dispatch(addUserIdsForProfileFetchingPoll([post.user_id]));
}
// This is sufficient to check if the status is already fetched
// as we do the polling for statuses for current channel's channel members every 1 minute in channel_controller
if (!userStatuses[post.user_id] && post.user_id !== currentUserId && isUserStatusesConfigEnabled) {
dispatch(addUserIdsForStatusAndProfileFetchingPoll({userIdsForStatus: post.user_id}));
dispatch(addUserIdsForStatusFetchingPoll([post.user_id]));
}
// We need to check for all @mentions in the post, they can be either users or groups

View File

@ -68,7 +68,7 @@ export function getThreadsForCurrentTeam({before = '', after = '', unread = fals
}
if (userThreadList?.threads?.length) {
await dispatch(getMissingProfilesByIds(uniq(userThreadList.threads.map(({participants}) => participants.map(({id}) => id)).flat())));
await dispatch(getMissingProfilesByIds(userThreadList.threads.map(({participants}) => participants.map(({id}) => id)).flat()));
dispatch({
type: PostTypes.RECEIVED_POSTS,
@ -138,7 +138,7 @@ export function getCountsAndThreadsSince(userId: string, teamId: string, since?:
const actions = [];
if (userThreadList?.threads?.length) {
await dispatch(getMissingProfilesByIds(uniq(userThreadList.threads.map(({participants}) => participants.map(({id}) => id)).flat())));
await dispatch(getMissingProfilesByIds(userThreadList.threads.map(({participants}) => participants.map(({id}) => id)).flat()));
actions.push({
type: PostTypes.RECEIVED_POSTS,
data: {posts: userThreadList.threads.map(({post}) => ({...post, update_at: 0}))},

View File

@ -7,19 +7,20 @@ import nock from 'nock';
import type {UserProfile} from '@mattermost/types/users';
import {UserTypes} from 'mattermost-redux/action_types';
import {GeneralTypes, UserTypes} from 'mattermost-redux/action_types';
import * as Actions from 'mattermost-redux/actions/users';
import {Client4} from 'mattermost-redux/client';
import {General, RequestStatus} from 'mattermost-redux/constants';
import deepFreeze from 'mattermost-redux/utils/deep_freeze';
import TestHelper from '../../test/test_helper';
import configureStore from '../../test/test_store';
import {RequestStatus} from '../constants';
const OK_RESPONSE = {status: 'OK'};
describe('Actions.Users', () => {
let store = configureStore();
beforeAll(() => {
TestHelper.initBasic(Client4);
});
@ -256,21 +257,252 @@ describe('Actions.Users', () => {
expect(profiles[user.id]).toBeTruthy();
});
it('getMissingProfilesByIds', async () => {
nock(Client4.getBaseRoute()).
post('/users').
reply(200, TestHelper.fakeUserWithId());
describe('getMissingProfilesByIds', () => {
const testUserId1 = 'testUser1';
const testUserId2 = 'testUser2';
const testUserId3 = 'testUser3';
const user = await TestHelper.basicClient4!.createUser(TestHelper.fakeUser(), '', '');
beforeEach(() => {
jest.useFakeTimers();
});
nock(Client4.getBaseRoute()).
post('/users/ids').
reply(200, [user]);
afterEach(() => {
expect(jest.getTimerCount()).toBe(0);
await store.dispatch(Actions.getMissingProfilesByIds([user.id]));
const {profiles} = store.getState().entities.users;
jest.useRealTimers();
});
expect(profiles[user.id]).toBeTruthy();
test('should be able to get a single user', async () => {
const profileMock = nock(Client4.getBaseRoute()).
post('/users/ids', [testUserId1]).
reply(200, [TestHelper.getUserMock({id: testUserId1})]);
const statusMock = nock(Client4.getBaseRoute()).
post('/users/status/ids', [testUserId1]).
reply(200, [{user_id: testUserId1, status: General.ONLINE}]);
const promise = store.dispatch(Actions.getMissingProfilesByIds([testUserId1]));
jest.advanceTimersToNextTimer();
await promise;
expect(profileMock.isDone()).toBe(true);
expect(statusMock.isDone()).toBe(true);
expect(store.getState().entities.users.profiles[testUserId1]).toMatchObject({id: testUserId1});
expect(store.getState().entities.users.statuses[testUserId1]).toEqual(General.ONLINE);
});
test('should be able to get multiple users', async () => {
const profileMock = nock(Client4.getBaseRoute()).
post('/users/ids', [testUserId1, testUserId2, testUserId3]).
reply(200, [
TestHelper.getUserMock({id: testUserId1}),
TestHelper.getUserMock({id: testUserId2}),
TestHelper.getUserMock({id: testUserId3}),
]);
const statusMock = nock(Client4.getBaseRoute()).
post('/users/status/ids', [testUserId1, testUserId2, testUserId3]).
reply(200, [
{user_id: testUserId1, status: General.ONLINE},
{user_id: testUserId2, status: General.ONLINE},
{user_id: testUserId3, status: General.ONLINE},
]);
const promise = store.dispatch(Actions.getMissingProfilesByIds([testUserId1, testUserId2, testUserId3]));
jest.advanceTimersToNextTimer();
await promise;
expect(profileMock.isDone()).toBe(true);
expect(statusMock.isDone()).toBe(true);
expect(store.getState().entities.users.profiles[testUserId1]).toMatchObject({id: testUserId1});
expect(store.getState().entities.users.statuses[testUserId1]).toEqual(General.ONLINE);
expect(store.getState().entities.users.profiles[testUserId2]).toMatchObject({id: testUserId2});
expect(store.getState().entities.users.statuses[testUserId2]).toEqual(General.ONLINE);
expect(store.getState().entities.users.profiles[testUserId3]).toMatchObject({id: testUserId3});
expect(store.getState().entities.users.statuses[testUserId3]).toEqual(General.ONLINE);
});
test('should batch requests to get users across multiple calls and dedupe IDs', async () => {
const profileMock = nock(Client4.getBaseRoute()).
post('/users/ids', [testUserId1, testUserId2, testUserId3]).
reply(200, [
TestHelper.getUserMock({id: testUserId1}),
TestHelper.getUserMock({id: testUserId2}),
TestHelper.getUserMock({id: testUserId3}),
]);
const statusMock = nock(Client4.getBaseRoute()).
post('/users/status/ids', [testUserId1, testUserId2, testUserId3]).
reply(200, [
{user_id: testUserId1, status: General.ONLINE},
{user_id: testUserId2, status: General.ONLINE},
{user_id: testUserId3, status: General.ONLINE},
]);
const promise = Promise.all([
store.dispatch(Actions.getMissingProfilesByIds([testUserId1])),
store.dispatch(Actions.getMissingProfilesByIds([testUserId2, testUserId3])),
store.dispatch(Actions.getMissingProfilesByIds([testUserId2])),
]);
jest.advanceTimersToNextTimer();
await promise;
expect(profileMock.isDone()).toBe(true);
expect(statusMock.isDone()).toBe(true);
expect(store.getState().entities.users.profiles[testUserId1]).toMatchObject({id: testUserId1});
expect(store.getState().entities.users.statuses[testUserId1]).toEqual(General.ONLINE);
expect(store.getState().entities.users.profiles[testUserId2]).toMatchObject({id: testUserId2});
expect(store.getState().entities.users.statuses[testUserId2]).toEqual(General.ONLINE);
expect(store.getState().entities.users.profiles[testUserId3]).toMatchObject({id: testUserId3});
expect(store.getState().entities.users.statuses[testUserId3]).toEqual(General.ONLINE);
});
test('should split requests for user IDs into multiple requests when necessary', async () => {
const idsPerBatch = Actions.maxUserIdsPerProfilesRequest;
const testUserIds: string[] = [];
for (let i = 0; i < idsPerBatch * 2.5; i++) {
testUserIds.push('testUser' + i);
}
const testUserIds1 = testUserIds.slice(0, idsPerBatch);
const testUserIds2 = testUserIds.slice(idsPerBatch, idsPerBatch * 2);
const testUserIds3 = testUserIds.slice(idsPerBatch * 2, idsPerBatch * 3);
const profileMock1 = nock(Client4.getBaseRoute()).
post('/users/ids', testUserIds1).
reply(200, testUserIds1.map((id) => TestHelper.getUserMock({id})));
const statusMock1 = nock(Client4.getBaseRoute()).
post('/users/status/ids', testUserIds1).
reply(200, testUserIds1.map((id) => ({user_id: id, status: General.ONLINE})));
const profileMock2 = nock(Client4.getBaseRoute()).
post('/users/ids', testUserIds2).
reply(200, testUserIds2.map((id) => TestHelper.getUserMock({id})));
const statusMock2 = nock(Client4.getBaseRoute()).
post('/users/status/ids', testUserIds2).
reply(200, testUserIds2.map((id) => ({user_id: id, status: General.ONLINE})));
const profileMock3 = nock(Client4.getBaseRoute()).
post('/users/ids', testUserIds3).
reply(200, testUserIds3.map((id) => TestHelper.getUserMock({id})));
const statusMock3 = nock(Client4.getBaseRoute()).
post('/users/status/ids', testUserIds3).
reply(200, testUserIds3.map((id) => ({user_id: id, status: General.ONLINE})));
for (const id of testUserIds) {
expect(store.getState().entities.users.profiles[id]).not.toBeDefined();
expect(store.getState().entities.users.statuses[id]).not.toEqual(General.ONLINE);
}
const promise = store.dispatch(Actions.getMissingProfilesByIds(testUserIds));
jest.advanceTimersToNextTimer();
jest.advanceTimersToNextTimer();
jest.advanceTimersToNextTimer();
await promise;
// Ensure that each of those requests were made
expect(profileMock1.isDone()).toBe(true);
expect(statusMock1.isDone()).toBe(true);
expect(profileMock2.isDone()).toBe(true);
expect(statusMock2.isDone()).toBe(true);
expect(profileMock3.isDone()).toBe(true);
expect(statusMock3.isDone()).toBe(true);
// And that all of the users were loaded
for (const id of testUserIds) {
expect(store.getState().entities.users.profiles[id]).toBeDefined();
expect(store.getState().entities.users.statuses[id]).toEqual(General.ONLINE);
}
});
test('should not request statuses when those are disabled', async () => {
const profileMock = nock(Client4.getBaseRoute()).
post('/users/ids', [testUserId1]).
reply(200, [TestHelper.getUserMock({id: testUserId1})]);
const statusMock = nock(Client4.getBaseRoute()).
post('/users/status/ids', [testUserId1]).
reply(200, [{user_id: testUserId1, status: General.ONLINE}]);
store.dispatch({
type: GeneralTypes.CLIENT_CONFIG_RECEIVED,
data: {
EnableUserStatuses: 'false',
},
});
const promise = store.dispatch(Actions.getMissingProfilesByIds([testUserId1]));
jest.advanceTimersToNextTimer();
await promise;
expect(profileMock.isDone()).toBe(true);
expect(statusMock.isDone()).toBe(false);
expect(store.getState().entities.users.profiles[testUserId1]).toMatchObject({id: testUserId1});
expect(store.getState().entities.users.statuses[testUserId1]).toBeUndefined();
});
});
describe('getMissingProfilesByUsernames', () => {
const testUserId1 = 'testUser1';
const testUsername1 = 'test_user_1';
const testUserId2 = 'testUser2';
const testUsername2 = 'test_user_2';
const testUserId3 = 'testUser3';
const testUsername3 = 'test_user_3';
beforeEach(() => {
jest.useFakeTimers();
});
afterEach(() => {
expect(jest.getTimerCount()).toBe(0);
jest.useRealTimers();
});
test('should be able to get a single user', async () => {
const profileMock = nock(Client4.getBaseRoute()).
post('/users/usernames', [testUsername1]).
reply(200, [TestHelper.getUserMock({id: testUserId1, username: testUsername1})]);
const promise = store.dispatch(Actions.getMissingProfilesByUsernames([testUsername1]));
jest.advanceTimersToNextTimer();
await promise;
expect(profileMock.isDone()).toBe(true);
expect(store.getState().entities.users.profiles[testUserId1]).toMatchObject({id: testUserId1});
});
test('should be able to get multiple users', async () => {
const profileMock = nock(Client4.getBaseRoute()).
post('/users/usernames', [testUsername1, testUsername2, testUsername3]).
reply(200, [
TestHelper.getUserMock({id: testUserId1, username: testUsername1}),
TestHelper.getUserMock({id: testUserId2, username: testUsername2}),
TestHelper.getUserMock({id: testUserId3, username: testUsername3}),
]);
const promise = store.dispatch(Actions.getMissingProfilesByUsernames([
testUsername1,
testUsername2,
testUsername3,
]));
jest.advanceTimersToNextTimer();
await promise;
expect(profileMock.isDone()).toBe(true);
expect(store.getState().entities.users.profiles[testUserId1]).toMatchObject({id: testUserId1});
expect(store.getState().entities.users.profiles[testUserId2]).toMatchObject({id: testUserId2});
expect(store.getState().entities.users.profiles[testUserId3]).toMatchObject({id: testUserId3});
});
});
it('getProfilesByUsernames', async () => {

View File

@ -21,10 +21,17 @@ import {General} from 'mattermost-redux/constants';
import {getIsUserStatusesConfigEnabled} from 'mattermost-redux/selectors/entities/common';
import {getServerVersion} from 'mattermost-redux/selectors/entities/general';
import {isCollapsedThreadsEnabled} from 'mattermost-redux/selectors/entities/preferences';
import {getCurrentUserId, getUsers} from 'mattermost-redux/selectors/entities/users';
import {getCurrentUserId, getUser as selectUser, getUsers, getUsersByUsername} from 'mattermost-redux/selectors/entities/users';
import type {ActionFuncAsync} from 'mattermost-redux/types/actions';
import {DelayedDataLoader} from 'mattermost-redux/utils/data_loader';
import {isMinimumServerVersion} from 'mattermost-redux/utils/helpers';
// Delay requests for missing profiles for up to 100ms to allow for simultaneous requests to be batched
const missingProfilesWait = 100;
export const maxUserIdsPerProfilesRequest = 100; // users ids per 'users/ids' request
export const maxUserIdsPerStatusesRequest = 200; // users ids per 'users/status/ids' request
export function generateMfaSecret(userId: string) {
return bindClientFunc({
clientFunc: Client4.generateMfaSecret,
@ -152,49 +159,61 @@ export function getProfiles(page = 0, perPage: number = General.PROFILE_CHUNK_SI
};
}
export function getMissingProfilesByIds(userIds: string[]): ActionFuncAsync<UserProfile[]> {
return async (dispatch, getState) => {
const state = getState();
const {profiles} = state.entities.users;
const enabledUserStatuses = getIsUserStatusesConfigEnabled(state);
const missingIds: string[] = [];
userIds.forEach((id) => {
if (!profiles[id]) {
missingIds.push(id);
}
});
if (missingIds.length > 0) {
if (enabledUserStatuses) {
dispatch(getStatusesByIds(missingIds));
}
return dispatch(getProfilesByIds(missingIds));
export function getMissingProfilesByIds(userIds: string[]): ActionFuncAsync<Array<UserProfile['id']>> {
return async (dispatch, getState, {loaders}: any) => {
if (!loaders.missingStatusLoader) {
loaders.missingStatusLoader = new DelayedDataLoader<UserProfile['id']>({
fetchBatch: (userIds) => dispatch(getStatusesByIds(userIds)),
maxBatchSize: maxUserIdsPerProfilesRequest,
wait: missingProfilesWait,
});
}
return {data: []};
if (!loaders.missingProfileLoader) {
loaders.missingProfileLoader = new DelayedDataLoader<UserProfile['id']>({
fetchBatch: (userIds) => dispatch(getProfilesByIds(userIds)),
maxBatchSize: maxUserIdsPerProfilesRequest,
wait: missingProfilesWait,
});
}
const state = getState();
const missingIds = userIds.filter((id) => !selectUser(state, id));
if (missingIds.length > 0) {
if (getIsUserStatusesConfigEnabled(state)) {
loaders.missingStatusLoader.queue(missingIds);
}
await loaders.missingProfileLoader.queueAndWait(missingIds);
}
return {
data: missingIds,
};
};
}
export function getMissingProfilesByUsernames(usernames: string[]): ActionFuncAsync<UserProfile[]> {
return async (dispatch, getState) => {
const {profiles} = getState().entities.users;
const usernameProfiles = Object.values(profiles).reduce((acc, profile: any) => {
acc[profile.username] = profile;
return acc;
}, {} as Record<string, UserProfile>);
const missingUsernames: string[] = [];
usernames.forEach((username) => {
if (!usernameProfiles[username]) {
missingUsernames.push(username);
}
});
if (missingUsernames.length > 0) {
return dispatch(getProfilesByUsernames(missingUsernames));
export function getMissingProfilesByUsernames(usernames: string[]): ActionFuncAsync<Array<UserProfile['username']>> {
return async (dispatch, getState, {loaders}: any) => {
if (!loaders.missingUsernameLoader) {
loaders.missingUsernameLoader = new DelayedDataLoader<UserProfile['username']>({
fetchBatch: (usernames) => dispatch(getProfilesByUsernames(usernames)),
maxBatchSize: maxUserIdsPerProfilesRequest,
wait: missingProfilesWait,
});
}
return {data: []};
const usersByUsername = getUsersByUsername(getState());
const missingUsernames = usernames.filter((username) => !usersByUsername[username]);
if (missingUsernames.length > 0) {
await loaders.missingUsernameLoader.queueAndWait(missingUsernames);
}
return {data: missingUsernames};
};
}

View File

@ -46,7 +46,13 @@ export default function configureStore<S extends GlobalState>({
autoPause: true,
});
const middleware = applyMiddleware(thunk);
const middleware = applyMiddleware(
// @hmhealey I've added this extra argument to Thunks to store information related to the store that can't be
// part of Redux state itself. At the moment, this is so that I can let DataLoaders dispatch actions.
// If you want to make use of this, talk to me first since I want to know more.
thunk.withExtraArgument({loaders: {}}),
);
const enhancers = composeEnhancers(middleware);

View File

@ -0,0 +1,529 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {DelayedDataLoader, BackgroundDataLoader} from './data_loader';
jest.useFakeTimers();
describe('BackgroundDataLoader', () => {
    const maxBatchSize = 10;
    const period = 2000;

    let loader: BackgroundDataLoader<string> | undefined;

    afterEach(() => {
        // Every test must leave the loader idle with no timers still scheduled
        loader?.stopInterval();

        expect(loader?.isBusy()).toBe(false);
        expect(jest.getTimerCount()).toBe(0);
    });

    test('should periodically fetch data from server', () => {
        const fetchBatch = jest.fn();

        loader = new BackgroundDataLoader({
            fetchBatch,
            maxBatchSize,
        });
        loader.startIntervalIfNeeded(period);

        loader.queue(['id1']);

        // Nothing is fetched until the interval elapses
        expect(fetchBatch).not.toHaveBeenCalled();

        jest.advanceTimersByTime(period - 1);

        expect(fetchBatch).not.toHaveBeenCalled();

        jest.advanceTimersByTime(1);

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1']);

        loader.queue(['id2']);

        expect(fetchBatch).toHaveBeenCalledTimes(1);

        jest.advanceTimersByTime(period / 2);

        loader.queue(['id3']);
        loader.queue(['id4']);

        expect(fetchBatch).toHaveBeenCalledTimes(1);

        jest.advanceTimersByTime(period / 2);

        // Everything queued during one interval is sent together as a single batch
        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id2', 'id3', 'id4']);
    });

    test('should dedupe identifiers passed to queue', () => {
        const fetchBatch = jest.fn();

        loader = new BackgroundDataLoader({
            fetchBatch,
            maxBatchSize: 10,
        });
        loader.startIntervalIfNeeded(period);

        loader.queue(['id1', 'id1', 'id1']);
        loader.queue(['id2']);
        loader.queue(['id2']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledWith(['id1', 'id2']);
    });

    // NOTE(review): test name is garbled; it should read "shouldn't fetch data when queue hasn't been called"
    test("shouldn't fetch data when nothing queue hasn't been called", () => {
        const fetchBatch = jest.fn();

        loader = new BackgroundDataLoader({
            fetchBatch,
            maxBatchSize,
        });
        loader.startIntervalIfNeeded(period);

        expect(jest.getTimerCount()).toBe(1);

        jest.advanceTimersToNextTimer();

        // The interval fired, but with nothing queued, no request is made
        expect(fetchBatch).not.toHaveBeenCalled();
    });

    test('should split requests into batches if too many IDs are added at once', () => {
        const fetchBatch = jest.fn();

        loader = new BackgroundDataLoader({
            fetchBatch,
            maxBatchSize: 3,
        });
        loader.startIntervalIfNeeded(period);

        loader.queue(['id1', 'id2', 'id3', 'id4', 'id5']);

        jest.advanceTimersToNextTimer();

        // Only maxBatchSize identifiers are sent per interval tick
        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1', 'id2', 'id3']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id4', 'id5']);

        loader.queue(['id6']);
        loader.queue(['id7']);
        loader.queue(['id8']);
        loader.queue(['id9']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(3);
        expect(fetchBatch).toHaveBeenCalledWith(['id6', 'id7', 'id8']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(4);
        expect(fetchBatch).toHaveBeenCalledWith(['id9']);
    });

    test('should stop fetching data after stopInterval is called', () => {
        const fetchBatch = jest.fn();

        loader = new BackgroundDataLoader({
            fetchBatch,
            maxBatchSize,
        });

        expect(jest.getTimerCount()).toBe(0);

        loader.queue(['id1']);
        loader.startIntervalIfNeeded(period);

        expect(jest.getTimerCount()).toBe(1);

        jest.advanceTimersByTime(period);

        expect(jest.getTimerCount()).toBe(1);
        expect(fetchBatch).toHaveBeenCalledTimes(1);

        loader.stopInterval();

        expect(jest.getTimerCount()).toBe(0);

        jest.advanceTimersByTime(period);

        // No further fetches occur once the interval has been stopped
        expect(fetchBatch).toHaveBeenCalledTimes(1);
    });
});
describe('DelayedDataLoader', () => {
    const maxBatchSize = 10;
    const wait = 50;

    let loader: DelayedDataLoader<string> | undefined;

    afterEach(() => {
        // Every test must leave the loader idle with no timers still scheduled
        expect(loader?.isBusy()).toBe(false);
        expect(jest.getTimerCount()).toBe(0);
    });

    test('should send a batch of requests after the delay', () => {
        const fetchBatch = jest.fn(() => Promise.resolve());

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize,
            wait,
        });

        expect(jest.getTimerCount()).toBe(0);

        // The first queue call starts the timer
        loader.queue(['id1']);

        expect(jest.getTimerCount()).toBe(1);
        expect(fetchBatch).not.toHaveBeenCalled();

        jest.advanceTimersByTime(wait / 2);

        // Further queue calls before the timer expires join the same batch
        loader.queue(['id2']);
        loader.queue(['id3']);

        expect(jest.getTimerCount()).toBe(1);
        expect(fetchBatch).not.toHaveBeenCalled();

        jest.advanceTimersByTime(wait / 2);

        expect(jest.getTimerCount()).toBe(0);
        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1', 'id2', 'id3']);
    });

    test('should be able to send multiple batches of requests', () => {
        const fetchBatch = jest.fn(() => Promise.resolve());

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize,
            wait,
        });

        loader.queue(['id1']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1']);

        // Queueing again after the first batch was sent starts a fresh timer/batch
        loader.queue(['id2']);
        loader.queue(['id3']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id2', 'id3']);
    });

    test('should be able to have multiple callers await on queueAndWait at once', async () => {
        const fetchBatch = jest.fn().mockResolvedValue(true);

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize,
            wait,
        });

        let firstResolved = false;
        loader.queueAndWait(['id1']).then(() => {
            firstResolved = true;
        });

        let secondResolved = false;
        loader.queueAndWait(['id2']).then(() => {
            secondResolved = true;
        });

        let thirdResolved = false;
        loader.queueAndWait(['id3']).then(() => {
            thirdResolved = true;
        });

        jest.advanceTimersByTime(wait - 1);

        expect(jest.getTimerCount()).toBe(1);
        expect(firstResolved).toBe(false);
        expect(secondResolved).toBe(false);
        expect(thirdResolved).toBe(false);

        jest.advanceTimersByTime(1);

        // The timer has run and fetchBatch has started, but the .then calls in this test won't have run yet
        expect(jest.getTimerCount()).toBe(0);
        expect(firstResolved).toBe(false);
        expect(secondResolved).toBe(false);
        expect(thirdResolved).toBe(false);

        // We need to wait twice: once for fetchBatch to resolve and then once for the .then calls to resolve
        await Promise.resolve();
        await Promise.resolve();

        expect(firstResolved).toBe(true);
        expect(secondResolved).toBe(true);
        expect(thirdResolved).toBe(true);
    });

    test('should be able to start a new batch while the first one is in-progress', async () => {
        const fetchBatch = jest.fn().mockResolvedValue(true);

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize,
            wait,
        });

        let firstResolved = false;
        loader.queueAndWait(['id1']).then(() => {
            firstResolved = true;
        });

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1']);
        expect(firstResolved).toBe(false);

        // Queue more data while the first fetch is still in flight
        let secondResolved = false;
        loader.queueAndWait(['id2']).then(() => {
            secondResolved = true;
        });

        // Wait twice as in the previous test
        await Promise.resolve();
        await Promise.resolve();

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(firstResolved).toBe(true);
        expect(secondResolved).toBe(false);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id2']);
        expect(secondResolved).toBe(false);

        // Similar to above, wait once...
        await Promise.resolve();

        // ...start a third batch...
        let thirdResolved = false;
        loader.queueAndWait(['id3']).then(() => {
            thirdResolved = true;
        });

        // and then wait the second time
        await Promise.resolve();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(secondResolved).toBe(true);
        expect(thirdResolved).toBe(false);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(3);
        expect(fetchBatch).toHaveBeenCalledWith(['id3']);
        expect(thirdResolved).toBe(false);

        await Promise.resolve();
        await Promise.resolve();

        expect(fetchBatch).toHaveBeenCalledTimes(3);
        expect(thirdResolved).toBe(true);
    });

    test('should split requests into batches if too many IDs are added at once', () => {
        const fetchBatch = jest.fn(() => Promise.resolve());

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize: 3,
            wait,
        });

        loader.queue(['id1', 'id2', 'id3', 'id4', 'id5']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1', 'id2', 'id3']);

        // A new timeout should have started to get the second batch of data
        expect(jest.getTimerCount()).toBe(1);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id4', 'id5']);
    });

    test('should split requests into batches if too many IDs are added across multiple calls', () => {
        const fetchBatch = jest.fn(() => Promise.resolve());

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize: 3,
            wait,
        });

        loader.queue(['id1', 'id2']);
        loader.queue(['id3', 'id4']);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1', 'id2', 'id3']);

        // A new timeout should have started to get the second batch of data
        expect(jest.getTimerCount()).toBe(1);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id4']);
    });

    test('should wait until all of the data requested is received before resolving a promise', async () => {
        const fetchBatch = jest.fn(() => Promise.resolve());

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize: 3,
            wait,
        });

        let firstResolved = false;
        loader.queueAndWait(['id1', 'id2']).then(() => {
            firstResolved = true;
        });

        let secondResolved = false;
        loader.queueAndWait(['id3', 'id4']).then(() => {
            secondResolved = true;
        });

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1', 'id2', 'id3']);
        expect(firstResolved).toBe(false);
        expect(secondResolved).toBe(false);

        await Promise.resolve();
        await Promise.resolve();

        // The first promise should be resolved since all of its data has been received, but the second one shouldn't
        expect(firstResolved).toBe(true);
        expect(secondResolved).toBe(false);

        // A new timer should've started to get the rest of the data
        expect(jest.getTimerCount()).toBe(1);

        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id4']);

        await Promise.resolve();
        await Promise.resolve();

        expect(firstResolved).toBe(true);
        expect(secondResolved).toBe(true);
    });

    test('should correctly split and wait for data to be requested while still deduping identifiers', async () => {
        const fetchBatch = jest.fn(() => Promise.resolve());

        loader = new DelayedDataLoader({
            fetchBatch,
            maxBatchSize: 3,
            wait,
        });

        let firstResolved = false;
        loader.queueAndWait(['id1', 'id2']).then(() => {
            firstResolved = true;
        });

        let secondResolved = false;
        loader.queueAndWait(['id3', 'id2']).then(() => {
            secondResolved = true;
        });

        let thirdResolved = false;
        loader.queueAndWait(['id3', 'id4']).then(() => {
            thirdResolved = true;
        });

        let fourthResolved = false;
        loader.queueAndWait(['id2', 'id3']).then(() => {
            fourthResolved = true;
        });

        let fifthResolved = false;
        loader.queueAndWait(['id3', 'id4', 'id5', 'id6', 'id7']).then(() => {
            fifthResolved = true;
        });

        // First batch
        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(1);
        expect(fetchBatch).toHaveBeenCalledWith(['id1', 'id2', 'id3']);

        await Promise.resolve();
        await Promise.resolve();

        expect(firstResolved).toBe(true);
        expect(secondResolved).toBe(true);
        expect(thirdResolved).toBe(false);
        expect(fourthResolved).toBe(true);
        expect(fifthResolved).toBe(false);

        // Second batch
        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(2);
        expect(fetchBatch).toHaveBeenCalledWith(['id4', 'id5', 'id6']);

        await Promise.resolve();
        await Promise.resolve();

        expect(firstResolved).toBe(true);
        expect(secondResolved).toBe(true);
        expect(thirdResolved).toBe(true);
        expect(fourthResolved).toBe(true);
        expect(fifthResolved).toBe(false);

        // Third batch
        jest.advanceTimersToNextTimer();

        expect(fetchBatch).toHaveBeenCalledTimes(3);
        expect(fetchBatch).toHaveBeenCalledWith(['id7']);

        await Promise.resolve();
        await Promise.resolve();

        expect(firstResolved).toBe(true);
        expect(secondResolved).toBe(true);
        expect(thirdResolved).toBe(true);
        expect(fourthResolved).toBe(true);
        expect(fifthResolved).toBe(true);
    });
});

View File

@ -0,0 +1,204 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
/**
* A DataLoader is an object that can be used to batch requests for fetching objects from the server for performance
* reasons.
*/
abstract class DataLoader<Identifier, Result = unknown> {
    protected readonly fetchBatch: (identifiers: Identifier[]) => Result;
    private readonly maxBatchSize: number;

    // Identifiers that have been queued but not yet handed off to fetchBatch. A Set is used so
    // that repeated requests for the same identifier are automatically deduplicated.
    protected readonly pendingIdentifiers = new Set<Identifier>();

    constructor(args: {
        fetchBatch: (identifiers: Identifier[]) => Result;
        maxBatchSize: number;
    }) {
        this.fetchBatch = args.fetchBatch;
        this.maxBatchSize = args.maxBatchSize;
    }

    /**
     * Adds the given identifiers to the set of pending ones. Falsy identifiers (such as empty
     * strings) are ignored, and duplicates are merged by the backing Set.
     */
    public queue(identifiersToLoad: Identifier[]): void {
        for (const identifier of identifiersToLoad) {
            if (identifier) {
                this.pendingIdentifiers.add(identifier);
            }
        }
    }

    /**
     * prepareBatch removes an array of identifiers for data to be loaded from pendingIdentifiers and returns it. If
     * pendingIdentifiers contains more than maxBatchSize identifiers, then only that many are returned, but if it
     * contains fewer than that, all of the identifiers are returned and pendingIdentifiers is cleared.
     */
    protected prepareBatch(): {identifiers: Identifier[]; moreToLoad: boolean} {
        const nextBatch: Identifier[] = [];

        // Sets iterate in insertion order, so this takes the oldest pending identifiers first,
        // stopping once the batch is full
        for (const identifier of this.pendingIdentifiers) {
            if (nextBatch.length >= this.maxBatchSize) {
                break;
            }

            nextBatch.push(identifier);
        }

        // Remove the batched identifiers so they won't be requested again
        for (const identifier of nextBatch) {
            this.pendingIdentifiers.delete(identifier);
        }

        return {
            identifiers: nextBatch,
            moreToLoad: this.pendingIdentifiers.size > 0,
        };
    }

    /**
     * isBusy is a method for testing which returns true if the DataLoader is waiting to request or receive any data.
     */
    public isBusy(): boolean {
        return this.pendingIdentifiers.size > 0;
    }
}
/**
* A BackgroundDataLoader is an object that can be used to batch requests for fetching objects from the server. Instead
* of requesting data immediately, it will periodically check if any objects need to be requested from the server.
*
* It's intended to be used for loading low priority data such as information needed in response to WebSocket messages
* that the user won't see immediately.
*/
export class BackgroundDataLoader<Identifier, Result = unknown> extends DataLoader<Identifier, Result> {
    // ID of the active interval timer, or -1 when the loader isn't polling
    private intervalId: number = -1;

    /**
     * Starts the periodic fetch loop if it isn't already running. Calling this repeatedly has
     * no additional effect while an interval is active.
     */
    public startIntervalIfNeeded(ms: number): void {
        if (this.intervalId === -1) {
            this.intervalId = window.setInterval(() => this.fetchBatchNow(), ms);
        }
    }

    /**
     * Stops the periodic fetch loop. Pending identifiers remain queued.
     */
    public stopInterval(): void {
        clearInterval(this.intervalId);
        this.intervalId = -1;
    }

    /**
     * Immediately fetches a single batch of pending identifiers, doing nothing if none are queued.
     */
    public fetchBatchNow(): void {
        const {identifiers} = this.prepareBatch();

        if (identifiers.length > 0) {
            this.fetchBatch(identifiers);
        }
    }

    /**
     * isBusy is a method for testing which returns true if the DataLoader is waiting to request or receive any data.
     */
    public isBusy(): boolean {
        return this.intervalId !== -1 || super.isBusy();
    }
}
/**
* A DelayedDataLoader is an object that can be used to batch requests for fetching objects from the server. Instead of
* requesting data immediately, it will wait for an amount of time and then send a request to the server for all of
* the data which would've been requested during that time.
*
* More specifically, when queue is first called, a timer will be started. Until that timer expires, any other
* calls to queue will have the provided identifiers added to the ones from the initial call. When the timer
* finally expires, the request will be sent to the server to fetch that data. After that, the timer will be reset and
* the next call to queue will start a new one.
*
* DelayedDataLoader is intended to be used for loading data for components which are unaware of each other and may appear
* in different places in the UI from each other which could otherwise send repeated requests for the same or similar
* data as one another.
*/
export class DelayedDataLoader<Identifier> extends DataLoader<Identifier, Promise<unknown>> {
    // How long (in ms) to wait after a batch's first identifier is queued before fetching it
    private readonly wait: number = -1;

    // ID of the currently-scheduled timeout, or -1 when no batch is scheduled
    private timeoutId: number = -1;

    // One entry per unresolved queueAndWait call; an entry's promise is resolved once every
    // identifier it requested has been included in a completed fetch
    private timeoutCallbacks = new Set<{
        identifiers: Set<Identifier>;
        resolve(): void;
    }>();

    constructor(args: {
        fetchBatch: (identifiers: Identifier[]) => Promise<unknown>;
        maxBatchSize: number;
        wait: number;
    }) {
        super(args);

        this.wait = args.wait;
    }

    /**
     * Queues identifiers to be fetched with the next batch without waiting for the result.
     */
    public queue(identifiersToLoad: Identifier[]): void {
        super.queue(identifiersToLoad);

        this.startTimeoutIfNeeded();
    }

    /**
     * Queues identifiers to be fetched and returns a promise which resolves once all of the given
     * identifiers have been fetched, which may take multiple batches when more than maxBatchSize
     * identifiers are pending.
     */
    public queueAndWait(identifiersToLoad: Identifier[]): Promise<void> {
        return new Promise((resolve) => {
            super.queue(identifiersToLoad);

            // Save the callback that will resolve this promise so that the caller of this method can wait for its
            // data to be loaded
            this.timeoutCallbacks.add({
                identifiers: new Set(identifiersToLoad),
                resolve,
            });

            this.startTimeoutIfNeeded();
        });
    }

    private startTimeoutIfNeeded(): void {
        if (this.timeoutId !== -1) {
            return;
        }

        this.timeoutId = window.setTimeout(() => {
            // Ensure that timeoutId is cleared and we pop identifiers off of pendingIdentifiers before doing
            // anything async so that any calls to queue that are made while fetching this batch will be
            // added to the next batch instead
            this.timeoutId = -1;

            const {identifiers, moreToLoad} = this.prepareBatch();

            // Start another timeout if there's still more data to load
            if (moreToLoad) {
                this.startTimeoutIfNeeded();
            }

            this.fetchBatch(identifiers).then(() => this.resolveCompletedCallbacks(identifiers));
        }, this.wait);
    }

    // Removes the just-fetched identifiers from every waiting queueAndWait entry and resolves
    // any entries which now have nothing left to wait for
    private resolveCompletedCallbacks(identifiers: Identifier[]): void {
        for (const callback of this.timeoutCallbacks) {
            for (const identifier of identifiers) {
                callback.identifiers.delete(identifier);
            }

            if (callback.identifiers.size === 0) {
                this.timeoutCallbacks.delete(callback);
                callback.resolve();
            }
        }
    }

    /**
     * isBusy is a method for testing which returns true if the DataLoader is waiting to request or receive any data.
     */
    public isBusy(): boolean {
        return super.isBusy() || this.timeoutCallbacks.size > 0 || this.timeoutId !== -1;
    }
}

View File

@ -16,7 +16,9 @@ import type {GlobalState} from 'types/store';
import {defaultIntl} from './helpers/intl-test-helper';
export default function testConfigureStore<State extends GlobalState>(initialState?: DeepPartial<State>) {
    // Mirror the real store's middleware setup by providing the same extra thunk argument, so
    // that actions which rely on the `loaders` object behave identically under test.
    return configureStore<State, ThunkDispatch<State, Record<string, never>, AnyAction>>([
        thunk.withExtraArgument({loaders: {}}),
    ])(initialState as State);
}
export function mockStore<State extends GlobalState>(initialState?: DeepPartial<State>, intl = defaultIntl) {