PERF: Add cache for Category.asyncFindByIds (#25531)

This way, we don't fetch the same category multiple times.
This commit is contained in:
Daniel Waterworth 2024-02-02 14:13:12 -06:00 committed by GitHub
parent a239e8f27f
commit 5f06d6490d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 210 additions and 3 deletions

View File

@@ -0,0 +1,54 @@
// Used for Category.asyncFindByIds
//
// A batching cache: a single lookup can cover many ids at once, and ids
// that are already resolved (or still in flight) are never requested twice.
export class MultiCache {
  constructor(cb) {
    this.cb = cb;
    this.values = new Map();
    this.fetchTimes = [];
  }

  reset() {
    this.values = new Map();
    this.fetchTimes = [];
  }

  // True when the two most recent fetches happened less than a second apart.
  hadTooManyCalls() {
    const [previous, latest] = this.fetchTimes;
    return previous && latest && latest - previous < 1000;
  }

  async fetch(ids) {
    // Remember only the two most recent fetch timestamps.
    this.fetchTimes = [this.fetchTimes.at(-1), new Date()];

    const missing = ids.filter((id) => !this.values.has(id));

    if (missing.length > 0) {
      // One shared request promise is cached under every missing id.
      const pending = this.cb(missing);

      for (const id of missing) {
        this.values.set(id, pending);
      }

      // On failure, evict the failed ids so a later fetch can retry them.
      pending.catch(() => {
        for (const id of missing) {
          this.values.delete(id);
        }
      });
    }

    const response = new Map();

    for (const id of ids) {
      const batch = await this.values.get(id);
      response.set(id, batch.get(id));
    }

    return response;
  }
}

View File

@@ -1,3 +1,4 @@
import { warn } from "@ember/debug";
import { get } from "@ember/object";
import { ajax } from "discourse/lib/ajax";
import { NotificationLevels } from "discourse/lib/notification-levels";
@@ -8,6 +9,7 @@ import User from "discourse/models/user";
import { getOwnerWithFallback } from "discourse-common/lib/get-owner";
import getURL from "discourse-common/lib/get-url";
import discourseComputed, { on } from "discourse-common/utils/decorators";
import { MultiCache } from "discourse-common/utils/multi-cache";
const STAFF_GROUP_NAME = "staff";
const CATEGORY_ASYNC_SEARCH_CACHE = {};
@@ -361,6 +363,18 @@ const Category = RestModel.extend({
let _uncategorized;
const categoryMultiCache = new MultiCache(async (ids) => {
const result = await ajax("/categories/find", { data: { ids } });
return new Map(
result["categories"].map((category) => [category.id, category])
);
});
export function resetCategoryCache() {
categoryMultiCache.reset();
}
Category.reopenClass({
// Sort subcategories directly under parents
sortCategories(categories) {
@@ -468,10 +482,15 @@ Category.reopenClass({
},
async asyncFindByIds(ids = []) {
const result = await ajax("/categories/find", { data: { ids } }); const result = await categoryMultiCache.fetch(ids);
if (categoryMultiCache.hadTooManyCalls()) {
warn(
"Multiple calls to Category.asyncFindByIds within a second. Could they be combined?"
);
}
const categories = result["categories"].map((category) => const categories = ids.map((id) =>
Site.current().updateCategory(category) Site.current().updateCategory(result.get(id))
); );
// Update loadedCategoryIds list // Update loadedCategoryIds list

View File

@@ -21,6 +21,7 @@ import { resetSettings as resetThemeSettings } from "discourse/lib/theme-setting
import { ScrollingDOMMethods } from "discourse/mixins/scrolling";
import Session from "discourse/models/session";
import User from "discourse/models/user";
import { resetCategoryCache } from "discourse/models/category";
import SiteSettingService from "discourse/services/site-settings";
import { flushMap } from "discourse/services/store";
import pretender, {
@@ -333,6 +334,8 @@ export default function setupTests(config) {
PreloadStore.reset();
resetSite();
resetCategoryCache();
sinon.stub(ScrollingDOMMethods, "screenNotFull");
sinon.stub(ScrollingDOMMethods, "bindOnScroll");
sinon.stub(ScrollingDOMMethods, "unbindOnScroll");

View File

@@ -0,0 +1,131 @@
import { module, test } from "qunit";
import { MultiCache } from "discourse-common/utils/multi-cache";
module("Unit | Utils | multi-cache", function (hooks) {
  let requests, cache;

  hooks.beforeEach(() => {
    requests = [];

    // Every cache miss pushes an {ids, resolve, reject} record so each test
    // can complete the underlying request by hand, in any order.
    cache = new MultiCache(
      (ids) =>
        new Promise((resolve, reject) => {
          requests.push({ ids, resolve, reject });
        })
    );
  });

  test("cache miss then hit", async (assert) => {
    const missResponse = cache.fetch([10]);

    assert.strictEqual(requests.length, 1, "one triggered request");

    const [request] = requests;

    // Array#clear is an Ember prototype extension (deprecated); plain-JS
    // truncation keeps this test independent of it.
    requests.length = 0;

    assert.deepEqual(request.ids, [10], "request has correct ids");

    request.resolve(new Map([[10, "foo"]]));

    const missResult = await missResponse;
    assert.strictEqual(missResult.constructor, Map, "response is a Map");
    assert.strictEqual(missResult.size, 1, "response has one entry");
    assert.strictEqual(missResult.get(10), "foo", "response entry is correct");

    const hitResponse = cache.fetch([10]);

    assert.strictEqual(requests.length, 0);

    const hitResult = await hitResponse;
    assert.strictEqual(hitResult.constructor, Map, "second response is a Map");
    assert.strictEqual(hitResult.size, 1, "second response has one entry");
    assert.strictEqual(
      hitResult.get(10),
      "foo",
      "second response entry is correct"
    );
  });

  test("failure then refetch", async (assert) => {
    const response1 = cache.fetch([10]);

    assert.strictEqual(requests.length, 1);

    const [request1] = requests;
    requests.length = 0;

    assert.deepEqual(request1.ids, [10]);

    request1.reject();

    // assert.rejects returns a promise: awaiting it both performs the
    // assertion and marks the rejection as handled, so no separate
    // try/catch swallow is needed.
    await assert.rejects(response1);

    // the failed id was evicted, so fetching again triggers a new request
    const response2 = cache.fetch([10]);

    assert.strictEqual(requests.length, 1);

    const [request2] = requests;

    assert.deepEqual(request2.ids, [10]);

    request2.resolve(new Map([[10, "foo"]]));

    const result = await response2;
    assert.strictEqual(result.constructor, Map);
    assert.strictEqual(result.size, 1);
    assert.strictEqual(result.get(10), "foo");
  });

  test("multiple requests before resolution", async (assert) => {
    const response1 = cache.fetch([10]);
    const response2 = cache.fetch([10]);

    // the second fetch reuses the in-flight request
    assert.strictEqual(requests.length, 1);

    const [request] = requests;

    assert.deepEqual(request.ids, [10]);

    request.resolve(new Map([[10, "foo"]]));

    for (const response of [response1, response2]) {
      const result = await response;
      assert.strictEqual(result.constructor, Map);
      assert.strictEqual(result.size, 1);
      assert.strictEqual(result.get(10), "foo");
    }
  });

  test("overlapping requests", async (assert) => {
    const response1 = cache.fetch([10, 20]);
    const response2 = cache.fetch([10, 30]);

    assert.strictEqual(requests.length, 2);

    const [request1, request2] = requests;

    assert.deepEqual(request1.ids, [10, 20]);
    // id 10 is already pending, so only 30 is requested the second time
    assert.deepEqual(request2.ids, [30]);

    request1.resolve(
      new Map([
        [10, "foo"],
        [20, "bar"],
      ])
    );
    request2.resolve(new Map([[30, "baz"]]));

    const result1 = await response1;
    assert.strictEqual(result1.constructor, Map);
    assert.strictEqual(result1.size, 2);
    assert.strictEqual(result1.get(10), "foo");
    assert.strictEqual(result1.get(20), "bar");

    const result2 = await response2;
    assert.strictEqual(result2.constructor, Map);
    assert.strictEqual(result2.size, 2);
    assert.strictEqual(result2.get(10), "foo");
    assert.strictEqual(result2.get(30), "baz");
  });
});