diff --git a/package.json b/package.json
index 172c24ecb10..959f9e96ac0 100644
--- a/package.json
+++ b/package.json
@@ -126,7 +126,6 @@
     "@types/lingui__macro": "^3",
     "@types/lodash": "4.14.149",
     "@types/logfmt": "^1.2.1",
-    "@types/lru-cache": "^5.1.0",
     "@types/mousetrap": "1.6.3",
     "@types/node": "16.11.22",
     "@types/papaparse": "5.3.2",
@@ -309,7 +308,7 @@
     "lezer-tree": "0.13.2",
     "lodash": "4.17.21",
     "logfmt": "^1.3.2",
-    "lru-cache": "6.0.0",
+    "lru-cache": "7.3.1",
     "memoize-one": "6.0.0",
     "moment": "2.29.1",
     "moment-timezone": "0.5.34",
diff --git a/public/app/plugins/datasource/loki/components/__snapshots__/LokiExploreQueryEditor.test.tsx.snap b/public/app/plugins/datasource/loki/components/__snapshots__/LokiExploreQueryEditor.test.tsx.snap
index e41e5c921ef..5562e353c36 100644
--- a/public/app/plugins/datasource/loki/components/__snapshots__/LokiExploreQueryEditor.test.tsx.snap
+++ b/public/app/plugins/datasource/loki/components/__snapshots__/LokiExploreQueryEditor.test.tsx.snap
@@ -67,38 +67,168 @@ exports[`LokiExploreQueryEditor should render component 1`] = `
       "labelFetchTs": 0,
       "labelKeys": Array [],
       "labelsCache": LRUCache {
-        Symbol(max): 10,
-        Symbol(lengthCalculator): [Function],
-        Symbol(allowStale): false,
-        Symbol(maxAge): 0,
-        Symbol(dispose): undefined,
-        Symbol(noDisposeOnSet): false,
-        Symbol(updateAgeOnGet): false,
-        Symbol(cache): Map {},
-        Symbol(lruList): Yallist {
-          "head": null,
+        "allowStale": false,
+        "disposeAfter": null,
+        "disposed": null,
+        "free": Stack {
+          "heap": Uint8Array [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+          ],
           "length": 0,
-          "tail": null,
         },
-        Symbol(length): 0,
+        "head": 0,
+        "initialFill": 1,
+        "keyList": Array [
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+        ],
+        "keyMap": Map {},
+        "max": 10,
+        "maxSize": 0,
+        "next": Uint8Array [
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+        ],
+        "noDisposeOnSet": false,
+        "prev": Uint8Array [
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+        ],
+        "size": 0,
+        "sizeCalculation": undefined,
+        "tail": 0,
+        "ttl": 0,
+        "ttlAutopurge": false,
+        "ttlResolution": 1,
+        "updateAgeOnGet": false,
+        "valList": Array [
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+        ],
       },
       "lookupsDisabled": false,
       "request": [Function],
       "seriesCache": LRUCache {
-        Symbol(max): 10,
-        Symbol(lengthCalculator): [Function],
-        Symbol(allowStale): false,
-        Symbol(maxAge): 0,
-        Symbol(dispose): undefined,
-        Symbol(noDisposeOnSet): false,
-        Symbol(updateAgeOnGet): false,
-        Symbol(cache): Map {},
-        Symbol(lruList): Yallist {
-          "head": null,
+        "allowStale": false,
+        "disposeAfter": null,
+        "disposed": null,
+        "free": Stack {
+          "heap": Uint8Array [
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0,
+          ],
           "length": 0,
-          "tail": null,
         },
-        Symbol(length): 0,
+        "head": 0,
+        "initialFill": 1,
+        "keyList": Array [
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+        ],
+        "keyMap": Map {},
+        "max": 10,
+        "maxSize": 0,
+        "next": Uint8Array [
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+        ],
+        "noDisposeOnSet": false,
+        "prev": Uint8Array [
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+          0,
+        ],
+        "size": 0,
+        "sizeCalculation": undefined,
+        "tail": 0,
+        "ttl": 0,
+        "ttlAutopurge": false,
+        "ttlResolution": 1,
+        "updateAgeOnGet": false,
+        "valList": Array [
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+          null,
+        ],
       },
       "start": [Function],
       "started": false,
diff --git a/public/app/plugins/datasource/loki/language_provider.ts b/public/app/plugins/datasource/loki/language_provider.ts
index 80bf63afd0d..ef4691fdd24 100644
--- a/public/app/plugins/datasource/loki/language_provider.ts
+++ b/public/app/plugins/datasource/loki/language_provider.ts
@@ -78,8 +78,8 @@ export default class LokiLanguageProvider extends LanguageProvider {
    * not account for different size of a response. If that is needed a `length` function can be added in the options.
    * 10 as a max size is totally arbitrary right now.
    */
-  private seriesCache = new LRU<string, Record<string, string[]>>(10);
-  private labelsCache = new LRU<string, string[]>(10);
+  private seriesCache = new LRU<string, Record<string, string[]>>({ max: 10 });
+  private labelsCache = new LRU<string, string[]>({ max: 10 });
 
   constructor(datasource: LokiDatasource, initialValues?: any) {
     super();
diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts
index 1a4c1395024..2397cb8c7ed 100644
--- a/public/app/plugins/datasource/prometheus/datasource.ts
+++ b/public/app/plugins/datasource/prometheus/datasource.ts
@@ -72,7 +72,7 @@ export class PrometheusDatasource
   access: 'direct' | 'proxy';
   basicAuth: any;
   withCredentials: any;
-  metricsNameCache = new LRU(10);
+  metricsNameCache = new LRU({ max: 10 });
   interval: string;
   queryTimeout: string | undefined;
   httpMethod: string;
diff --git a/public/app/plugins/datasource/prometheus/language_provider.ts b/public/app/plugins/datasource/prometheus/language_provider.ts
index 1e1a63df6b5..6d439b38e26 100644
--- a/public/app/plugins/datasource/prometheus/language_provider.ts
+++ b/public/app/plugins/datasource/prometheus/language_provider.ts
@@ -90,7 +90,7 @@ export default class PromQlLanguageProvider extends LanguageProvider {
    * not account for different size of a response. If that is needed a `length` function can be added in the options.
    * 10 as a max size is totally arbitrary right now.
    */
-  private labelsCache = new LRU<string, Record<string, string[]>>(10);
+  private labelsCache = new LRU<string, Record<string, string[]>>({ max: 10 });
 
   constructor(datasource: PrometheusDatasource, initialValues?: Partial<PromQlLanguageProvider>) {
     super();
diff --git a/public/app/types/lru-cache.d.ts b/public/app/types/lru-cache.d.ts
new file mode 100644
index 00000000000..f515ec58fe6
--- /dev/null
+++ b/public/app/types/lru-cache.d.ts
@@ -0,0 +1,259 @@
+// Type definitions for lru-cache 7.1.0
+// TypeScript Version: 4.5
+declare class LRUCache<K, V> {
+  constructor(options?: LRUCache.Options<K, V>);
+
+  /**
+   * Return total length of objects in cache taking into account `length` options function.
+   */
+  readonly length: number;
+
+  /**
+   * Return total quantity of objects currently in cache. Note,
+   * that `stale` (see options) items are returned as part of this item count.
+   */
+  readonly itemCount: number;
+
+  /**
+   * Same as Options.allowStale.
+   */
+  allowStale: boolean;
+
+  /**
+   * Same as Options.length.
+   */
+  lengthCalculator(value: V): number;
+
+  /**
+   * Same as Options.max. Resizes the cache when the `max` changes.
+   */
+  max: number;
+
+  /**
+   * Same as Options.maxAge. Resizes the cache when the `maxAge` changes.
+   */
+  maxAge: number;
+
+  /**
+   * Will update the "recently used"-ness of the key. They do what you think.
+   * `maxAge` is optional and overrides the cache `maxAge` option if provided.
+   */
+  set(key: K, value: V, options?: LRUCache.SetOptions<K, V>): boolean;
+
+  /**
+   * Will update the "recently used"-ness of the key. They do what you think.
+   * `maxAge` is optional and overrides the cache `maxAge` option if provided.
+   *
+   * If the key is not found, will return `undefined`.
+   */
+  get(key: K): V | undefined;
+
+  /**
+   * Returns the key value (or `undefined` if not found) without updating
+   * the "recently used"-ness of the key.
+   *
+   * (If you find yourself using this a lot, you might be using the wrong
+   * sort of data structure, but there are some use cases where it's handy.)
+   */
+  peek(key: K): V | undefined;
+
+  /**
+   * Check if a key is in the cache, without updating the recent-ness
+   * or deleting it for being stale.
+   */
+  has(key: K): boolean;
+
+  /**
+   * Deletes a key out of the cache.
+   */
+  del(key: K): void;
+
+  /**
+   * Clear the cache entirely, throwing away all values.
+   */
+  reset(): void;
+
+  /**
+   * Manually iterates over the entire cache proactively pruning old entries.
+   */
+  prune(): void;
+
+  /**
+   * Just like `Array.prototype.forEach`. Iterates over all the keys in the cache,
+   * in order of recent-ness. (Ie, more recently used items are iterated over first.)
+   */
+  forEach<T = this>(callbackFn: (this: T, value: V, key: K, cache: this) => void, thisArg?: T): void;
+
+  /**
+   * The same as `cache.forEach(...)` but items are iterated over in reverse order.
+   * (ie, less recently used items are iterated over first.)
+   */
+  forEach<T = this>(callbackFn: (this: T, value: V, key: K, cache: this) => void, thisArg?: T): void;
+
+  /**
+   * Return an array of the keys in the cache.
+   */
+  keys(): K[];
+
+  /**
+   * Return an array of the values in the cache.
+   */
+  values(): V[];
+
+  /**
+   * Return an array of the cache entries ready for serialization and usage with `destinationCache.load(arr)`.
+   */
+  dump(): Array<LRUCache.Entry<K, V>>;
+
+  /**
+   * Loads another cache entries array, obtained with `sourceCache.dump()`,
+   * into the cache. The destination cache is reset before loading new entries
+   *
+   * @param cacheEntries Obtained from `sourceCache.dump()`
+   */
+  load(cacheEntries: ReadonlyArray<LRUCache.Entry<K, V>>): void;
+}
+
+// eslint-disable-next-line no-redeclare
+declare namespace LRUCache {
+  interface Options<K, V> {
+    /**
+     * @type {number | undefined}
+     * the number of most recently used items to keep.
+     * note that we may store fewer items than this if maxSize is hit.
+     */
+    max?: number | undefined;
+
+    // if you wish to track item size, you must provide a maxSize
+    // note that we still will only keep up to max *actual items*,
+    // so size tracking may cause fewer than max items to be stored.
+    // At the extreme, a single item of maxSize size will cause everything
+    // else in the cache to be dropped when it is added. Use with caution!
+    // Note also that size tracking can negatively impact performance,
+    // though for most cases, only minimally.
+    maxSize?: number | undefined;
+
+    // buffers or other items where memory size depends on the object itself.
+    // also note that oversized items do NOT immediately get dropped from
+    // the cache, though they will cause faster turnover in the storage.
+    // Return an positive integer which is the size of the item,
+    // if a positive integer is not returned, will use 0 as the size.
+    sizeCalculation?: (value, key) => number;
+
+    // function to call when the item is removed from the cache
+    // Note that using this can negatively impact performance.
+    dispose?: (value, key) => void;
+
+    /**
+     * By default, if you set a `dispose()` method, then it'll be called whenever
+     * a `set()` operation overwrites an existing key. If you set this option,
+     * `dispose()` will only be called when a key falls out of the cache,
+     * not when it is overwritten.
+     */
+    noDisposeOnSet?: boolean | undefined;
+
+    // max time to live for items before they are considered stale
+    // note that stale items are NOT preemptively removed by default,
+    // and MAY live in the cache, contributing to its LRU max, long after
+    // they have expired.
+    // Also, as this cache is optimized for LRU/MRU operations, some of
+    // the staleness/TTL checks will reduce performance, as they will incur
+    // overhead by deleting items.
+    // Must be a positive integer in ms, defaults to 0, which means "no TTL"
+    ttl?: number;
+
+    // Minimum amount of time in ms in which to check for staleness.
+    // Defaults to 1, which means that the current time
+    // is checked at most once per millisecond.
+    // Set to 0 to check the current time every time staleness is tested.
+    // Note that setting this to a higher value
+    // will improve performance somewhat while using ttl tracking,
+    // albeit at the expense of keeping stale items
+    // around a bit longer than intended.
+    ttlResolution?: number;
+
+    // Preemptively remove stale items from the cache.
+    // Note that this may significantly degrade performance,
+    // especially if the cache is storing a large number of items.
+    // It is almost always best to just leave the stale items in the cache,
+    // and let them fall out as new items are added.
+    // Note that this means that allowStale is a bit pointless,
+    // as stale items will be deleted almost as soon as they expire.
+    // Use with caution!
+    ttlAutopurge?: boolean;
+
+    // By default, if you set ttl, it'll only delete stale items
+    // from the cache when you get(key).
+    // That is, it's not preemptively pruning items.
+    // If you set allowStale:true, it'll return the stale value
+    // as well as deleting it. If you don't set this,
+    // then it'll return undefined when you try to get a stale entry.
+    // Note that when a stale entry is fetched,
+    // even if it is returned due to allowStale being set,
+    // it is removed from the cache immediately.
+    // You can immediately put it back in the cache if you wish,
+    // thus resetting the TTL.
+    // This may be overridden by passing an options object to cache.get().
+    // The cache.has() method will always return false for stale items.
+    // Boolean, default false, only relevant if ttl is set.
+    allowStale?: boolean;
+
+    // When using time-expiring entries with ttl,
+    // setting this to true will make each item's
+    // age reset to 0 whenever it is retrieved from cache with get(),
+    // causing it to not expire.
+    // (It can still fall out of cache based on recency of use, of course.)
+    // This may be overridden by passing an options object to cache.get().
+    // Boolean, default false, only relevant if ttl is set.
+    updateAgeOnGet?: boolean;
+
+    // update the age of items on cache.has(), renewing their TTL
+    // boolean, default false
+    updateAgeOnHas?: boolean;
+
+    // update the "recently-used"-ness of items on cache.has()
+    // boolean, default false
+    updateRecencyOnHas?: boolean;
+
+    /**
+     * Function that is used to calculate the length of stored items.
+     * If you're storing strings or buffers, then you probably want to do
+     * something like `function(n, key){return n.length}`. The default
+     * is `function(){return 1}`, which is fine if you want to store
+     * `max` like-sized things. The item is passed as the first argument,
+     * and the key is passed as the second argument.
+     */
+    length?(value: V, key?: K): number;
+
+    /**
+     * By default, if you set a `maxAge`, it'll only actually pull stale items
+     * out of the cache when you `get(key)`. (That is, it's not pre-emptively
+     * doing a `setTimeout` or anything.) If you set `stale:true`, it'll return
+     * the stale value before deleting it. If you don't set this, then it'll
+     * return `undefined` when you try to get a stale entry,
+     * as if it had already been deleted.
+     */
+    stale?: boolean | undefined;
+  }
+
+  interface SetOptions<K, V> {
+    ttl?: number;
+    // Will prevent calling the sizeCalculation function
+    // and just use the specified number if it is a positive integer
+    size?: number;
+    // Same as above
+    sizeCalculator?: (value: V) => number;
+    // Will prevent calling a dispose function in the case of overwrites
+    noDisposeOnSet?: boolean;
+  }
+
+  interface Entry<K, V> {
+    k: K;
+    v: V;
+    e: number;
+  }
+}
+
+declare module 'lru-cache' {
+  export = LRUCache;
+}
diff --git a/yarn.lock b/yarn.lock
index d9669eb6682..f86ad710e33 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -10226,13 +10226,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@types/lru-cache@npm:^5.1.0":
-  version: 5.1.1
-  resolution: "@types/lru-cache@npm:5.1.1"
-  checksum: e1d6c0085f61b16ec5b3073ec76ad1be4844ea036561c3f145fc19f71f084b58a6eb600b14128aa95809d057d28f1d147c910186ae51219f58366ffd2ff2e118
-  languageName: node
-  linkType: hard
-
 "@types/marked@npm:4.0.2":
   version: 4.0.2
   resolution: "@types/marked@npm:4.0.2"
@@ -20372,7 +20365,6 @@ __metadata:
     "@types/lingui__macro": ^3
    "@types/lodash": 4.14.149
    "@types/logfmt": ^1.2.1
-    "@types/lru-cache": ^5.1.0
    "@types/mousetrap": 1.6.3
    "@types/node": 16.11.22
    "@types/papaparse": 5.3.2
@@ -20485,7 +20477,7 @@ __metadata:
     lint-staged: 12.3.3
     lodash: 4.17.21
     logfmt: ^1.3.2
-    lru-cache: 6.0.0
+    lru-cache: 7.3.1
     memoize-one: 6.0.0
     mini-css-extract-plugin: 2.5.3
     moment: 2.29.1
@@ -25130,12 +25122,10 @@
   languageName: node
   linkType: hard
 
-"lru-cache@npm:6.0.0, lru-cache@npm:^6.0.0":
-  version: 6.0.0
-  resolution: "lru-cache@npm:6.0.0"
-  dependencies:
-    yallist: ^4.0.0
-  checksum: f97f499f898f23e4585742138a22f22526254fdba6d75d41a1c2526b3b6cc5747ef59c5612ba7375f42aca4f8461950e925ba08c991ead0651b4918b7c978297
+"lru-cache@npm:7.3.1":
+  version: 7.3.1
+  resolution: "lru-cache@npm:7.3.1"
+  checksum: 34bb50c015ffc29fd83545e912f28cea6e03fbf41c497fa220c4f131b990f9ddf95babac98745b416cbc6c0d835254d61668d09b8a4ecb476934546afc9e51bd
   languageName: node
   linkType: hard
 
@@ -25148,6 +25138,15 @@ __metadata:
   languageName: node
   linkType: hard
 
+"lru-cache@npm:^6.0.0":
+  version: 6.0.0
+  resolution: "lru-cache@npm:6.0.0"
+  dependencies:
+    yallist: ^4.0.0
+  checksum: f97f499f898f23e4585742138a22f22526254fdba6d75d41a1c2526b3b6cc5747ef59c5612ba7375f42aca4f8461950e925ba08c991ead0651b4918b7c978297
+  languageName: node
+  linkType: hard
+
 "lru-memoize@npm:^1.1.0":
   version: 1.1.0
   resolution: "lru-memoize@npm:1.1.0"
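
Reviewer note: the only call-site change in this PR is the constructor, since lru-cache 7 takes an options object instead of a bare max count. The snippet below is a minimal usage sketch against the bundled public/app/types/lru-cache.d.ts typings; the cache instance name and the string/string[] key/value types are illustrative only and are not taken from the Grafana sources.

import LRU from 'lru-cache';

// lru-cache 6: new LRU<string, string[]>(10)
// lru-cache 7: the maximum entry count moves into an options object.
const labelCache = new LRU<string, string[]>({ max: 10 });

labelCache.set('job', ['grafana', 'loki']); // insert and mark as most recently used
const labels = labelCache.get('job');       // string[] | undefined
if (labelCache.has('job')) {
  labelCache.del('job');                    // single-entry delete, as declared in the bundled typings
}
labelCache.reset();                         // clear everything, also per the bundled typings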