
memize

Unabashedly-barebones memoization library with an aim toward speed

/**
 * Memize options object.
 *
 * @typedef MemizeOptions
 *
 * @property {number} [maxSize] Maximum size of the cache.
 */

/**
 * Internal cache entry.
 *
 * @typedef MemizeCacheNode
 *
 * @property {?MemizeCacheNode|undefined} [prev] Previous node.
 * @property {?MemizeCacheNode|undefined} [next] Next node.
 * @property {Array<*>}                   args   Function arguments for cache
 *                                               entry.
 * @property {*}                          val    Function result.
 */

/**
 * Properties of the enhanced function for controlling cache.
 *
 * @typedef MemizeMemoizedFunction
 *
 * @property {()=>void} clear Clear the cache.
 */

/**
 * Accepts a function to be memoized, and returns a new memoized function, with
 * optional options.
 *
 * @template {(...args: any[]) => any} F
 *
 * @param {F}             fn        Function to memoize.
 * @param {MemizeOptions} [options] Options object.
 *
 * @return {((...args: Parameters<F>) => ReturnType<F>) & MemizeMemoizedFunction} Memoized function.
 */
function memize(fn, options) {
	var size = 0;

	/** @type {?MemizeCacheNode|undefined} */
	var head;

	/** @type {?MemizeCacheNode|undefined} */
	var tail;

	options = options || {};

	function memoized(/* ...args */) {
		var node = head,
			len = arguments.length,
			args,
			i;

		searchCache: while (node) {
			// Perform a shallow equality test to confirm whether the node
			// under test is a candidate for the arguments passed. Two arrays
			// are shallowly equal if their length matches and each entry is
			// strictly equal between the two sets. Avoid abstracting to a
			// function which could incur an arguments leaking deoptimization.

			// Check whether node arguments match arguments length
			if (node.args.length !== arguments.length) {
				node = node.next;
				continue;
			}

			// Check whether node arguments match arguments values
			for (i = 0; i < len; i++) {
				if (node.args[i] !== arguments[i]) {
					node = node.next;
					continue searchCache;
				}
			}

			// At this point we can assume we've found a match

			// Surface matched node to head if not already
			if (node !== head) {
				// As tail, shift to previous. Must only shift if not also
				// head, since if both head and tail, there is no previous.
				if (node === tail) {
					tail = node.prev;
				}

				// Adjust siblings to point to each other. If node was tail,
				// this also handles new tail's empty `next` assignment.
				/** @type {MemizeCacheNode} */ (node.prev).next = node.next;
				if (node.next) {
					node.next.prev = node.prev;
				}

				node.next = head;
				node.prev = null;
				/** @type {MemizeCacheNode} */ (head).prev = node;
				head = node;
			}

			// Return immediately
			return node.val;
		}

		// No cached value found. Continue to insertion phase:

		// Create a copy of arguments (avoid leaking deoptimization)
		args = new Array(len);
		for (i = 0; i < len; i++) {
			args[i] = arguments[i];
		}

		node = {
			args: args,

			// Generate the result from original function
			val: fn.apply(null, args),
		};

		// Don't need to check whether node is already head, since it would
		// have been returned above already if it was

		// Shift existing head down list
		if (head) {
			head.prev = node;
			node.next = head;
		} else {
			// If no head, follows that there's no tail (at initial or reset)
			tail = node;
		}

		// Trim tail if we've reached max size and are pending cache insertion
		if (size === /** @type {MemizeOptions} */ (options).maxSize) {
			tail = /** @type {MemizeCacheNode} */ (tail).prev;
			/** @type {MemizeCacheNode} */ (tail).next = null;
		} else {
			size++;
		}

		head = node;

		return node.val;
	}

	memoized.clear = function () {
		head = null;
		tail = null;
		size = 0;
	};

	// Ignore reason: There's not a clear solution to create an intersection of
	// the function with additional properties, where the goal is to retain the
	// function signature of the incoming argument and add control properties
	// on the return value.
	// @ts-ignore
	return memoized;
}

export { memize as default };
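
A minimal usage sketch of the API exported above (the `slowAdd` and `memoizedAdd` names are illustrative, not part of the package):

import memize from 'memize';

// An expensive function whose results we want to cache (illustrative).
function slowAdd(a, b) {
	return a + b;
}

// Wrap it; `maxSize` bounds how many argument sets stay cached.
const memoizedAdd = memize(slowAdd, { maxSize: 10 });

memoizedAdd(1, 2); // computed, result cached
memoizedAdd(1, 2); // returned from cache (same arguments, strict equality)
memoizedAdd(2, 3); // different arguments, computed and cached separately

// Reset the cache entirely.
memoizedAdd.clear();

Because the cache is a doubly linked list with the most recently used entry kept at the head, hitting `maxSize` evicts the least recently used entry from the tail.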