
js-lru


A finite key-value cache using the Least Recently Used (LRU) cache algorithm: the most recently used items are kept in the cache while the least recently used items are purged when the limit is reached.

133 lines (128 loc) 4.06 kB
/**
 * A doubly linked list-based Least Recently Used (LRU) cache. Will keep most
 * recently used items while discarding least recently used items when its limit
 * is reached.
 *
 * Licensed under MIT. Copyright (c) 2010 Rasmus Andersson <http://hunch.se/>
 * See README.md for details.
 *
 * Illustration of the design:
 *
 *        entry             entry             entry             entry
 *        ______            ______            ______            ______
 *       | head |.newer => |      |.newer => |      |.newer => | tail |
 *       |  A   |          |  B   |          |  C   |          |  D   |
 *       |______| <= older.|______| <= older.|______| <= older.|______|
 *
 *   removed  <--  <--  <--  <--  <--  <--  <--  <--  <--  <--  added
 */
function LRUCache (limit) {
  // Current size of the cache. (Read-only).
  this.size = 0
  // Maximum number of items this cache can hold.
  this.limit = limit
  this._keymap = {}
}

/**
 * Put <value> into the cache associated with <key>. Returns the entry which was
 * removed to make room for the new entry. Otherwise undefined is returned
 * (i.e. if there was enough room already).
 */
LRUCache.prototype.put = function (key, value) {
  var entry = {key: key, value: value}
  // Note: No protection against replacing, and thus orphan entries. By design.
  this._keymap[key] = entry
  if (this.tail) {
    // link previous tail to the new tail (entry)
    this.tail.newer = entry
    entry.older = this.tail
  } else {
    // we're first in -- yay
    this.head = entry
  }
  // add new entry to the end of the linked list -- it's now the freshest entry.
  this.tail = entry
  if (this.size === this.limit) {
    // we hit the limit -- remove the head
    return this.shift()
  } else {
    // increase the size counter
    this.size++
  }
}

/**
 * Purge the least recently used (oldest) entry from the cache. Returns the
 * removed entry or undefined if the cache was empty.
 *
 * If you need to perform any form of finalization of purged items, this is a
 * good place to do it. Simply override/replace this function:
 *
 *   var c = new LRUCache(123);
 *   c.shift = function() {
 *     var entry = LRUCache.prototype.shift.call(this);
 *     doSomethingWith(entry);
 *     return entry;
 *   }
 */
LRUCache.prototype.shift = function () {
  // todo: handle special case when limit == 1
  var entry = this.head
  if (entry) {
    if (this.head.newer) {
      // advance the list
      this.head = this.head.newer
      this.head.older = undefined
    } else {
      // the cache is exhausted
      this.head = undefined
      this.tail = undefined
    }
    // Remove last strong reference to <entry> and remove links from the purged
    // entry being returned:
    entry.newer = entry.older = undefined
    // delete is slow, but we need to do this to avoid uncontrollable growth:
    delete this._keymap[entry.key]
    this.size--
  }
  return entry
}

/**
 * Get and register recent use of <key>. Returns the value associated with <key>
 * or undefined if not in cache.
 */
LRUCache.prototype.get = function (key, returnEntry) {
  // First, find our cache entry
  var entry = this._keymap[key]
  if (entry === undefined) {
    return // Not cached. Sorry.
  }
  // As <key> was found in the cache, register it as being requested recently
  if (entry === this.tail) {
    // Already the most recently used entry, so no need to update the list
    return returnEntry ? entry : entry.value
  }
  // HEAD--------------TAIL
  //   <.older   .newer>
  //  <--- add direction --
  //   A  B  C  <D>  E
  if (entry.newer) {
    if (entry === this.head) {
      this.head = entry.newer
    }
    entry.newer.older = entry.older // C <-- E.
  }
  if (entry.older) {
    entry.older.newer = entry.newer // C. --> E
  }
  entry.newer = undefined // D --x
  entry.older = this.tail // D. --> E
  if (this.tail) {
    this.tail.newer = entry // E. <-- D
  }
  this.tail = entry
  return returnEntry ? entry : entry.value
}

// Export ourselves
if (typeof this === 'object') {
  this.LRUCache = LRUCache
}
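
As a quick illustration of the behaviour described above, here is a minimal usage sketch (not part of the source file). It assumes LRUCache is in scope, e.g. attached to the global object when the file is loaded in a browser, or obtained via require() when used as a CommonJS module; the keys and values are arbitrary examples.

// Create a cache that holds at most three entries.
var c = new LRUCache(3)

c.put('adam', 29)
c.put('john', 26)
c.put('angela', 24)

// Reading an entry registers it as recently used.
c.get('adam') // -> 29; 'adam' is now the most recently used entry

// The cache is full, so putting a fourth entry purges the least
// recently used one ('john') and returns the purged entry.
var purged = c.put('bob', 48)
console.log(purged.key)      // -> 'john'
console.log(c.get('john'))   // -> undefined (no longer cached)
console.log(c.get('adam'))   // -> 29 (still cached)

// Passing a truthy second argument to get() returns the internal
// entry object (with key, value and the older/newer links) instead
// of just the value.
console.log(c.get('angela', true).key) // -> 'angela'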