// Package: @akashbabu/lfu-cache
// Version: (unspecified)
// Description: LFU cache implementation with a complexity of `O(1)` for all transactions
// Size: 281 lines, 12.4 kB
// Language: JavaScript (compiled test spec)
;
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Ensure every yielded value is a promise of the supplied constructor `P`.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Instantiate the generator with the caller's `this` and arguments.
        var iterator = generator.apply(thisArg, _arguments || []);
        // Drive the generator: resolve on completion, otherwise await the
        // yielded value and feed the settled result back in.
        function step(result) {
            if (result.done) {
                resolve(result.value);
                return;
            }
            adopt(result.value).then(onFulfilled, onRejected);
        }
        function onFulfilled(value) {
            try { step(iterator.next(value)); } catch (e) { reject(e); }
        }
        function onRejected(reason) {
            try { step(iterator["throw"](reason)); } catch (e) { reject(e); }
        }
        step(iterator.next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    // ES-module namespaces pass through untouched; CommonJS exports are
    // wrapped so that `.default` always yields the module's main export.
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// Mark the compiled output as an ES-module interop target for other tsc output.
Object.defineProperty(exports, "__esModule", { value: true });
// chai: assertion library used for every expectation in this spec.
const chai_1 = require("chai");
// delay: promise-based sleep, used by the maxAge/pruning tests below.
const delay_1 = __importDefault(require("delay"));
// The LFU cache implementation under test (project source, default export).
const src_1 = __importDefault(require("../src"));
describe("#LFUCache()", () => {
    it("should work w/o errors when constructor is called", () => {
        // FIX: chai's `throw` is a method — the original bare property access
        // (`.to.not.throw`) never executed the function, so the assertion was
        // a silent no-op. It must be invoked with `()`.
        chai_1.expect(() => {
            new src_1.default();
        }).to.not.throw();
    });
    it("should at-max store only the specified number of items in the cache", () => {
        const SIZE = 5;
        const lfu = new src_1.default({ max: SIZE });
        // Insert more items than the cache can hold; the overflow must be evicted.
        new Array(SIZE + 5).fill(0).forEach((_, i) => {
            lfu.set(i.toString(), i);
        });
        const state = lfu.dangerously_getState();
        chai_1.expect(state.byKey.size).to.be.eql(SIZE);
        chai_1.expect(state.nodeList.length).to.be.eql(SIZE);
    });
    it("should initialize state with empty contents in the state", () => {
        const lfu = new src_1.default({ max: 10 });
        const state = lfu.dangerously_getState();
        chai_1.expect(state.byKey.size).to.be.eql(0);
        chai_1.expect(state.nodeList.length).to.be.eql(0);
        chai_1.expect(state.freqList.length).to.be.eql(0);
    });
    describe(".set()", () => {
        it("should save the given key-value pair", () => {
            const SIZE = 5;
            const lfu = new src_1.default({ max: SIZE });
            // Fill below capacity so nothing is evicted.
            new Array(SIZE - 1).fill(0).forEach((_, i) => {
                lfu.set(i.toString(), i);
            });
            new Array(SIZE - 1).fill(0).forEach((_, i) => {
                chai_1.expect(lfu.get(i.toString())).to.be.eql(i);
            });
            const state = lfu.dangerously_getState();
            chai_1.expect(state.byKey.size).to.be.eql(SIZE - 1);
            chai_1.expect(state.nodeList.length).to.be.eql(SIZE - 1);
            // All items were accessed once via get(), so one frequency bucket remains.
            chai_1.expect(state.freqList.length).to.be.eql(1);
        });
        it("should evict LFU data from the cache if the size of the cache exceeds the specified limit", () => {
            const SIZE = 5;
            const lfu = new src_1.default({ max: SIZE });
            new Array(SIZE).fill(0).forEach((_, i) => {
                lfu.set(i.toString(), i);
            });
            // Bump frequency of keys "0".."3"; "4" remains the least frequently used.
            new Array(SIZE - 1).fill(0).forEach((_, i) => {
                chai_1.expect(lfu.get(i.toString())).to.be.eql(i);
            });
            lfu.set("5", 5);
            chai_1.expect(lfu.get("4")).to.be.undefined;
        });
        it("should not replace the existing cached value when `force` is false", () => {
            const SIZE = 5;
            const lfu = new src_1.default({ max: SIZE });
            lfu.set("foo", "bar");
            lfu.set("foo", "baz");
            chai_1.expect(lfu.get("foo")).to.be.eql("bar");
        });
        it("should replace the existing cached value when `force` is true", () => {
            const SIZE = 3;
            const lfu = new src_1.default({ max: SIZE });
            new Array(SIZE).fill(0).forEach((_, i) => {
                lfu.set(`foo_${i + 1}`, `bar_${i + 1}`);
            });
            // Increment the access frequency
            lfu.get("foo_1");
            lfu.set("foo_1", "bar_1_2", true);
            // Overflow the cache so the low-frequency items are evicted.
            lfu.set("foo_4", "bar_4");
            lfu.set("foo_5", "bar_5");
            lfu.set("foo_6", "bar_6");
            chai_1.expect(lfu.get("foo_2")).to.be.undefined;
            chai_1.expect(lfu.get("foo_3")).to.be.undefined;
            // The forced overwrite must survive, with the replaced value.
            chai_1.expect(lfu.get("foo_1")).to.be.eql("bar_1_2");
        });
    });
    describe(".get()", () => {
        let lfu;
        beforeEach(() => {
            const SIZE = 5;
            // maxAge (ms) enables the lazy-expiry tests below.
            lfu = new src_1.default({ max: SIZE, maxAge: 100 });
        });
        it("should return the value for the requested key", () => {
            lfu.set("foo", "bar");
            chai_1.expect(lfu.get("foo")).to.be.eql("bar");
        });
        it("should return undefined if the given key is not present in the cache", () => {
            chai_1.expect(lfu.get("empty")).to.be.undefined;
        });
        it("should increment the frequency of the item, on get", () => {
            lfu.set("foo", "bar");
            // Reach into internal state: the parent freq-node's value is the access count.
            chai_1.expect(lfu.dangerously_getState().byKey.get("foo").data.parent.data.value).to.be.eql(1);
            chai_1.expect(lfu.get("foo")).to.be.eql("bar");
            chai_1.expect(lfu.dangerously_getState().byKey.get("foo").data.parent.data.value).to.be.eql(2);
        });
        it("should prune the items lazily after the specified maxAge", () => __awaiter(void 0, void 0, void 0, function* () {
            lfu.set("foo", "bar");
            // Wait past maxAge (100ms) so the item is stale on next access.
            yield delay_1.default(120);
            chai_1.expect(lfu.get("foo")).to.be.undefined;
            chai_1.expect(lfu.size).to.be.eql(0);
        }));
        it("should update the utime of the nodeItem when the item is accessed", () => __awaiter(void 0, void 0, void 0, function* () {
            lfu.set("foo", "bar");
            // Access at 70ms refreshes utime; 70 + 40 > maxAge overall, but the
            // refresh keeps the item alive at the final read.
            yield delay_1.default(70);
            lfu.get("foo");
            yield delay_1.default(40);
            chai_1.expect(lfu.get("foo")).to.be.eql("bar");
            chai_1.expect(lfu.size).to.be.eql(1);
        }));
    });
    describe(".delete()", () => {
        let lfu;
        beforeEach(() => {
            const SIZE = 5;
            lfu = new src_1.default({ max: SIZE });
        });
        it("should delete the given key from the cache", () => {
            lfu.set("foo", "bar");
            chai_1.expect(lfu.get("foo")).to.be.eql("bar");
            lfu.delete("foo");
            chai_1.expect(lfu.get("foo")).to.be.undefined;
        });
    });
    describe(".peek()", () => {
        let lfu;
        const SIZE = 5;
        beforeEach(() => {
            lfu = new src_1.default({ max: SIZE });
        });
        it("should return the value for the requested key", () => {
            lfu.set("foo", "bar");
            chai_1.expect(lfu.peek("foo")).to.be.eql("bar");
        });
        it("should return undefined if the requested key is not present", () => {
            // (typo "emtpy" is harmless — any absent key exercises the same path)
            chai_1.expect(lfu.peek("emtpy")).to.be.undefined;
        });
        it("should not increase the access frequency when peek is called", () => {
            lfu.set("foo", "bar");
            chai_1.expect(lfu.dangerously_getState().byKey.get("foo").data.parent.data.value).to.be.eql(1);
            chai_1.expect(lfu.peek("foo")).to.be.eql("bar");
            // Frequency must remain 1 — peek must not count as an access.
            chai_1.expect(lfu.dangerously_getState().byKey.get("foo").data.parent.data.value).to.be.eql(1);
        });
    });
    describe(".forEach()", () => {
        let lfu;
        const SIZE = 5;
        beforeEach(() => {
            lfu = new src_1.default({ max: SIZE });
        });
        it("should be iteratable", () => {
            new Array(SIZE).fill(0).forEach((_, i) => {
                lfu.set(i.toString(), i);
            });
            // Callback receives [key, value] pairs in insertion order.
            lfu.forEach(([key, value], i) => {
                chai_1.expect(key).to.be.eql(i.toString());
                chai_1.expect(value).to.be.eql(i);
            });
        });
    });
    describe(".map()", () => {
        let lfu;
        const SIZE = 5;
        beforeEach(() => {
            lfu = new src_1.default({ max: SIZE });
        });
        it("should be iterable and return a resultant array", () => {
            new Array(SIZE).fill(0).forEach((_, i) => {
                lfu.set(i.toString(), i);
            });
            const result = lfu.map(([key, value], i) => {
                chai_1.expect(key).to.be.eql(i.toString());
                chai_1.expect(value).to.be.eql(i);
                return i + 1;
            });
            // The mapped values must come back in the same order.
            result.map((i, j) => {
                chai_1.expect(i).to.be.eql(j + 1);
            });
        });
    });
    describe(".size", () => {
        let lfu;
        const SIZE = 5;
        beforeEach(() => {
            lfu = new src_1.default({ max: SIZE });
        });
        it("should be increased when a new item is added to the cache", () => {
            chai_1.expect(lfu.size).to.be.eql(0);
            lfu.set("foo", "bar");
            chai_1.expect(lfu.size).to.be.eql(1);
        });
        // FIX: title typo — "of removing" -> "on removing".
        it("should decrease the length of the cache on removing the given key", () => {
            chai_1.expect(lfu.size).to.be.eql(0);
            lfu.set("foo", "bar");
            lfu.set("bar", "baz");
            chai_1.expect(lfu.size).to.be.eql(2);
            lfu.delete("foo");
            chai_1.expect(lfu.size).to.be.eql(1);
            lfu.delete("bar");
            chai_1.expect(lfu.size).to.be.eql(0);
        });
        // FIX: the original title ("should decrease...") contradicted the
        // assertions, which verify the size is UNCHANGED for an unknown key.
        it("should not decrease the length of the cache if the given key does not exist in the cache", () => {
            chai_1.expect(lfu.size).to.be.eql(0);
            lfu.set("foo", "bar");
            chai_1.expect(lfu.size).to.be.eql(1);
            lfu.delete("unknown");
            chai_1.expect(lfu.size).to.be.eql(1);
        });
        // This test case was submitted in #1
        it("should never be more than the max size specified", () => {
            const cache = new src_1.default({ max: 100, evictCount: 1 });
            // Generate random number
            const random = (min, max) => Math.round(Math.random() * (max - min)) + min;
            // Read-through helper: return the cached value or cache the new one.
            const cacheFunc = (key, value) => {
                if (typeof cache.peek(key) !== "undefined")
                    return cache.get(key);
                cache.set(key, value);
                return value;
            };
            // "cold" will be called only once
            cacheFunc("cold", "123");
            // "hot" will be called for 20 times
            for (let i = 1; i <= 20; i++) {
                cacheFunc("hot", "456");
            }
            // Let the cache size limit exceeded
            for (let i = 1; i <= 200; i++) {
                // all of them will be called three times
                cacheFunc(String(100 + i), random(100, 900));
                cacheFunc(String(100 + i), random(100, 900));
                cacheFunc(String(100 + i), random(100, 900));
            }
            // "new" will be called once
            cacheFunc("new", "123");
            // Should be 789 not 123, because the cold cache will be removed from the cache.
            chai_1.expect(cacheFunc("cold", "789")).to.be.eql("789");
            // Should be 456 not 789, because it is hot and remains in the cache.
            chai_1.expect(cacheFunc("hot", "789")).to.be.eql("456");
            // Should be 123 not 789, because it is a new item added to the cache.
            chai_1.expect(cacheFunc("new", "123")).to.be.eql("123");
            chai_1.expect(cache.size).to.be.eql(100);
        });
    });
    describe("clear()", () => {
        it("should clear nodeList, freqList and size of the cache", () => {
            const lfu = new src_1.default();
            lfu.set("foo", "bar");
            chai_1.expect(lfu.size).to.be.eql(1);
            lfu.clear();
            chai_1.expect(lfu.size).to.be.eql(0);
        });
        it("should not cause any memory leakage in the process of clearing", function () {
            // Regular function (not arrow) so mocha's `this.timeout` is available.
            this.timeout(50 * 1000);
            const SIZE = 10000;
            const lfu = new src_1.default({ max: SIZE + 1 });
            // Repeated fill/clear cycles surface leaks in internal bookkeeping.
            for (let i = 0; i < 1000; ++i) {
                for (let j = 0; j < SIZE; ++j) {
                    lfu.set("foo" + j, "bar");
                }
                chai_1.expect(lfu.size).to.be.eql(SIZE);
                lfu.clear();
                chai_1.expect(lfu.size).to.be.eql(0);
            }
        });
    });
});
//# sourceMappingURL=lfu.spec.js.map