/**
 * @jahed/sparql-engine
 * SPARQL query engine for servers and web browsers.
 */
// SPDX-License-Identifier: MIT
import { LRUCache } from "lru-cache";
/**
 * An in-memory LRU cache backed by the `lru-cache` package.
 */
export class BaseLRUCache {
    // Underlying lru-cache instance holding the cached items.
    _content;
    /**
     * Constructor
     * @param options - Options forwarded to `lru-cache` (maxSize, maxAge/ttl,
     * length/size calculator, dispose callback, ...)
     */
    constructor(options) {
        // Work on a shallow copy so the caller's options object is never mutated.
        const opts = { ...options };
        // if we set a dispose function, we need to turn 'noDisposeOnSet' to True,
        // otherwise onDispose will be called each time an item is updated (instead of when it slide out),
        // which will break any class extending BaseAsyncCache
        if (opts.dispose !== undefined) {
            opts.noDisposeOnSet = true;
        }
        this._content = new LRUCache(opts);
    }
    /**
     * Insert or overwrite an item in the cache.
     * @param key - Cache key
     * @param item - Item to store
     */
    put(key, item) {
        this._content.set(key, item);
    }
    /**
     * Test whether the cache currently holds an entry for `key`.
     * @returns True if the key is present, False otherwise
     */
    has(key) {
        return this._content.has(key);
    }
    /**
     * Fetch an item from the cache.
     * @returns The cached item, or `null` when the key is absent
     */
    get(key) {
        if (this._content.has(key)) {
            return this._content.get(key);
        }
        return null;
    }
    /**
     * Remove an item from the cache.
     */
    delete(key) {
        this._content.delete(key);
    }
    /**
     * @returns The number of items currently cached
     */
    count() {
        return this._content.size;
    }
}
/**
 * A base class for implementing an asynchronous cache.
 * Entries are written incrementally by a single writer and become readable
 * once committed; readers that arrive early wait on a Promise.
 */
export class BaseAsyncCache {
    // Underlying synchronous cache (put/has/get/delete/count).
    _cache;
    /**
     * Constructor
     * @param cache - The data structure used to store cache entries
     */
    constructor(cache) {
        this._cache = cache;
    }
    /**
     * Test whether an entry (complete or not) exists for `key`.
     */
    has(key) {
        return this._cache.has(key);
    }
    /**
     * Append `item` to the entry for `key`, creating the entry if needed.
     * Only the writer that created the entry may append to it.
     */
    update(key, item, writerID) {
        if (!this._cache.has(key)) {
            // first write: create a fresh, incomplete entry owned by this writer
            this._cache.put(key, {
                content: [item],
                writerID,
                isComplete: false,
                pendingReaders: [],
            });
            return;
        }
        const entry = this._cache.get(key);
        // a different writer must not tamper with the entry
        if (entry.writerID !== writerID) {
            return;
        }
        entry.content.push(item);
        this._cache.put(key, entry);
    }
    /**
     * Mark the entry for `key` as complete and wake up all pending readers.
     * Ignored when the key is absent or `writerID` does not own the entry.
     */
    commit(key, writerID) {
        if (!this._cache.has(key)) {
            return;
        }
        const entry = this._cache.get(key);
        if (entry.writerID !== writerID) {
            return;
        }
        // replace the entry with a completed copy
        this._cache.put(key, {
            content: entry.content,
            writerID: entry.writerID,
            isComplete: true,
            pendingReaders: [],
        });
        // resolve every reader that was waiting for this entry
        for (const resolve of entry.pendingReaders) {
            resolve(entry.content);
        }
    }
    /**
     * Fetch the content cached under `key`.
     * @returns A Promise for the content (resolved immediately when complete,
     * otherwise once the writer commits or deletes), or `null` when absent
     */
    get(key) {
        if (!this.has(key)) {
            return null;
        }
        const entry = this._cache.get(key);
        return entry.isComplete
            ? Promise.resolve(entry.content)
            : new Promise((resolve) => {
                // resolved later by commit() or delete()
                entry.pendingReaders.push(resolve);
            });
    }
    /**
     * Remove the entry for `key`, resolving pending readers with an empty list.
     * Ignored when the key is absent or `writerID` does not own the entry.
     */
    delete(key, writerID) {
        if (!this._cache.has(key)) {
            return;
        }
        const entry = this._cache.get(key);
        if (entry.writerID !== writerID) {
            return;
        }
        this._cache.delete(key);
        // waiting readers receive an empty result when the entry is discarded
        for (const resolve of entry.pendingReaders) {
            resolve([]);
        }
    }
    /**
     * @returns The number of entries currently cached
     */
    count() {
        return this._cache.count();
    }
}
/**
 * An in-memory LRU implementation of an asynchronous cache.
 */
export class AsyncLRUCache extends BaseAsyncCache {
    /**
     * Constructor
     * @param options - Options forwarded to the underlying LRU cache
     */
    constructor(options) {
        // delegate storage to an in-memory LRU structure
        const store = new BaseLRUCache(options);
        super(store);
    }
}
//# sourceMappingURL=cache-base.js.map