
key-value-compress

Key-Value interface to deflate, split, concatenate and inflate storage.
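A minimal usage sketch (not taken from the package's own docs): it assumes the package's main entry exposes the compressKVS function from the listing below, and wires it to plain Map-backed objects for both chunk and metadata storage. The option names (storage, metaStorage, compress, digest) and the returned { get, set } pair come from the code; the store objects and key names are illustrative.

const { compressKVS } = require("key-value-compress"); // assumed re-export of compressKVS

// Any object exposing get/set (plain or Promise-returning) can back the store.
const chunkStore = new Map(); // content digest -> compressed chunk
const metaStore = new Map();  // original key   -> metadata record

const storage = {
    get: (key) => chunkStore.get(key),
    set: (key, value) => void chunkStore.set(key, value),
};
const metaStorage = {
    get: (key) => metaStore.get(key),
    set: (key, value) => void metaStore.set(key, value),
};

async function demo() {
    const kvs = compressKVS({ storage, metaStorage, compress: "deflate", digest: "sha1" });
    await kvs.set("greeting", { hello: "world" }); // deflated, chunked, stored
    console.log(await kvs.get("greeting"));        // -> { hello: 'world' }
}

demo().catch(console.error);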

"use strict"; /** * key-value-compress.ts */ Object.defineProperty(exports, "__esModule", { value: true }); exports.compressKVS = void 0; const _compress = require("./_compress"); const _transform_1 = require("./_transform"); const _util_1 = require("./_util"); /** * Key-Value storage interceptor to deflate, split, concatenate and inflate content */ function compressKVS(options) { let { chunkNS, chunkSize, compress, digest, encoding } = options || {}; let { inlineSize, metaNS, metaStorage, storage, ttl } = options || {}; if (!metaStorage) metaStorage = storage; if (metaNS) metaStorage = (0, _transform_1.namespaceKVS)(metaStorage, metaNS); const metaKSV = (0, _transform_1.stringifyKVS)(metaStorage, JSON); if (chunkNS) storage = (0, _transform_1.namespaceKVS)(storage, chunkNS); if (encoding) storage = (0, _util_1.base64KVS)(storage, encoding); let compressor = _compress[compress || "deflate" /* defaults.compress */]; if (inlineSize == null) inlineSize = 1024 /* defaults.inlineSize */; if (!chunkSize) chunkSize = 491520 /* defaults.chunkSize */; if (!digest) digest = "sha1" /* defaults.digest */; return { get, set }; async function set(key, value) { const meta = { v: 2 /* defaults.version */, chunks: [] }; let raw; if (Buffer.isBuffer(value)) { raw = value; meta.type = "Buffer"; } else { if ("string" === typeof value) { raw = Buffer.from(value); meta.type = "string"; } else { raw = Buffer.from(JSON.stringify(value)); meta.type = "JSON"; } } if (+ttl) { meta.dt = +Date.now(); } const compressed = await compressor.compress(raw); const chunks = (0, _util_1.splitBuffer)(compressed, chunkSize); for (let chunk of chunks) { const hash = (0, _util_1.digestBuffer)(digest, chunk); if (chunk.length <= inlineSize) { const inline = meta.inline || (meta.inline = {}); inline[hash] = chunk.toString("base64"); } else { await storage.set(hash, chunk); } meta.chunks.push(hash); } await metaKSV.set(key, meta); } async function get(key) { const meta = await metaKSV.get(key); if (!meta) return; const { inline, chunks, dt, v } = meta; // reject data when stored with a future version of the library if (!+v || v > 2 /* defaults.version */) throw Error(`Invalid storage version: ${v}`); if (!chunks) return; if (!chunks.length) return; if (ttl) { if (!dt) return; if (dt < Date.now() - ttl) return; } const array = []; for (const hash of chunks) { const base64 = inline && inline[hash]; let chunk; if (base64) { chunk = Buffer.from(base64, "base64"); } else { chunk = await storage.get(hash); } if (!chunk) return; const check = (0, _util_1.digestBuffer)(digest, chunk); if (check !== hash) return; array.push(chunk); } const joined = (0, _util_1.concatBuffer)(array); const buffer = await compressor.decompress(joined); if (!buffer) return; let value; if (meta.type === "Buffer") { value = buffer; } else if (meta.type === "string") { value = buffer.toString(); } else { value = JSON.parse(buffer); } return value; } } exports.compressKVS = compressKVS;