UNPKG

helene

Version: (not captured in this scrape) — 154 lines, 5.2 kB
"use strict";
// Chunked, content-addressed string storage on top of IndexedDB (via
// localforage). Each logical "file" (a `name`) is split into fixed-size
// plaintext chunks; every chunk is compressed with lz-string and stored under
// the SHA-256 hex digest of its *uncompressed* content, so unchanged chunks
// are never re-written. A per-name metadata record lists the ordered chunk
// ids needed to reconstruct the full string.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LocalForageStorage = void 0;
const localforage_1 = __importDefault(require("localforage"));
const debounce_1 = __importDefault(require("lodash/debounce"));
const zod_1 = require("zod");
const lz_string_1 = __importDefault(require("lz-string"));
const difference_1 = __importDefault(require("lodash/difference"));
// Persisted chunk record. `chunkify()` omits `content` for chunks that
// already exist on disk; only chunks carrying `content` are ever written.
const ChunkSchema = zod_1.z.object({
    id: zod_1.z.string(),
    content: zod_1.z.string(),
});
// Per-name metadata: the ordered chunk ids that reconstruct the content.
const MetadataSchema = zod_1.z.object({
    chunkIds: zod_1.z.array(zod_1.z.string()),
});
// Shape of an in-memory cache entry (kept for schema completeness).
const CacheSchema = zod_1.z.object({
    metadata: MetadataSchema,
    content: zod_1.z.string(),
});
const MetadataDB = localforage_1.default.createInstance({
    name: 'helene-metadata',
    driver: localforage_1.default.INDEXEDDB,
});
const ChunksDB = localforage_1.default.createInstance({
    name: 'helene-chunks',
    driver: localforage_1.default.INDEXEDDB,
});
class LocalForageStorage {
    // Plaintext chunk size (UTF-16 code units) before compression.
    chunkSize = 512 * 1024;
    // In-memory mirror of persisted state, keyed by logical name.
    cache = new Map();
    // One debounced flush function per name. BUGFIX: the original shared a
    // single `debounce(this.flush, 1000)` across every name; lodash debounce
    // collapses calls regardless of arguments, so interleaved writes to two
    // different names within the 1 s window flushed only the most recent
    // name and silently skipped persisting the other.
    debouncedFlushes = new Map();
    /**
     * Load `name` from IndexedDB, populate the in-memory cache, and return
     * the reconstructed content string ('' when absent or invalid).
     */
    async read(name) {
        const metadata = await MetadataDB.getItem(name);
        if (!metadata) {
            this.cache.set(name, {
                metadata: { chunkIds: [] },
                content: '',
            });
            return '';
        }
        const validatedMetadata = MetadataSchema.safeParse(metadata);
        if (!validatedMetadata.success) {
            console.error(':invalid_metadata', validatedMetadata.error);
            return '';
        }
        let data = '';
        // BUGFIX: iterate the validated payload, not the raw record the
        // original fell back to after discarding the safeParse result.
        for (const chunkId of validatedMetadata.data.chunkIds) {
            const chunkData = await ChunksDB.getItem(chunkId);
            if (chunkData) {
                // BUGFIX: lz-string returns null for undecompressable input;
                // the original would have appended the literal string "null".
                data += lz_string_1.default.decompress(chunkData.content) ?? '';
            }
        }
        this.cache.set(name, {
            metadata: validatedMetadata.data,
            content: data,
        });
        return data;
    }
    /**
     * Shared by append()/write(): hydrate the cache entry for `name` (reading
     * from disk if needed), apply `update` to the current content, and
     * schedule a debounced flush. Private helper — not part of the public API.
     */
    async _updateContent(name, update) {
        let cache = this.cache.get(name);
        if (!cache) {
            await this.read(name);
            cache = this.cache.get(name);
        }
        if (!cache) {
            // read() hit invalid metadata and left no cache entry: start fresh.
            this.cache.set(name, {
                metadata: { chunkIds: [] },
                content: update(''),
            });
        }
        else {
            cache.content = update(cache.content);
        }
        this.debouncedFlush(name);
    }
    /** Append `data` to the cached content and schedule a flush. */
    async append(name, data) {
        await this._updateContent(name, content => content + data);
    }
    /** Replace the cached content with `data` and schedule a flush. */
    async write(name, data) {
        await this._updateContent(name, () => data);
    }
    /**
     * Persist the cached content for `name`: write only new chunks, delete
     * chunks no longer referenced, then rewrite the metadata record.
     */
    async flush(name) {
        const cache = this.cache.get(name);
        if (!cache) {
            return;
        }
        console.log('flushing', name);
        const newChunks = await this.chunkify(cache.content, cache.metadata.chunkIds);
        const existingChunkIds = cache.metadata.chunkIds;
        const newChunkIds = newChunks.map(chunk => chunk.id);
        const chunkIdsToRemove = (0, difference_1.default)(existingChunkIds, newChunkIds);
        // Set gives O(1) membership; the original did a linear `.includes`
        // scan per chunk (accidental O(n^2)).
        const existingIdSet = new Set(existingChunkIds);
        for (const chunk of newChunks) {
            if (existingIdSet.has(chunk.id)) {
                continue;
            }
            // Might never happen, since chunkify only omits content for
            // already-existing ids — kept as a defensive guard.
            if (chunk.content === undefined) {
                continue;
            }
            await ChunksDB.setItem(chunk.id, chunk);
        }
        for (const chunkId of chunkIdsToRemove) {
            await ChunksDB.removeItem(chunkId);
        }
        // BUGFIX: setItem overwrites in place; the removeItem the original
        // issued first was redundant and opened a window with no metadata.
        await MetadataDB.setItem(name, {
            chunkIds: newChunkIds,
        });
        // NOTE(review): this re-read resets the cache from disk, so any
        // append() that lands while the awaits above are pending is
        // discarded — confirm callers never write during a flush.
        await this.read(name);
    }
    /**
     * Split `str` into `chunkSize`-sized pieces. Chunks whose SHA-256 id is
     * already in `existingChunkIds` are returned id-only (no re-compression);
     * new chunks carry lz-string-compressed content.
     */
    async chunkify(str, existingChunkIds, chunkSize = this.chunkSize) {
        const existing = new Set(existingChunkIds);
        const chunks = [];
        for (let i = 0; i < str.length; i += chunkSize) {
            const content = str.slice(i, i + chunkSize);
            const id = await this.sha256(content);
            if (existing.has(id)) {
                chunks.push({
                    id,
                });
                continue;
            }
            chunks.push({
                id,
                content: lz_string_1.default.compress(content),
            });
        }
        console.log('chunks compressed', chunks.filter(chunk => chunk.content !== undefined));
        return chunks;
    }
    /** Hex-encoded SHA-256 digest of `str` via WebCrypto. */
    async sha256(str) {
        const encoder = new TextEncoder();
        const data = encoder.encode(str);
        const hash = await crypto.subtle.digest('SHA-256', data);
        return Array.from(new Uint8Array(hash))
            .map(b => b.toString(16).padStart(2, '0'))
            .join('');
    }
    /**
     * Debounced flush, keyed per name so concurrent writes to different
     * names each get their own 1 s trailing-edge timer. Signature is
     * unchanged from the original field (`debouncedFlush(name)`).
     */
    debouncedFlush(name) {
        let fn = this.debouncedFlushes.get(name);
        if (!fn) {
            fn = (0, debounce_1.default)(() => this.flush(name), 1000);
            this.debouncedFlushes.set(name, fn);
        }
        fn();
    }
}
exports.LocalForageStorage = LocalForageStorage;
//# sourceMappingURL=localforage-storage.js.map