helene
Version:
Real-time Web Apps for Node.js
338 lines • 12.4 kB
JavaScript
"use strict";
/**
* IDBStorage - High-Performance IndexedDB Storage Implementation
*
* This module provides an optimized IndexedDB-based storage implementation with several
* performance enhancements over basic storage approaches. It's designed to handle large
* datasets efficiently while maintaining data integrity and providing fast access times.
*
* ## Key Features & Optimizations
*
* ### 1. Smart Chunking Strategy
* - Documents are split into 256KB chunks for optimal IndexedDB performance
* - Smaller chunks provide better granularity for partial updates
* - Efficient chunk management reduces memory overhead
*
* ### 2. LRU Caching Layer
* - In-memory cache for frequently accessed documents (default: 50 documents)
* - Immediate cache updates on writes for instant read performance
* - LRU eviction prevents memory bloat in long-running applications
* - Cache-first reads eliminate IndexedDB hits for hot data
*
* ### 3. Batch Write Operations
* - Write operations are batched with configurable delay (default: 100ms)
* - Reduces IndexedDB transaction overhead significantly
* - Async writes with immediate cache updates for perceived performance
* - Manual flush capability for critical operations
*
* ### 4. Optimized Append Operations
* - Efficient append operations that leverage batched writes
* - Cache-first approach for maximum performance when data is already loaded
* - Minimal disk reads - only when data isn't cached
* - Consistent behavior with write operations through unified batching system
*
* ### 5. Robust Error Handling
* - Per-chunk error recovery for corrupted data
* - Fallback mechanisms for data integrity
* - Comprehensive logging for debugging
*
* ## Performance Benefits
*
* - **Append Operations**: Cache-hit appends are O(1), cache-miss appends require one read + batched write
* - **Read Performance**: Cache hits are near-instantaneous
* - **Write Performance**: Batched + async writes reduce UI blocking
* - **Memory Usage**: Bounded cache with LRU eviction prevents memory leaks
* - **Chunking Benefits**: Better handling of large documents and partial updates
*
* ## Usage Examples
*
* ```typescript
* const storage = new IDBStorage()
*
* // Basic operations - all return immediately from cache when possible
* await storage.write('document', 'large text data...')
* const data = await storage.read('document') // Cache hit if recently written
*
* // Efficient append operations
* await storage.append('log', 'new log entry\n') // O(1) operation
*
* // Force pending writes to complete
* await storage.flush()
*
* // Clear all data
* await storage.clear()
* ```
*
* ## Architecture
*
* The implementation is split into focused, testable components:
*
* - **DocumentCache**: Manages LRU caching with configurable size limits
* - **BatchWriter**: Handles write batching and scheduling with error recovery
* - **IDBStorage**: Main class coordinating all components with clean separation of concerns
*
* ## IndexedDB Schema
*
* - **Database**: `helene_data`
* - **Store**: `chunks` with keyPath `id`
* - **Index**: `docId` for efficient document chunk lookup
*
* Each chunk contains:
* - `id`: Unique chunk identifier (`${docId}-${chunkIndex}`)
* - `docId`: Document identifier with namespace prefix
* - `chunkIndex`: Ordering index for chunk reassembly
* - `content`: Actual data
*
* ## Browser Compatibility
*
* - Requires IndexedDB support (all modern browsers)
* - Uses modern async/await syntax
* - No external dependencies beyond 'idb' helper library
* - Graceful error handling for storage quota limitations
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.IDBStorage = exports.DB_VERSION = exports.DB_NAME = exports.STORE_NAME = exports.CHUNK_SIZE = void 0;
const idb_1 = require("idb");
exports.CHUNK_SIZE = 512 * 1024; // Increased to 512KB for better performance with larger documents
exports.STORE_NAME = 'chunks';
exports.DB_NAME = 'helene_data';
exports.DB_VERSION = 1;
class DocumentCache {
cache = new Map();
maxSize;
constructor(maxSize = 100) {
// Increased default cache size
this.maxSize = maxSize;
}
get(key) {
const value = this.cache.get(key);
if (value !== undefined) {
// LRU: move to end
this.cache.delete(key);
this.cache.set(key, value);
}
return value;
}
set(key, value) {
this.cache.set(key, value);
this.evictIfNeeded();
}
delete(key) {
this.cache.delete(key);
}
clear() {
this.cache.clear();
}
evictIfNeeded() {
if (this.cache.size > this.maxSize) {
const firstKey = this.cache.keys().next().value;
this.cache.delete(firstKey);
}
}
}
class BatchWriter {
pendingWrites = new Map();
pendingData = new Map();
batchDelay;
flushCallback;
constructor(flushCallback, batchDelay = 50) {
this.flushCallback = flushCallback;
this.batchDelay = batchDelay;
}
schedule(name, data) {
this.clearPending(name);
this.pendingData.set(name, data);
const timeout = setTimeout(async () => {
this.pendingWrites.delete(name);
const latestData = this.pendingData.get(name);
this.pendingData.delete(name);
if (latestData !== undefined) {
try {
await this.flushCallback(name, latestData);
}
catch (error) {
console.error(`Failed to flush ${name}:`, error);
}
}
}, this.batchDelay);
this.pendingWrites.set(name, timeout);
}
async flushAll(cache) {
const promises = Array.from(this.pendingWrites.entries()).map(([name, timeout]) => {
clearTimeout(timeout);
this.pendingWrites.delete(name);
const pendingData = this.pendingData.get(name);
this.pendingData.delete(name);
const data = pendingData ?? cache.get(name);
return data ? this.flushCallback(name, data) : Promise.resolve();
});
await Promise.all(promises);
}
clear() {
for (const timeout of this.pendingWrites.values()) {
clearTimeout(timeout);
}
this.pendingWrites.clear();
this.pendingData.clear();
}
clearPending(name) {
const existing = this.pendingWrites.get(name);
if (existing) {
clearTimeout(existing);
this.pendingWrites.delete(name);
}
this.pendingData.delete(name);
}
}
const dbPromise = (0, idb_1.openDB)(exports.DB_NAME, exports.DB_VERSION, {
upgrade(db) {
const store = db.createObjectStore(exports.STORE_NAME, { keyPath: 'id' });
store.createIndex('docId', 'docId');
},
});
class IDBStorage {
prefix = 'helene:data:';
db = null;
cache;
batchWriter;
chunkSize;
constructor(options) {
this.cache = new DocumentCache(options?.cacheSize ?? 100);
this.batchWriter = new BatchWriter((name, data) => this.writeToDisk(name, data), options?.batchDelay ?? 50);
this.chunkSize = options?.chunkSize ?? exports.CHUNK_SIZE;
}
async getDB() {
if (!this.db) {
this.db = await dbPromise;
}
return this.db;
}
async read(name) {
const cached = this.cache.get(name);
if (cached !== undefined) {
return cached;
}
const result = await this.readFromDisk(name);
this.cache.set(name, result);
return result;
}
async write(name, data) {
this.cache.set(name, data);
this.batchWriter.schedule(name, data);
}
async append(name, data) {
const cached = this.cache.get(name);
if (cached !== undefined) {
const newData = cached + data;
this.cache.set(name, newData);
this.batchWriter.schedule(name, newData);
return;
}
// For cache miss, we need the current content to calculate the new total
const currentData = await this.readFromDisk(name);
const newData = currentData + data;
// Update cache with the new content
this.cache.set(name, newData);
// Schedule the write operation
this.batchWriter.schedule(name, newData);
}
async flush() {
await this.batchWriter.flushAll(this.cache);
}
async clear() {
this.cache.clear();
this.batchWriter.clear();
const db = await this.getDB();
const tx = db.transaction(exports.STORE_NAME, 'readwrite');
await tx.objectStore(exports.STORE_NAME).clear();
await tx.done;
}
async readFromDisk(name) {
try {
const docId = `${this.prefix}${name}`;
const db = await this.getDB();
// For small documents, use getAllFromIndex
const tx = db.transaction(exports.STORE_NAME, 'readonly');
const index = tx.objectStore(exports.STORE_NAME).index('docId');
// First, get the count to decide strategy
const count = await index.count(docId);
if (count === 0)
return '';
// Use array for efficient string building
const chunks = [];
if (count <= 10) {
// For small documents, load all chunks at once
const allChunks = await index.getAll(docId);
allChunks.sort((a, b) => a.chunkIndex - b.chunkIndex);
for (const chunk of allChunks) {
chunks.push(chunk.content || '');
}
}
else {
// For large documents, use cursor-based streaming
let cursor = await index.openCursor(docId);
const chunkMap = new Map();
while (cursor) {
chunkMap.set(cursor.value.chunkIndex, cursor.value.content || '');
cursor = await cursor.continue();
}
// Sort by chunk index and build result
const sortedIndices = Array.from(chunkMap.keys()).sort((a, b) => a - b);
for (const idx of sortedIndices) {
chunks.push(chunkMap.get(idx));
}
}
await tx.done;
return chunks.join('');
}
catch (error) {
console.error('Error reading from IndexedDB:', error);
throw new Error('Failed to read data from storage');
}
}
async writeToDisk(name, data) {
try {
const docId = `${this.prefix}${name}`;
const db = await this.getDB();
const chunks = this.createChunks(data);
// Use a single transaction for both delete and write operations
const tx = db.transaction(exports.STORE_NAME, 'readwrite');
const store = tx.objectStore(exports.STORE_NAME);
// Delete existing chunks and write new ones in the same transaction
await this.deleteAndWriteChunks(store, docId, chunks);
await tx.done;
}
catch (error) {
console.error('Error writing to IndexedDB:', error);
throw new Error('Failed to write data to storage');
}
}
createChunks(data) {
const chunks = [];
for (let i = 0; i < data.length; i += this.chunkSize) {
chunks.push(data.slice(i, i + this.chunkSize));
}
return chunks;
}
async deleteAndWriteChunks(store, docId, chunks) {
// First delete existing chunks
const index = store.index('docId');
const range = IDBKeyRange.only(docId);
// Use cursor to delete while we iterate (more memory efficient)
let cursor = await index.openCursor(range);
while (cursor) {
await store.delete(cursor.primaryKey);
cursor = await cursor.continue();
}
// Then write new chunks in parallel
const writePromises = chunks.map((chunk, i) => store.put({
id: `${docId}-${i}`,
docId,
chunkIndex: i,
content: chunk,
}));
await Promise.all(writePromises);
}
}
exports.IDBStorage = IDBStorage;
//# sourceMappingURL=idb-storage.js.map