strogger (batching.js)

📊 A modern structured logging library with functional programming, duck-typing, and comprehensive third-party integrations

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.createBatchedLogger = exports.createBatchedTransport = void 0; /** * Creates a batched transport wrapper around an existing transport */ const createBatchedTransport = (transport, config = { maxSize: 100, maxWaitTime: 5000, maxBatchSize: 1024 * 1024, // 1MB }) => { const state = { logs: [], currentSize: 0, lastFlush: Date.now(), }; const stats = { totalLogs: 0, totalBatches: 0, averageBatchSize: 0, lastFlushTime: 0, pendingLogs: 0, }; const calculateBatchSize = (logs) => { return logs.reduce((size, log) => { return size + JSON.stringify(log).length; }, 0); }; const flush = async () => { if (state.logs.length === 0) return; const logsToSend = [...state.logs]; state.logs = []; state.currentSize = 0; state.lastFlush = Date.now(); // Clear any pending timer if (state.flushTimer) { clearTimeout(state.flushTimer); state.flushTimer = undefined; } try { // Send logs in parallel to the transport await Promise.allSettled(logsToSend.map((log) => transport.log(log))); // Update stats stats.totalLogs += logsToSend.length; stats.totalBatches += 1; stats.averageBatchSize = stats.totalLogs / stats.totalBatches; stats.lastFlushTime = Date.now(); stats.pendingLogs = state.logs.length; } catch (error) { console.error("Batch flush failed:", error); // Re-add logs to the batch for retry state.logs.unshift(...logsToSend); state.currentSize = calculateBatchSize(state.logs); } }; const scheduleFlush = () => { if (state.flushTimer) return; state.flushTimer = setTimeout(() => { flush().catch(console.error); }, config.maxWaitTime); }; const log = async (entry) => { const entrySize = JSON.stringify(entry).length; // Check if adding this entry would exceed batch limits if (state.logs.length >= config.maxSize || state.currentSize + entrySize >= config.maxBatchSize) { await flush(); } state.logs.push(entry); state.currentSize += entrySize; stats.pendingLogs = state.logs.length; // Schedule flush if this is the first log if (state.logs.length === 1) { scheduleFlush(); } }; const setLevel = (level) => { transport.setLevel(level); }; const getLevel = () => { return transport.getLevel(); }; const getStats = () => { return { ...stats, pendingLogs: state.logs.length }; }; return { log, setLevel, getLevel, flush, getStats, }; }; exports.createBatchedTransport = createBatchedTransport; /** * Creates a multi-transport batched logger */ const createBatchedLogger = (transports, config = { maxSize: 50, maxWaitTime: 2000, maxBatchSize: 512 * 1024, // 512KB }) => { const batchedTransports = transports.map((transport) => (0, exports.createBatchedTransport)(transport, config)); const log = async (entry) => { await Promise.allSettled(batchedTransports.map((transport) => transport.log(entry))); }; const setLevel = (level) => { for (const transport of batchedTransports) { transport.setLevel(level); } }; const getLevel = () => { // Return the minimum level across all transports return Math.min(...batchedTransports.map((t) => t.getLevel())); }; const flush = async () => { await Promise.allSettled(batchedTransports.map((transport) => transport.flush())); }; const getStats = () => { return batchedTransports.map((transport) => transport.getStats()); }; return { log, setLevel, getLevel, flush, getStats, }; }; exports.createBatchedLogger = createBatchedLogger; //# sourceMappingURL=batching.js.map