UNPKG

@naturalcycles/nodejs-lib

Version:
104 lines (103 loc) 4.64 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.transformLogProgress = void 0;
const stream_1 = require("stream");
const util_1 = require("util");
const js_lib_1 = require("@naturalcycles/js-lib");
const colors_1 = require("../../colors");
const colors_2 = require("../../colors/colors");
const sizeStack_1 = require("../sizeStack");

// Options passed to util.inspect when printing the stats object.
const inspectOpt = {
  colors: colors_2.hasColors,
  breakLength: 300,
};

/**
 * Pass-through transform that optionally logs progress.
 *
 * Every chunk is counted and forwarded unchanged. Every `logEvery` chunks a
 * stats line is printed (memory usage, rows-per-second, optional item sizes),
 * and every `logEvery * 10` chunks an additional "per hour" projection line.
 * On stream end a final `<metric>_final` summary is printed.
 */
function transformLogProgress(opt = {}) {
  const {
    metric = 'progress',
    heapTotal: logHeapTotal = false,
    heapUsed: logHeapUsed = false,
    rss: logRss = true,
    peakRSS: logPeakRSS = true,
    logRPS = true,
    logEvery = 1000,
    logSizes = false,
    logSizesBuffer = 100000,
    logZippedSizes = false,
    batchSize = 1,
    extra,
    logger = console,
  } = opt;

  // Logging is on by default; `logProgress: false` or `logEvery: 0` turns it off.
  const logProgress = opt.logProgress !== false && logEvery !== 0;
  const logEvery10 = logEvery * 10;

  const started = Date.now();
  let lastSecondStarted = Date.now();
  const sma = new js_lib_1.SimpleMovingAverage(10); // over last 10 seconds
  let processedLastSecond = 0;
  let progress = 0;
  let peakRSS = 0;

  const sizes = logSizes ? new sizeStack_1.SizeStack('json', logSizesBuffer) : undefined;
  const sizesZipped = logZippedSizes
    ? new sizeStack_1.SizeStack('json.gz', logSizesBuffer)
    : undefined;

  logStats(); // initial

  return new stream_1.Transform({
    objectMode: true,
    ...opt,
    transform(chunk, _, cb) {
      progress++;
      processedLastSecond++;
      if (sizes) {
        // Check it, cause gzipping might be delayed here..
        void sizeStack_1.SizeStack.countItem(chunk, logger, sizes, sizesZipped);
      }
      if (logProgress && progress % logEvery === 0) {
        logStats(chunk, false, progress % logEvery10 === 0);
      }
      cb(null, chunk); // pass-through
    },
    final(cb) {
      logStats(undefined, true);
      cb();
    },
  });

  /**
   * Prints one stats line. `chunk` is only used to compute `extra` fields;
   * `final` switches the metric key to `<metric>_final`; `tenx` adds the
   * projected per-hour throughput line.
   */
  function logStats(chunk, final = false, tenx = false) {
    if (!logProgress) return;

    const mem = process.memoryUsage();
    const now = Date.now();
    const batchedProgress = progress * batchSize;
    // `|| 0` collapses NaN/Infinity from a zero-length time window to 0.
    const lastRPS =
      (processedLastSecond * batchSize) / ((now - lastSecondStarted) / 1000) || 0;
    const rpsTotal = Math.round(batchedProgress / ((now - started) / 1000)) || 0;
    lastSecondStarted = now;
    processedLastSecond = 0;
    const rps10 = Math.round(sma.push(lastRPS));
    peakRSS = Math.max(peakRSS, mem.rss);

    const out = {
      [final ? `${metric}_final` : metric]: batchedProgress,
    };
    if (extra) Object.assign(out, extra(chunk, progress));
    if (logHeapUsed) out.heapUsed = js_lib_1._mb(mem.heapUsed);
    if (logHeapTotal) out.heapTotal = js_lib_1._mb(mem.heapTotal);
    if (logRss) out.rss = js_lib_1._mb(mem.rss);
    if (logPeakRSS) out.peakRSS = js_lib_1._mb(peakRSS);
    if (opt.rssMinusHeap) out.rssMinusHeap = js_lib_1._mb(mem.rss - mem.heapTotal);
    if (opt.external) out.external = js_lib_1._mb(mem.external);
    if (opt.arrayBuffers) out.arrayBuffers = js_lib_1._mb(mem.arrayBuffers || 0);
    if (logRPS) Object.assign(out, { rps10, rpsTotal });

    logger.log(util_1.inspect(out, inspectOpt));

    if (sizes?.items.length) {
      logger.log(sizes.getStats());
      if (sizesZipped?.items.length) {
        logger.log(sizesZipped.getStats());
      }
    }

    if (tenx) {
      let perHour = Math.round((batchedProgress * 1000 * 60 * 60) / (now - started)) || 0;
      if (perHour > 900) {
        // 901+/hour reads better as e.g. "1K".
        perHour = Math.round(perHour / 1000) + 'K';
      }
      logger.log(`${colors_1.dimGrey(js_lib_1.localTime().toPretty())} ${colors_1.white(metric)} took ${colors_1.yellow(js_lib_1._since(started))} so far to process ${colors_1.yellow(batchedProgress)} rows, ~${colors_1.yellow(perHour)}/hour`);
    } else if (final) {
      logger.log(`${colors_1.boldWhite(metric)} took ${colors_1.yellow(js_lib_1._since(started))} to process ${colors_1.yellow(batchedProgress)} rows with total RPS of ${colors_1.yellow(rpsTotal)}`);
    }
  }
}
exports.transformLogProgress = transformLogProgress;