// Source: UNPKG — @naturalcycles/nodejs-lib (compiled dist output)
// File: writableFork.js — 46 lines, 1.75 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const __1 = require("../..");
/**
 * Allows "forking" a stream inside pipeline into a number of pipeline chains (2 or more).
 * Currently does NOT (!) maintain backpressure.
 * Error in the forked pipeline will propagate up to the main pipeline (and log error, to be sure).
 * Will wait until all forked pipelines are completed before completing the stream.
 */
function writableFork(chains, opt) {
    // One "input" Readable per forked chain; write() fans each chunk out to all of them.
    const forkInputs = [];
    // Settles when every forked pipeline has finished; rejects on the first failure.
    const allDone = Promise.all(chains.map(async (chain) => {
        const input = __1.readableCreate();
        forkInputs.push(input);
        return await __1._pipeline([input, ...chain]);
    })).catch(err => {
        console.error(err); // ensure the error is logged
        throw err;
    });
    return new stream_1.Writable({
        objectMode: true,
        ...opt,
        write(chunk, _encoding, cb) {
            // Fan the chunk out to every forked chain.
            // No backpressure here: the return value of push() is ignored.
            for (const input of forkInputs) {
                input.push(chunk);
            }
            cb();
        },
        async final(cb) {
            try {
                // Signal EOF (null) to every forked chain, then wait for them all.
                for (const input of forkInputs) {
                    input.push(null);
                }
                console.log(`writableFork.final is waiting for all chains to be done`);
                await allDone;
                console.log(`writableFork.final all chains done`);
                cb();
            }
            catch (err) {
                cb(err);
            }
        },
    });
}
exports.writableFork = writableFork;
//# sourceMappingURL=writableFork.js.map