@naturalcycles/nodejs-lib
Standard library for Node.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.writableFork = void 0;
const stream_1 = require("stream");
const __1 = require("../..");
/**
 * Allows "forking" a stream inside a pipeline into a number of pipeline chains (2 or more).
 * Currently does NOT (!) maintain backpressure.
 * An error in any forked pipeline will propagate up to the main pipeline (and is logged, to be sure).
 * Waits until all forked pipelines have completed before completing the stream.
*
* @experimental
*/
function writableFork(chains, opt) {
    const readables = [];
    // Start one pipeline per chain up-front; each chain is fed from a
    // manually-pushed readable (readableCreate), and completion of ALL
    // chains is awaited in final()
    const allChainsDone = Promise.all(chains.map(async (chain) => {
        const readable = (0, __1.readableCreate)();
        readables.push(readable);
        return await (0, __1._pipeline)([readable, ...chain]);
    })).catch(err => {
        console.error(err); // ensure the error is logged
        throw err;
    });
    return new stream_1.Writable({
        objectMode: true,
        ...opt,
        write(chunk, _, cb) {
            // Push/fork to all sub-streams.
            // No backpressure is ensured here: it pushes regardless of the
            // return value of push(), i.e. regardless of the sub-streams' buffer state
            readables.forEach(readable => readable.push(chunk));
            cb();
        },
        async final(cb) {
            try {
                // Push null (complete) to all sub-streams
                readables.forEach(readable => readable.push(null));
                console.log(`writableFork.final is waiting for all chains to be done`);
                await allChainsDone;
                console.log(`writableFork.final all chains done`);
                cb();
            }
            catch (err) {
                cb(err);
            }
        },
    });
}
exports.writableFork = writableFork;
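
Below is a usage sketch (an addition, not part of the published file): it forks one object-mode source into two independent chains. It assumes `writableFork` and `_pipeline` are re-exported from the package root, as the `exports` assignments above suggest; `writableCollect` is a local helper defined here for illustration, and everything else is Node's built-in `stream` module.

const { Readable, Transform, Writable } = require('stream');
const { writableFork, _pipeline } = require('@naturalcycles/nodejs-lib');

// Local helper for this sketch: an object-mode sink that collects chunks into an array
function writableCollect(target) {
    return new Writable({
        objectMode: true,
        write(chunk, _, cb) {
            target.push(chunk);
            cb();
        },
    });
}

const doubled = [];
const strings = [];

_pipeline([
    Readable.from([1, 2, 3]),
    writableFork([
        // chain 1: double each number, then collect
        [
            new Transform({
                objectMode: true,
                transform(n, _, cb) {
                    cb(null, n * 2);
                },
            }),
            writableCollect(doubled),
        ],
        // chain 2: stringify each number, then collect
        [
            new Transform({
                objectMode: true,
                transform(n, _, cb) {
                    cb(null, String(n));
                },
            }),
            writableCollect(strings),
        ],
    ]),
]).then(() => {
    // final() awaits allChainsDone, so both arrays are fully populated here
    console.log(doubled); // [2, 4, 6]
    console.log(strings); // ['1', '2', '3']
});

Note that because `final()` awaits all forked pipelines, the outer pipeline only resolves once every chain has drained; an error thrown in either chain would instead reject the outer `_pipeline` promise.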