// @naturalcycles/nodejs-lib - Standard library for Node.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AbortableTransform = exports._pipelineToArray = exports._pipeline = void 0;
const stream_1 = require("stream");
const js_lib_1 = require("@naturalcycles/js-lib");
const index_1 = require("../../index");
/**
 * Promisified `stream.pipeline`.
 *
 * Supports `opt.allowClose`, which allows `transformLimit` to actually stop the source Readable
 * without the pipeline failing with ERR_STREAM_PREMATURE_CLOSE.
 */
async function _pipeline(streams, opt = {}) {
const first = streams[0];
const rest = streams.slice(1);
if (opt.allowClose) {
// Do the magic of making the pipeline "abortable"
//
// How it works:
// It finds `sourceReadable` (simply the first item in the passed array of streams),
// finds the last "writable" (the last item), and patches its `_final` method to detect when the
// whole pipeline is "done", resolving the `streamDone` DeferredPromise at that point.
// It then scans through all passed items and finds those capable of "closing" the stream
// (currently `transformLimit` and `transformMap`),
// patching them by attaching `sourceReadable` and `streamDone`.
// These items (transformLimit and transformMap), when they need to "close the stream", call `pipelineClose`.
// `pipelineClose` is the result of 2 sleepless nights of googling and experimentation :)
// It does:
// 1. Stops the "downstream" by doing `this.push(null)`.
// 2. Pauses the `sourceReadable` by calling `sourceReadable.unpipe()`.
// 3. Waits for `streamDone` to ensure that downstream chunks are fully processed (e.g. written to disk).
// 4. Calls `sourceReadable.destroy()`, which emits ERR_STREAM_PREMATURE_CLOSE.
// 5. _pipeline (this function) catches that specific error and suppresses it (because it's expected
//    and inevitable in this flow). Know a better way to close the stream? Tell me!
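// A minimal sketch of the `pipelineClose` flow described above (hypothetical shape;
// the real helper's exact signature may differ):
//
//   function pipelineCloseSketch(transform, sourceReadable, streamDone) {
//     transform.push(null) // 1. stop the downstream
//     sourceReadable?.unpipe() // 2. pause the source Readable
//     void streamDone?.then(() => {
//       // 3. downstream chunks are fully processed by now
//       sourceReadable?.destroy() // 4. emits ERR_STREAM_PREMATURE_CLOSE, suppressed below in step 5
//     })
//   }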
const streamDone = (0, js_lib_1.pDefer)();
const sourceReadable = first;
const last = (0, js_lib_1._last)(streams);
const lastFinal = last._final?.bind(last) || ((cb) => cb());
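// Patch the last stream's `_final`, so that after its own finalization logic completes,
// `streamDone` is resolved - signaling that all downstream chunks have been fully processed.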
last._final = cb => {
lastFinal(() => {
cb();
streamDone.resolve();
});
};
rest.forEach(s => {
// console.log(s)
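// 'DestroyableTransform' is matched by class name: presumably streams created via
// `through2`, which are not instanceof AbortableTransform but support the same props.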
if (s instanceof AbortableTransform || s.constructor.name === 'DestroyableTransform') {
// console.log(`found ${s.constructor.name}, setting props`)
s.sourceReadable = sourceReadable;
s.streamDone = streamDone;
}
});
}
return new Promise((resolve, reject) => {
(0, stream_1.pipeline)(first, ...rest, (err) => {
if (err) {
if (opt.allowClose && err?.code === 'ERR_STREAM_PREMATURE_CLOSE') {
console.log('_pipeline closed (as expected)');
return resolve();
}
// console.log(`_pipeline error`, err)
return reject(err);
}
resolve();
});
});
}
exports._pipeline = _pipeline;
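// Usage sketch (a minimal example, assuming `transformLimit` and `writableVoid` are exported
// elsewhere in this package - names taken from the comments above, exact signatures not verified):
//
//   const { createReadStream } = require('fs')
//
//   await _pipeline(
//     [
//       createReadStream('input.ndjson'),
//       transformLimit({ limit: 100 }), // stops the source Readable after 100 chunks
//       writableVoid(),
//     ],
//     { allowClose: true }, // the expected ERR_STREAM_PREMATURE_CLOSE is suppressed
//   )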
/**
 * Convenience function: runs `_pipeline`, collecting all items that reach the end of the stream
 * into an array and returning it. The last passed stream should therefore be a Transform, not a Writable.
 */
async function _pipelineToArray(streams, opt = {}) {
const a = [];
await _pipeline([...streams, (0, index_1.writablePushToArray)(a)], opt);
return a;
}
exports._pipelineToArray = _pipelineToArray;
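// Usage sketch (assuming `transformMap` as referenced in the comments above; its exact
// signature is not verified here):
//
//   const { Readable } = require('stream')
//
//   const doubled = await _pipelineToArray([
//     Readable.from([1, 2, 3]),
//     transformMap(async n => n * 2),
//   ])
//   // doubled === [2, 4, 6]
/**
 * Base class for Transforms that are able to close the pipeline early.
 * When `opt.allowClose` is set, `_pipeline` attaches `sourceReadable` and `streamDone`
 * to every instance of this class (or of `DestroyableTransform`) found in the pipeline.
 */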
class AbortableTransform extends stream_1.Transform {
}
exports.AbortableTransform = AbortableTransform;
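// A minimal sketch of a custom abortable Transform (assumes a `pipelineClose` helper
// as described above; its location and exact signature are not verified here):
//
//   class TakeFirst extends AbortableTransform {
//     constructor() {
//       super({ objectMode: true })
//     }
//     _transform(chunk, _enc, cb) {
//       this.push(chunk)
//       // Close the whole pipeline after the first chunk, using the props
//       // that _pipeline attached when opt.allowClose was set:
//       pipelineClose('TakeFirst', this, this.sourceReadable, this.streamDone, console)
//       cb()
//     }
//   }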