streaming
Version:
Transforms and other streaming helpers
101 lines (91 loc) • 4.05 kB
JavaScript
;
// TypeScript-emitted ES5 inheritance helper: `__extends(D, B)` makes D a
// subclass of B (copies statics, links prototypes). Reuses an existing
// `this.__extends` if one is already defined in this scope.
var __extends = (this && this.__extends) || (function () {
// Copies static members / links prototype chains. On first call it picks the
// best available mechanism and caches it by reassigning `extendStatics`.
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
// Fallback 1: environments where `__proto__` assignment works.
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
// Fallback 2: last resort — shallow-copy own properties (statics only).
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
// `extends null` is legal; anything else must be a constructor.
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
// Intermediate constructor so `new d()` instances get `constructor === d`
// while still inheriting from b.prototype.
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Queue = void 0;
var stream_1 = require("stream");
/**
Queue is much like streaming.Transformer(transform, options), but it applies the
`transform` function to each piece of data, at most `concurrency` at a time.
Except! the `transform` function does not have access to the stream as `this`,
and so it cannot `this.push(...)` to handle data; it must use the `callback`
function to return the result (or error if an error arose).
Order of the output is not guaranteed, but it shouldn't get mixed up more than
`concurrency` places different.
We use stream.Duplex rather than stream.Transform because output is not
precisely "causally connected" to the input -- there are side-effects (the
duration of the transform function) that complicate the mapping.
Example:
new streaming.Queue(10, (chunk, encoding, callback) => {
setTimeout(() => {
callback(null, {result: 'chunk length is ' + chunk.length + '.'});
}, Math.random() * 500);
}, {objectMode: true});
*/
var Queue = /** @class */ (function (_super) {
    __extends(Queue, _super);
    /**
     * A Duplex stream that applies `transformFn` to each written chunk, with at
     * most `concurrency` transforms in flight at a time.
     *
     * @param {number} concurrency - maximum number of concurrent transforms
     * @param {Function} transformFn - (chunk, encoding, callback) => void; must
     *   report its result (or error) via `callback(err, outputChunk)` — it does
     *   NOT get the stream as `this` and cannot `this.push(...)`.
     * @param {Object} [options] - stream options forwarded to stream.Duplex
     *   (e.g. {objectMode: true}).
     */
    function Queue(concurrency, transformFn, options) {
        var _this = _super.call(this, options) || this;
        _this.concurrency = concurrency;
        _this.transformFn = transformFn;
        // Number of transforms currently in flight.
        _this._inProgress = 0;
        // Set by _final when the writable side has ended but transforms are
        // still running; invoked (after push(null)) once the last one completes.
        _this._finalCallback = null;
        return _this;
    }
    /** Called when the user wants data from this stream.
    From the [stream docs](http://nodejs.org/api/stream.html#stream_readable_read_size_1):
    > When data is available, put it into the read queue by calling
    > readable.push(chunk). If push returns false, then you should stop reading.
    > When _read is called again, you should start pushing more data.
    Since we are mostly pulling, rather than pushing, this is a no-op.
    We might conceivably use it to determine if we are free to stop processing
    incoming tasks; i.e., if no one wants them, we don't need to read them.
    */
    Queue.prototype._read = function (size) {
        // All pushing happens from transform callbacks; nothing to do here.
    };
    Queue.prototype._write = function (chunk, encoding, callback) {
        var _this = this;
        this._inProgress++;
        // Guards against acknowledging this write twice: once synchronously
        // (below the concurrency limit) and again from the transform callback.
        var finalized = false;
        this.transformFn(chunk, encoding, function (err, outputChunk) {
            _this._inProgress--;
            if (err) {
                // Preserve original semantics: surface the error but keep the
                // stream processing subsequent chunks (best-effort).
                _this.emit('error', err);
            }
            else {
                _this.push(outputChunk);
            }
            if (!finalized) {
                // Set the flag BEFORE calling back, in case the callback
                // synchronously re-enters _write.
                finalized = true;
                callback();
            }
            // If the writable side already ended, end the readable side once
            // the last in-flight transform has completed.
            if (_this._inProgress === 0 && _this._finalCallback) {
                _this.push(null);
                var finalCallback = _this._finalCallback;
                _this._finalCallback = null;
                finalCallback();
            }
        });
        // Below the concurrency limit: acknowledge immediately so more writes
        // flow in. At the limit, the ack above (inside the transform callback)
        // provides backpressure. Check `finalized` in case transformFn called
        // its callback synchronously.
        if (this._inProgress < this.concurrency) {
            if (!finalized) {
                finalized = true;
                callback();
            }
        }
    };
    /** Called by Duplex after end() once all writes are flushed: push EOF on
    the readable side, but only after every in-flight transform has finished. */
    Queue.prototype._final = function (callback) {
        if (this._inProgress === 0) {
            this.push(null);
            callback();
        }
        else {
            this._finalCallback = callback;
        }
    };
    return Queue;
}(stream_1.Duplex));
exports.Queue = Queue;