// stromjs: dependency-free stream utilities for Node.js
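// Standard TypeScript-emitted helpers for object spread ({ ...a, ...b }) and
// array spread ([...xs]), inlined here rather than imported from tslib.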
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
};
exports.__esModule = true;
exports.strom = void 0;
var accumulator_1 = require("./accumulator");
var batch_1 = require("./batch");
var child_1 = require("./child");
var collect_1 = require("./collect");
var concat_1 = require("./concat");
var duplex_1 = require("./duplex");
var filter_1 = require("./filter");
var flatMap_1 = require("./flatMap");
var fromArray_1 = require("./fromArray");
var join_1 = require("./join");
var last_1 = require("./last");
var map_1 = require("./map");
var merge_1 = require("./merge");
var parallelMap_1 = require("./parallelMap");
var parse_1 = require("./parse");
var rate_1 = require("./rate");
var reduce_1 = require("./reduce");
var replace_1 = require("./replace");
var split_1 = require("./split");
var stringify_1 = require("./stringify");
var unbatch_1 = require("./unbatch");
var compose_1 = require("./compose");
var demux_1 = require("./demux");
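/**
 * Create a strom instance whose stream factories share the given default options.
 * @param defaultOptions Options merged into every factory call, defaulting to { objectMode: true }
 * @example
 * // Sketch: a default object-mode instance.
 * const s = strom();
 */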
function strom(defaultOptions) {
if (defaultOptions === void 0) { defaultOptions = { objectMode: true }; }
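    // Merge the instance defaults with the options expected at argument position n,
    // padding any omitted intermediate arguments with undefined before delegating to fn.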
function withDefaultOptions(n, fn) {
return function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
var options = __assign(__assign({}, defaultOptions), (args[n] || {}));
var provided = args.slice(0, n);
var nextArgs = __spreadArray(__spreadArray(__spreadArray([], provided, true), Array(n - provided.length).fill(undefined), true), [
options,
], false);
return fn.apply(void 0, nextArgs);
};
}
return {
/**
* Convert an array into a Readable stream of its elements
* @param array Array of elements to stream
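* @example
* // Sketch; assumes an instance `s` created via strom().
* s.fromArray(["a", "b", "c"]).on("data", (chunk) => console.log(chunk)); // "a", "b", "c"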
*/
fromArray: fromArray_1.fromArray,
/**
* Return a ReadWrite stream that maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
* @param options?
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
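* @example
* // Sketch; assumes s = strom().
* s.fromArray([1, 2, 3])
*     .pipe(s.map((n) => n * 2))
*     .on("data", (n) => console.log(n)); // 2, 4, 6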
*/
map: withDefaultOptions(1, map_1.map),
/**
* Return a ReadWrite stream that flat maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
* @param options?
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
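* @example
* // Sketch; assumes s = strom().
* s.fromArray(["a b", "c"])
*     .pipe(s.flatMap((line) => line.split(" ")))
*     .on("data", (word) => console.log(word)); // "a", "b", "c"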
*/
flatMap: withDefaultOptions(1, flatMap_1.flatMap),
/**
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
* @param predicate Predicate with which to filter streamed chunks
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects.
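* @example
* // Sketch; assumes s = strom().
* s.fromArray([1, 2, 3, 4])
*     .pipe(s.filter((n) => n % 2 === 0))
*     .on("data", (n) => console.log(n)); // 2, 4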
*/
filter: withDefaultOptions(1, filter_1.filter),
/**
* Return a ReadWrite stream that reduces streamed chunks down to a single value and yield that
* value
* @param iteratee Reducer function to apply on each streamed chunk
* @param initialValue Initial value
* @param options?
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
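* @example
* // Sketch; assumes s = strom(). The single reduced value is emitted once the input ends.
* s.fromArray([1, 2, 3])
*     .pipe(s.reduce((sum, n) => sum + n, 0))
*     .on("data", (total) => console.log(total)); // 6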
*/
reduce: withDefaultOptions(2, reduce_1.reduce),
/**
* Return a ReadWrite stream that splits streamed chunks using the given separator
* @param separator? Separator to split by, defaulting to "\n"
* @param options? Defaults to encoding: utf8
* @param options.encoding? Encoding written chunks are assumed to use
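* @example
* // Sketch; assumes s = strom().
* process.stdin
*     .pipe(s.split("\n"))
*     .on("data", (line) => console.log(line)); // one event per input line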
*/
split: split_1.split,
/**
* Return a ReadWrite stream that joins streamed chunks using the given separator
* @param separator Separator to join with
* @param options? Defaults to encoding: utf8
* @param options.encoding? Encoding written chunks are assumed to use
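* @example
* // Sketch; assumes s = strom().
* s.fromArray(["a", "b"])
*     .pipe(s.join(","))
*     .pipe(process.stdout); // prints "a,b"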
*/
join: withDefaultOptions(1, join_1.join),
/**
* Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
* the streamed chunks with the specified replacement string
* @param searchValue Search string to use
* @param replaceValue Replacement string to use
* @param options? Defaults to encoding: utf8
* @param options.encoding? Encoding written chunks are assumed to use
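* @example
* // Sketch; assumes s = strom().
* s.fromArray(["hello world"])
*     .pipe(s.replace("world", "there"))
*     .pipe(process.stdout); // prints "hello there"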
*/
replace: replace_1.replace,
/**
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
* must be a fully defined JSON string in utf8.
* @param format? Serialization format of the incoming chunks, defaulting to SerializationFormats.utf8
* @param emitError? Whether to emit an error when parsing fails; an error automatically
* closes the stream. Defaults to true.
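* @example
* // Sketch; assumes s = strom(). Parses newline-delimited JSON from stdin.
* process.stdin
*     .pipe(s.split("\n"))
*     .pipe(s.parse())
*     .on("data", (obj) => console.log(obj));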
*/
parse: parse_1.parse,
/**
* Return a ReadWrite stream that stringifies the streamed chunks to JSON
* @param options?
* @param options.pretty If true, whitespace is inserted into the stringified chunks.
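* @example
* // Sketch; assumes s = strom().
* s.fromArray([{ a: 1 }, { b: 2 }])
*     .pipe(s.stringify())
*     .pipe(s.join("\n"))
*     .pipe(process.stdout);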
*/
stringify: stringify_1.stringify,
/**
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects
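* @example
* // Sketch; assumes s = strom().
* s.fromArray([1, 2, 3])
*     .pipe(s.collect())
*     .on("data", (all) => console.log(all)); // [1, 2, 3]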
*/
collect: withDefaultOptions(0, collect_1.collect),
/**
* Return a Readable stream of readable streams concatenated together
* @param streams Readable streams to concatenate
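* @example
* // Sketch; assumes s = strom() and const fs = require("fs").
* s.concat(fs.createReadStream("a.txt"), fs.createReadStream("b.txt"))
*     .pipe(process.stdout); // a.txt in full, then b.txt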
*/
concat: concat_1.concat,
/**
* Return a Readable stream of readable streams merged together
* @param streams Readable streams to merge
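* @example
* // Sketch; assumes s = strom(); sourceA and sourceB are hypothetical Readables.
* s.merge(sourceA, sourceB)
*     .on("data", (chunk) => console.log(chunk)); // chunks interleaved as they arrive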
*/
merge: merge_1.merge,
/**
* Return a Duplex stream from a writable stream that is assumed to somehow, when written to,
* cause the given readable stream to yield chunks
* @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
* @param readable Readable stream assumed to yield chunks when the writable stream is written to
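* @example
* // Sketch; assumes s = strom(); all four streams here are hypothetical.
* const rw = s.duplex(writable, readable);
* upstream.pipe(rw).pipe(downstream);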
*/
duplex: duplex_1.duplex,
/**
* Return a Duplex stream from a child process' stdin and stdout
* @param childProcess Child process from which to create duplex stream
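* @example
* // Sketch; assumes s = strom() and const { spawn } = require("child_process").
* const grep = s.child(spawn("grep", ["foo"]));
* process.stdin.pipe(grep).pipe(process.stdout);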
*/
child: child_1.child,
/**
* Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
* ended
* @param readable Readable stream to wait on
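* @example
* // Sketch; assumes s = strom(), inside an async function.
* const final = await s.last(s.fromArray([1, 2, 3])); // 3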
*/
last: last_1.last,
/**
* Stores chunks of data internally in an array and emits the array as a batch when batchSize
* is reached or the batch exceeds maxBatchAge.
* @param batchSize Size of the batches, defaults to 1000.
* @param maxBatchAge? Max lifetime of a batch in milliseconds, defaults to 500.
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects
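* @example
* // Sketch; assumes s = strom().
* s.fromArray([1, 2, 3, 4, 5])
*     .pipe(s.batch(2))
*     .on("data", (b) => console.log(b)); // [1, 2], [3, 4], then [5] on flush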
*/
batch: withDefaultOptions(2, batch_1.batch),
/**
* Unbatches and sends individual chunks of data.
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects
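* @example
* // Sketch; assumes s = strom().
* s.fromArray([[1, 2], [3]])
*     .pipe(s.unbatch())
*     .on("data", (n) => console.log(n)); // 1, 2, 3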
*/
unbatch: withDefaultOptions(0, unbatch_1.unbatch),
/**
* Limits the rate of data flowing through the stream.
* @param targetRate? Desired rate in ms.
* @param period? Period to sleep for when the rate is at or above targetRate.
* @param options?
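* @example
* // Sketch; assumes s = strom(); source and sink are hypothetical streams.
* source.pipe(s.rate(100)).pipe(sink); // throttles flow per the parameters above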
*/
rate: withDefaultOptions(2, rate_1.rate),
/**
* Limits number of parallel processes in flight.
* @param parallel Max number of parallel processes.
* @param func Function to execute on each data chunk.
* @param pause Amount of time to pause processing when the maximum number of parallel processes is in flight.
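* @example
* // Sketch following the parameter order documented above; ids and fetchUser are hypothetical.
* s.fromArray(ids)
*     .pipe(s.parallelMap(4, (id) => fetchUser(id)))
*     .on("data", (user) => console.log(user));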
*/
parallelMap: withDefaultOptions(3, parallelMap_1.parallelMap),
/**
* Accumulates and sends batches of data. Each chunk that flows into the stream is checked against items
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
* 1. Sliding
* - If the buffer is larger than the batchSize, the front of the buffer is popped to maintain
* the batchSize. When no key is provided, the batchSize is effectively the buffer length. When
* a key is provided, the batchSize is based on the value at that key. For example, given a key
* of `timestamp` and a batchSize of 3000, each item in the buffer will be guaranteed to be
* within 3000 timestamp units from the first element. This means that with a key, multiple elements
* may be spliced off the front of the buffer. The buffer is then pushed into the stream.
* 2. Rolling
* - If the buffer is larger than the batchSize, the buffer is cleared and pushed into the stream.
* When no key is provided, the batchSize is the buffer length. When a key is provided, the batchSize
* is based on the value at that key. For example, given a key of `timestamp` and a batchSize of 3000,
* each item in the buffer will be guaranteed to be within 3000 timestamp units from the first element.
* @param flushStrategy Buffering strategy to use.
* @param batchSize Size of the batch (in units of buffer length or value at key).
* @param keyBy Key to determine if element fits into buffer or items need to be cleared from buffer.
* @param options Transform stream options
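* @example
* // Sketch; assumes s = strom(). The "rolling" strategy value shown is an assumption;
* // use whatever FlushStrategy value the accumulator module exports.
* s.fromArray([1, 2, 3, 4, 5])
*     .pipe(s.accumulator("rolling", 3))
*     .on("data", (b) => console.log(b)); // [1, 2, 3], then [4, 5] on end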
*/
accumulator: withDefaultOptions(3, accumulator_1.accumulator),
/**
* Accumulates and sends batches of data. Each chunk that flows into the stream is checked against items
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
* 1. Sliding
* - If the iteratee returns false, the front of the buffer is popped until iteratee returns true. The
* item is pushed into the buffer and buffer is pushed into stream.
* 2. Rolling
* - If the iteratee returns false, the buffer is cleared and pushed into stream. The item is
* then pushed into the buffer.
* @param flushStrategy Buffering strategy to use.
* @param iteratee Function applied to the buffer when a chunk enters the stream, determining whether
* the element fits into the buffer or items need to be cleared from it.
* @param options Transform stream options
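* @example
* // Sketch; the strategy value and iteratee signature shown are assumptions, as above.
* s.fromArray([1, 2, 10, 11])
*     .pipe(s.accumulatorBy("rolling", (chunk, buffered) => chunk - buffered < 5))
*     .on("data", (b) => console.log(b));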
*/
accumulatorBy: withDefaultOptions(2, accumulator_1.accumulatorBy),
/**
* Composes multiple streams together: writes go into the first stream and reads come from the last.
* @param streams Array of streams to compose. Minimum of two.
* @param errorCallback a function that handles any error coming out of the pipeline
* @param options Transform stream options
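* @example
* // Sketch; assumes s = strom(). Bundles split + parse into one reusable stage.
* const ndjson = s.compose([s.split("\n"), s.parse()], (err) => console.error(err));
* process.stdin.pipe(ndjson).on("data", (obj) => console.log(obj));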
*/
compose: withDefaultOptions(2, compose_1.compose),
/**
* Demultiplexes the source stream into multiple output pipelines, routing each chunk by a key derived from it.
* @param construct Constructor for new output source. Should return a Writable or ReadWrite stream.
* @param demuxBy
* @param demuxBy.key? Key to fetch value from source chunks to demultiplex source.
* @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source.
* @param options Writable stream options
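* @example
* // Sketch; assumes s = strom(); makeUserPipeline is a hypothetical constructor returning
* // a Writable, and the constructor argument shown is an assumption. source is hypothetical too.
* const sink = s.demux((userId) => makeUserPipeline(userId), { key: "userId" });
* source.pipe(sink);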
*/
demux: withDefaultOptions(2, demux_1.demux),
/**
* Create a new strom instance overriding the defaults
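* @example
* // Sketch; assumes s = strom(). Derives a non-objectMode instance.
* const binary = s.instance({ objectMode: false });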
*/
instance: strom
};
}
exports.strom = strom;