// stromjs: dependency-free stream utilities for Node.js (TypeScript declarations)
/// <reference types="node" />
import { TransformOptions } from "stream";
import { child } from "./child";
import { concat } from "./concat";
import { duplex } from "./duplex";
import { fromArray } from "./fromArray";
import { last } from "./last";
import { merge } from "./merge";
import { parse } from "./parse";
import { replace } from "./replace";
import { split } from "./split";
import { stringify } from "./stringify";
export declare function strom(defaultOptions?: TransformOptions): {
/**
* Convert an array into a Readable stream of its elements
* @param array Array of elements to stream
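 * @example
 * // Usage sketch (sx is assumed: const sx = strom()):
 * sx.fromArray(["a", "b"]).pipe(process.stdout);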
*/
fromArray: typeof fromArray;
/**
* Return a ReadWrite stream that maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to a new chunk (or a promise of such)
* @param options?
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
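 * @example
 * // Usage sketch (sx is assumed: const sx = strom()):
 * sx.fromArray([1, 2, 3]).pipe(sx.map((n: number) => n * 2, { objectMode: true }));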
*/
map: <T, R>(mapper: (chunk: T, encoding: string) => R, options?: TransformOptions | undefined) => import("stream").Transform;
/**
* Return a ReadWrite stream that flat maps streamed chunks
* @param mapper Mapper function, mapping each (chunk, encoding) to an array of new chunks (or a promise of such)
* @param options?
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
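 * @example
 * // Usage sketch (sx is assumed: const sx = strom()):
 * sx.fromArray(["a,b", "c"]).pipe(sx.flatMap((s: string) => s.split(","), { objectMode: true }));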
*/
flatMap: <T_1, R_1>(mapper: ((chunk: T_1, encoding: string) => R_1[]) | ((chunk: T_1, encoding: string) => Promise<R_1[]>), options?: TransformOptions | undefined) => import("stream").Transform;
/**
* Return a ReadWrite stream that filters out streamed chunks for which the predicate does not hold
 * @param predicate Predicate with which to filter stream chunks
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects.
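 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); keeps only even numbers:
 * sx.fromArray([1, 2, 3, 4]).pipe(sx.filter((n: number) => n % 2 === 0, { objectMode: true }));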
*/
filter: <T_2>(predicate: ((chunk: T_2, encoding: string) => boolean) | ((chunk: T_2, encoding: string) => Promise<boolean>), options?: TransformOptions | undefined) => import("stream").Transform;
/**
 * Return a ReadWrite stream that reduces streamed chunks down to a single value and yields that
 * value
* @param iteratee Reducer function to apply on each streamed chunk
* @param initialValue Initial value
* @param options?
* @param options.readableObjectMode? Whether this stream should behave as a readable stream of objects
* @param options.writableObjectMode? Whether this stream should behave as a writable stream of objects
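 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); yields 6:
 * sx.fromArray([1, 2, 3]).pipe(sx.reduce((sum: number, n: number) => sum + n, 0, { objectMode: true }));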
*/
reduce: <T_3, R_2>(iteratee: ((previousValue: R_2, chunk: T_3, encoding: string) => R_2) | ((previousValue: R_2, chunk: T_3, encoding: string) => Promise<R_2>), initialValue: R_2, options?: TransformOptions | undefined) => import("stream").Transform;
/**
* Return a ReadWrite stream that splits streamed chunks using the given separator
* @param separator? Separator to split by, defaulting to "\n"
* @param options? Defaults to encoding: utf8
* @param options.encoding? Encoding written chunks are assumed to use
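 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); emits one chunk per line:
 * process.stdin.pipe(sx.split("\n"));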
*/
split: typeof split;
/**
* Return a ReadWrite stream that joins streamed chunks using the given separator
* @param separator Separator to join with
* @param options? Defaults to encoding: utf8
* @param options.encoding? Encoding written chunks are assumed to use
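 * @example
 * // Usage sketch (sx is assumed: const sx = strom()):
 * sx.fromArray(["a", "b", "c"]).pipe(sx.join(","));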
*/
join: (separator: string, options?: (import("./baseDefinitions").WithEncoding & TransformOptions) | undefined) => import("stream").Transform;
/**
* Return a ReadWrite stream that replaces occurrences of the given string or regular expression in
* the streamed chunks with the specified replacement string
* @param searchValue Search string to use
* @param replaceValue Replacement string to use
* @param options? Defaults to encoding: utf8
* @param options.encoding Encoding written chunks are assumed to use
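 * @example
 * // Usage sketch (sx is assumed: const sx = strom()):
 * process.stdin.pipe(sx.replace(/foo/g, "bar")).pipe(process.stdout);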
*/
replace: typeof replace;
/**
* Return a ReadWrite stream that parses the streamed chunks as JSON. Each streamed chunk
* must be a fully defined JSON string in utf8.
 * @param format? Serialization format of the incoming chunks, defaulting to SerializationFormats.utf8
 * @param emitError? Whether or not to emit an error when failing to parse.
 * An error will automatically close the stream. Defaults to true.
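 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); emits the parsed object:
 * sx.fromArray(['{"x":1}']).pipe(sx.parse());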
*/
parse: typeof parse;
/**
* Return a ReadWrite stream that stringifies the streamed chunks to JSON
* @param options?
* @param options.pretty If true, whitespace is inserted into the stringified chunks.
*
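 * @example
 * // Usage sketch (sx is assumed: const sx = strom()):
 * sx.fromArray([{ x: 1 }]).pipe(sx.stringify()).pipe(process.stdout);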
*/
stringify: typeof stringify;
/**
* Return a ReadWrite stream that collects streamed chunks into an array or buffer
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects
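 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); emits a single [1, 2, 3]:
 * sx.fromArray([1, 2, 3]).pipe(sx.collect({ objectMode: true }));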
*/
collect: (options?: TransformOptions | undefined) => import("stream").Transform;
/**
* Return a Readable stream of readable streams concatenated together
* @param streams Readable streams to concatenate
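 * @example
 * // Usage sketch (assumes: const sx = strom(); import * as fs from "fs"):
 * sx.concat(fs.createReadStream("a.txt"), fs.createReadStream("b.txt")).pipe(process.stdout);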
*/
concat: typeof concat;
/**
 * Return a Readable stream of readable streams merged together
* @param streams Readable streams to merge
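 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); chunks interleave as they arrive:
 * sx.merge(sx.fromArray([1, 2]), sx.fromArray([3, 4]));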
*/
merge: typeof merge;
/**
 * Return a Duplex stream from a writable stream that, when written to, is assumed to somehow
 * cause the given readable stream to yield chunks
* @param writable Writable stream assumed to cause the readable stream to yield chunks when written to
* @param readable Readable stream assumed to yield chunks when the writable stream is written to
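 * @example
 * // Usage sketch; someWritable and someReadable are hypothetical paired streams:
 * const d = sx.duplex(someWritable, someReadable);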
*/
duplex: typeof duplex;
/**
* Return a Duplex stream from a child process' stdin and stdout
* @param childProcess Child process from which to create duplex stream
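 * @example
 * // Usage sketch (assumes: const sx = strom(); import { spawn } from "child_process"):
 * process.stdin.pipe(sx.child(spawn("grep", ["foo"]))).pipe(process.stdout);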
*/
child: typeof child;
/**
* Return a Promise resolving to the last streamed chunk of the given readable stream, after it has
* ended
* @param readable Readable stream to wait on
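 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); logs 3:
 * sx.last(sx.fromArray([1, 2, 3])).then(v => console.log(v));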
*/
last: typeof last;
/**
 * Stores chunks of data internally in an array and pushes them downstream as a batch once batchSize is reached.
 * @param batchSize Size of the batches, defaults to 1000.
 * @param maxBatchAge? Max lifetime of a batch in milliseconds before it is flushed, defaults to 500.
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects
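 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); emits [1, 2] and then [3] on flush:
 * sx.fromArray([1, 2, 3]).pipe(sx.batch(2, 500, { objectMode: true }));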
*/
batch: (batchSize?: number | undefined, maxBatchAge?: number | undefined, options?: TransformOptions | undefined) => import("stream").Transform;
/**
* Unbatches and sends individual chunks of data.
* @param options?
* @param options.objectMode? Whether this stream should behave as a stream of objects
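 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); round-trips batches back into single chunks:
 * sx.fromArray([1, 2, 3]).pipe(sx.batch(2)).pipe(sx.unbatch({ objectMode: true }));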
*/
unbatch: (options?: TransformOptions | undefined) => import("stream").Transform;
/**
 * Limits the rate at which data is transferred into the stream.
 * @param targetRate? Desired rate in ms.
 * @param period? Period to sleep for when the rate is at or above targetRate.
* @param options?
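 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); throttles chunks toward the target rate:
 * sx.fromArray([1, 2, 3]).pipe(sx.rate(1));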
*/
rate: (targetRate?: number | undefined, period?: number | undefined, options?: (TransformOptions & import("./rate").RateOptions) | undefined) => import("stream").Transform;
/**
 * Limits the number of parallel processes in flight.
 * @param mapper Function to execute on each data chunk.
 * @param parallel Max number of parallel processes.
 * @param sleepTime Amount of time to pause processing when the max number of parallel processes are executing.
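 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); runs up to 4 mappers concurrently:
 * sx.fromArray([1, 2, 3]).pipe(sx.parallelMap(async (n: number) => n * 2, 4, undefined, { objectMode: true }));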
*/
parallelMap: <T_4, R_3>(mapper: (data: T_4) => R_3, parallel?: number | undefined, sleepTime?: number | undefined, options?: TransformOptions | undefined) => import("stream").Transform;
/**
 * Accumulates and sends batches of data. Each chunk that flows into the stream is checked against items
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
* 1. Sliding
* - If the buffer is larger than the batchSize, the front of the buffer is popped to maintain
* the batchSize. When no key is provided, the batchSize is effectively the buffer length. When
* a key is provided, the batchSize is based on the value at that key. For example, given a key
* of `timestamp` and a batchSize of 3000, each item in the buffer will be guaranteed to be
* within 3000 timestamp units from the first element. This means that with a key, multiple elements
* may be spliced off the front of the buffer. The buffer is then pushed into the stream.
* 2. Rolling
* - If the buffer is larger than the batchSize, the buffer is cleared and pushed into the stream.
* When no key is provided, the batchSize is the buffer length. When a key is provided, the batchSize
* is based on the value at that key. For example, given a key of `timestamp` and a batchSize of 3000,
* each item in the buffer will be guaranteed to be within 3000 timestamp units from the first element.
* @param flushStrategy Buffering strategy to use.
* @param batchSize Size of the batch (in units of buffer length or value at key).
* @param keyBy Key to determine if element fits into buffer or items need to be cleared from buffer.
* @param options Transform stream options
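 * @example
 * // Usage sketch; assumes FlushStrategy is exported by the package (e.g. a rolling strategy):
 * sx.fromArray([1, 2, 3, 4]).pipe(sx.accumulator(FlushStrategy.rolling, 3, undefined, { objectMode: true }));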
*/
accumulator: (flushStrategy: import("./accumulator").FlushStrategy, batchSize: number, keyBy?: string | undefined, options?: TransformOptions | undefined) => import("stream").Transform;
/**
 * Accumulates and sends batches of data. Each chunk that flows into the stream is checked against items
* in the buffer. How the buffer is mutated is based on 1 of 2 possible buffering strategies:
* 1. Sliding
* - If the iteratee returns false, the front of the buffer is popped until iteratee returns true. The
* item is pushed into the buffer and buffer is pushed into stream.
* 2. Rolling
* - If the iteratee returns false, the buffer is cleared and pushed into stream. The item is
* then pushed into the buffer.
* @param flushStrategy Buffering strategy to use.
* @param iteratee Function applied to buffer when a chunk of data enters stream to determine if element fits into
* or items need to be cleared from buffer.
* @param options Transform stream options
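 * @example
 * // Usage sketch; assumes FlushStrategy is exported and the iteratee compares a buffered item
 * // against the incoming chunk (hypothetical `ts` field):
 * sx.accumulatorBy(FlushStrategy.sliding, (buffered: any, chunk: any) => chunk.ts - buffered.ts < 3000);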
*/
accumulatorBy: <T_5>(flushStrategy: import("./accumulator").FlushStrategy, iteratee: import("./accumulator").AccumulatorByIteratee<T_5>, options?: TransformOptions | undefined) => import("stream").Transform;
/**
 * Composes multiple streams together. Writing occurs on the first stream; reading occurs from the last stream.
* @param streams Array of streams to compose. Minimum of two.
* @param errorCallback a function that handles any error coming out of the pipeline
* @param options Transform stream options
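 * @example
 * // Usage sketch (sx is assumed: const sx = strom()); split lines, then uppercase them, as one stream:
 * const pipeline = sx.compose([sx.split(), sx.map((s: string) => s.toUpperCase())], err => console.error(err));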
*/
compose: (streams: (import("stream").Readable | import("stream").Writable)[], errorCallback?: ((err: any) => void) | undefined, options?: TransformOptions | undefined) => import("./compose").Compose;
/**
 * Demultiplexes the stream, routing each written chunk to an output pipeline selected by key.
 * @param pipelineConstructor Constructor for a new output pipeline. Should return a Writable or ReadWrite stream.
* @param demuxBy
* @param demuxBy.key? Key to fetch value from source chunks to demultiplex source.
* @param demuxBy.keyBy? Function to fetch value from source chunks to demultiplex source.
* @param options Writable stream options
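 * @example
 * // Usage sketch; createPipelineFor is hypothetical and returns a Writable per key:
 * const sink = sx.demux(key => createPipelineFor(key), "userId");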
*/
demux: (pipelineConstructor: (destKey?: string | undefined, chunk?: any) => import("stream").Writable | import("stream").Writable[], demuxBy: string | ((chunk: any) => string), options?: import("./demux").DemuxOptions | undefined) => import("stream").Duplex;
/**
* Create a new strom instance overriding the defaults
*/
instance: typeof strom;
};
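
// Usage sketch: create a default instance, then derive an objectMode instance via `instance`.
// Assumes the package entry exports `strom` as declared above.
//
//   import { strom } from "stromjs";
//   const sx = strom();
//   const objectSx = sx.instance({ objectMode: true });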