@naturalcycles/nodejs-lib
Version: (not captured)
Standard library for Node.js
120 lines (119 loc) • 6.45 kB
TypeScript
import type { Transform } from 'node:stream';
import type { ReadableStream as WebReadableStream } from 'node:stream/web';
import type { ZlibOptions, ZstdOptions } from 'node:zlib';
import type { AbortableAsyncMapper, AsyncIndexedMapper, AsyncPredicate, END, IndexedMapper, Integer, NonNegativeInteger, PositiveInteger, Predicate, SKIP } from '@naturalcycles/js-lib/types';
import type { ReadableTyped, TransformOptions, TransformTyped, WritableTyped } from './stream.model.js';
import type { TransformLogProgressOptions } from './transform/transformLogProgress.js';
import type { TransformMapOptions } from './transform/transformMap.js';
import type { TransformMapSimpleOptions } from './transform/transformMapSimple.js';
import type { TransformMapSyncOptions } from './transform/transformMapSync.js';
import type { TransformOffsetOptions } from './transform/transformOffset.js';
import type { TransformThrottleOptions } from './transform/transformThrottle.js';
import type { TransformThrottleByRSSOptions } from './transform/transformThrottleByRSS.js';
import type { TransformWarmupOptions } from './transform/transformWarmup.js';
/**
 * Fluent, typed builder around a Node.js stream pipeline.
 * A Pipeline is created from a source (one of the static `from*` factories),
 * composed via chainable transform methods, and executed by a terminal method
 * (`run`, `to`, `toArray`, `toFile`, `toNDJsonFile`, `forEach`, `forEachSync`).
 *
 * NOTE(review): this is a declaration file — comments below are derived from
 * the signatures and existing docs; confirm details against the implementation.
 */
export declare class Pipeline<T = unknown> {
private readonly source;
private transforms;
private destination?;
private readableLimit?;
private objectMode;
private abortableSignal;
// Not directly constructible — use the static `from*` factories below.
private constructor();
/** Starts a pipeline from an existing typed Readable. */
static from<T>(source: ReadableTyped<T>): Pipeline<T>;
/**
 * Useful in cases when Readable is not immediately available,
 * but only available after an async operation is completed.
 * Implemented via a proxy Transform, which should be transparent.
 */
static fromAsyncReadable<T = unknown>(fn: () => Promise<ReadableTyped<T>>): Pipeline<T>;
/** Starts a pipeline from a WHATWG (web) ReadableStream. */
static fromWeb<T>(webReadableStream: WebReadableStream<T>): Pipeline<T>;
/**
 * Technically same as `fromIterable` (since Array is Iterable),
 * but named a bit friendlier.
 */
static fromArray<T>(input: T[]): Pipeline<T>;
/** Starts a pipeline from any sync or async Iterable. */
static fromIterable<T>(input: Iterable<T> | AsyncIterable<T>): Pipeline<T>;
/** Starts a pipeline that reads and parses an NDJSON file, emitting one object per line. */
static fromNDJsonFile<T>(sourceFilePath: string): Pipeline<T>;
/** Starts a binary pipeline that reads the given file as chunks of bytes. */
static fromFile(sourceFilePath: string): Pipeline<Uint8Array>;
/**
 * Limits the source Readable, but using `.take(limit)` on it.
 * This is THE preferred way of limiting the source.
 * Passing `undefined` applies no limit.
 */
limitSource(limit: NonNegativeInteger | undefined): this;
/**
 * If possible - STRONGLY PREFER applying `.take(limit)` on the source Readable,
 * as it's a clean graceful way of limiting the Readable. Example:
 *
 * Pipeline.from(myReadable.take(10))
 *
 * or
 *
 * Pipeline
 * .from(myReadable)
 * .limitSource(10)
 *
 * If applying `take` on Readable is not possible - use this method at your own risk.
 * Why warning?
 * The limit works by aborting the stream, and then catching the error - certainly
 * less clean than `.take()` on the source.
 */
limit(limit: NonNegativeInteger | undefined): this;
/** Groups items into arrays of up to `chunkSize`, turning Pipeline<T> into Pipeline<T[]>. */
chunk(chunkSize: PositiveInteger, opt?: TransformOptions): Pipeline<T[]>;
/** Flattens array items into individual items. Only callable when T is an array (enforced via `this` type). */
flatten<TO>(this: Pipeline<readonly TO[]>): Pipeline<TO>;
/** Like `flatten`, but callable on any Pipeline: array items are flattened, non-array items pass through (per the conditional return type). */
flattenIfNeeded(): Pipeline<T extends readonly (infer TO)[] ? TO : T>;
/** Adds a progress-logging transform; items pass through unchanged. */
logProgress(opt?: TransformLogProgressOptions): this;
/**
 * Async map over items. The mapper may return the SKIP sentinel to drop an item,
 * or the END sentinel to stop the stream (per the mapper's return type).
 */
map<TO>(mapper: AbortableAsyncMapper<T, TO | typeof SKIP | typeof END>, opt?: TransformMapOptions<T, TO>): Pipeline<TO>;
/** Synchronous map over items; supports the same SKIP/END sentinels as `map`. */
mapSync<TO>(mapper: IndexedMapper<T, TO | typeof SKIP | typeof END>, opt?: TransformMapSyncOptions): Pipeline<TO>;
/** Simplest synchronous map: every item is mapped 1:1, no SKIP/END sentinels. */
mapSimple<TO>(mapper: IndexedMapper<T, TO>, opt?: TransformMapSimpleOptions): Pipeline<TO>;
/** Keeps only items for which the async predicate resolves truthy. */
filter(asyncPredicate: AsyncPredicate<T>, opt?: TransformMapOptions): this;
/** Keeps only items for which the sync predicate returns truthy. */
filterSync(predicate: Predicate<T>, opt?: TransformOptions): this;
/** Adds an offset transform (e.g. skipping items — confirm exact semantics in TransformOffsetOptions). */
offset(opt: TransformOffsetOptions): this;
/** Runs an async side-effect per item; the return value is ignored and items pass through unchanged. */
tap(fn: AsyncIndexedMapper<T, any>, opt?: TransformOptions): this;
/** Sync variant of `tap`: side-effect per item, items pass through unchanged. */
tapSync(fn: IndexedMapper<T, any>, opt?: TransformOptions): this;
/** Adds a throughput-throttling transform. */
throttle(opt: TransformThrottleOptions): this;
/** Throttles based on process RSS memory usage. */
throttleByRSS(opt: TransformThrottleByRSSOptions): this;
/**
 * @experimental to be removed after transformMap2 is stable
 */
warmup(opt: TransformWarmupOptions): this;
/** Appends a custom typed Transform to the pipeline. */
transform<TO>(transform: TransformTyped<T, TO>): Pipeline<TO>;
/**
 * Helper method to add multiple transforms at once.
 * Not type safe! Prefer using singular `transform()` multiple times for type safety.
 */
transformMany<TO>(transforms: Transform[]): Pipeline<TO>;
/** Forks the stream into a secondary pipeline built by `fn`; the main pipeline continues unchanged. */
fork(fn: (pipeline: Pipeline<T>) => Promise<void>, opt?: TransformOptions): this;
/**
 * Utility method just to conveniently type-cast the current Pipeline type.
 * No runtime effect.
 */
typeCastAs<TO>(): Pipeline<TO>;
/** Explicitly sets objectMode for subsequent stream stages. */
setObjectMode(objectMode: boolean): this;
/**
 * Transform the stream of Objects into a stream of JSON lines.
 * Technically, it goes into objectMode=false, so it's a binary stream at the end.
 */
toNDJson(): Pipeline<Uint8Array>;
/** Inverse of `toNDJson`: parses a binary NDJSON stream into objects. */
parseNDJson<TO = unknown>(this: Pipeline<Uint8Array>): Pipeline<TO>;
/** Splits a binary stream on newline boundaries, emitting one Buffer per line. */
splitOnNewline(this: Pipeline<Uint8Array>): Pipeline<Buffer>;
/** JSON-parses each binary/string item into an object. */
parseJson<TO = unknown>(this: Pipeline<Buffer> | Pipeline<Uint8Array> | Pipeline<string>): Pipeline<TO>;
/** Gzip-compresses a binary stream (node:zlib). */
gzip(this: Pipeline<Uint8Array>, opt?: ZlibOptions): Pipeline<Uint8Array>;
/** Gunzip-decompresses a binary stream (node:zlib). */
gunzip(this: Pipeline<Uint8Array>, opt?: ZlibOptions): Pipeline<Uint8Array>;
/** Zstd-compresses a binary stream (node:zlib zstd support). */
zstdCompress(this: Pipeline<Uint8Array>, level?: Integer, // defaults to 3
opt?: ZstdOptions): Pipeline<Uint8Array>;
/** Zstd-decompresses a binary stream. */
zstdDecompress(this: Pipeline<Uint8Array>, opt?: ZstdOptions): Pipeline<Uint8Array>;
/** Terminal: runs the pipeline and collects all items into an array. */
toArray(opt?: TransformOptions): Promise<T[]>;
/** Terminal: runs the pipeline, writing the (binary) stream to a file. */
toFile(outputFilePath: string): Promise<void>;
/**
 * Terminal: serializes items as NDJSON and writes them to a file,
 * compressing based on the file extension.
 *
 * level corresponds to zstd compression level (if filename ends with .zst),
 * or gzip compression level (if filename ends with .gz).
 * Default levels are:
 * gzip: 6
 * zstd: 3 (optimized for throughput, not size, may be larger than gzip at its default level)
 */
toNDJsonFile(outputFilePath: string, level?: Integer): Promise<void>;
/** Terminal: runs the pipeline into the given Writable destination. */
to(destination: WritableTyped<T>): Promise<void>;
/** Terminal: runs the pipeline, invoking the async `fn` for every item. */
forEach(fn: AsyncIndexedMapper<T, void>, opt?: TransformMapOptions<T, void> & TransformLogProgressOptions<T>): Promise<void>;
/** Terminal: sync variant of `forEach`. */
forEachSync(fn: IndexedMapper<T, void>, opt?: TransformMapSyncOptions<T, void> & TransformLogProgressOptions<T>): Promise<void>;
/** Terminal: runs the pipeline to completion, discarding items. */
run(): Promise<void>;
}