/**
 * sflow is a powerful and highly extensible library for processing and
 * manipulating streams of data effortlessly. Inspired by the functional
 * programming paradigm, it provides a rich set of utilities for transforming
 * streams, including chunking, filtering, and more.
 */

import DIE from "phpdie";
import PolyfillTextEncoderStream from "polyfill-text-encoder-stream";
import type { Ord } from "rambda";
import type { FieldPathByValue } from "react-hook-form";
import type { AsyncOrSync } from "ts-essentials";
import type { Split } from "ts-toolbelt/out/String/Split";
import { asyncMaps } from "./asyncMaps";
import type { Awaitable } from "./Awaitable";
import { cacheLists } from "./cacheLists";
import { cacheSkips } from "./cacheSkips";
import { cacheTails } from "./cacheTails";
import { chunkBys } from "./chunkBys";
import { chunkIfs } from "./chunkIfs";
import { chunkIntervals } from "./chunkIntervals";
import { chunks } from "./chunks";
import type { ChunkTransformer } from "./chunkTransforms";
import { concats, concatStream } from "./concats";
import { confluences } from "./confluences";
import { convolves } from "./convolves";
import { debounces } from "./debounces";
import { filters } from "./filters";
import { finds } from "./finds";
import { flatMaps } from "./flatMaps";
import { flats } from "./flats";
import type { FlowSource } from "./FlowSource";
import { forEachs } from "./forEachs";
import { toStream } from "./froms";
import { heads } from "./heads";
import { limits } from "./limits";
import { lines } from "./lines";
import { logs } from "./logs";
import { mapAddFields } from "./mapAddFields";
import { maps } from "./maps";
import { merges } from "./merges";
import { mergeStream } from "./mergeStream";
import { mergeStreamsByAscend, mergeStreamsByDescend } from "./mergeStreamsBy";
import { nils } from "./nils";
import { peeks } from "./peeks";
import { pMaps } from "./pMaps";
import { portals } from "./portals";
import { reduceEmits } from "./reduceEmits";
import { reduces } from "./reduces";
import { riffles } from "./riffles";
import { skips } from "./skips";
import { slices } from "./slices";
import type { SourcesType } from "./SourcesType";
import { streamAsyncIterator } from "./streamAsyncIterator";
import { matchAlls, matchs, replaceAlls, replaces } from "./strings";
import { tails } from "./tails";
import { tees } from "./tees";
import { terminates } from "./terminates";
import { throttles } from "./throttles";
import { throughs } from "./throughs";
import { uniqBys, uniqs } from "./uniqs";
import type { Unwinded } from "./Unwinded";
import { unwinds } from "./unwinds";
import { wseToArray, wseToPromise } from "./wse";
import { csvFormats, csvParses, tsvFormats, tsvParses } from "./xsvStreams";
import { toLatests } from "./toLatest";

export type Reducer<S, T> = (state: S, x: T, i: number) => Awaitable<S>;
export type EmitReducer<S, T, R> = (
  state: S,
  x: T,
  i: number
) => Awaitable<{ next: S; emit: R }>;
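// Illustrative sketch (not from the original source): a Reducer folds each
// upstream item into a running state that is emitted as-is, while an
// EmitReducer keeps private state (`next`) separate from the value pushed
// downstream (`emit`).
//
//   const sum: Reducer<number, number> = (state, x) => state + x;
//   const runningAvg: EmitReducer<{ total: number; n: number }, number, number> =
//     ({ total, n }, x) => ({
//       next: { total: total + x, n: n + 1 },
//       emit: (total + x) / (n + 1),
//     });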
export interface BaseFlow<T> {
  _type: T;
  readable: ReadableStream<T>;
  writable: WritableStream<T>;
  /** @deprecated use chunk */
  buffer(...args: Parameters<typeof chunks<T>>): sflow<T[]>;
  cacheSkip(...args: Parameters<typeof cacheSkips<T>>): sflow<T>;
  cacheList(...args: Parameters<typeof cacheLists<T>>): sflow<T>;
  cacheTail(...args: Parameters<typeof cacheTails<T>>): sflow<T>;
  chunk(...args: Parameters<typeof chunks<T>>): sflow<T[]>;
  /** inverse of flat, chunk all items */
  chunk(): sflow<T[]>;
  /** inverse of flat, chunk or buffer a length as array */
  chunk(...args: Parameters<typeof chunks<T>>): sflow<T[]>;
  /** group by (only groups consecutive items) */
  chunkBy(...args: Parameters<typeof chunkBys<T>>): sflow<T[]>;
  /** @see {@link chunkIfs} */
  chunkIf(...args: Parameters<typeof chunkIfs<T>>): sflow<T[]>;
  chunkTransforms<T>(options: {
    start?: ChunkTransformer<T>;
    transform?: ChunkTransformer<T>;
    flush?: ChunkTransformer<T>;
  }): sflow<T>;
  /** @see convolves */
  convolve(...args: Parameters<typeof convolves<T>>): sflow<T[]>;
  /** acts as pipeThrough<T, T> */
  portal: {
    (): sflow<T>;
    (stream: TransformStream<T, T>): sflow<T>;
    (fn: (s: sflow<T>) => FlowSource<T>): sflow<T>; // fn overload must come first
  };
  /** acts as pipeThrough, alias of by */
  through: {
    (): sflow<T>;
    (stream: TransformStream<T, T>): sflow<T>;
    <R>(fn: (s: sflow<T>) => FlowSource<R>): sflow<R>; // fn overload must come first
    <R>(stream: TransformStream<T, R>): sflow<R>;
  };
  /** acts as pipeThrough, alias of through */
  by: {
    (): sflow<T>;
    (stream: TransformStream<T, T>): sflow<T>;
    <R>(fn: (s: sflow<T>) => AsyncOrSync<FlowSource<R>>): sflow<R>; // fn overload must come first
    <R>(stream: TransformStream<T, R>): sflow<R>;
  };
  /** acts as pipeThrough, but lazily: only pulls upstream when downstream pulls */
  byLazy: {
    (stream: TransformStream<T, T>): sflow<T>;
    <R>(stream: TransformStream<T, R>): sflow<R>;
  };
  /** @deprecated use chunkInterval */
  interval(...args: Parameters<typeof chunkIntervals<T>>): sflow<T[]>;
  chunkInterval(...args: Parameters<typeof chunkIntervals<T>>): sflow<T[]>;
  debounce(...args: Parameters<typeof debounces<T>>): sflow<T>;
  filter(fn: (x: T, i: number) => Awaitable<any>): sflow<T>; // fn overload must come first
  filter(): sflow<NonNullable<T>>;
  find(fn: (x: T, i: number) => Awaitable<any>): sflow<T>; // fn overload must come first
  flatMap<R>(...args: Parameters<typeof flatMaps<T, R>>): sflow<R>;
  /** @deprecated to join another stream, use merge instead */
  join(fn: (s: WritableStream<T>) => void | any): sflow<T>;
  /** @deprecated to join another stream, use merge instead */
  join(stream?: ReadableStream<T>): sflow<T>;
  merge(fn: (s: WritableStream<T>) => void | any): sflow<T>;
  merge(stream?: ReadableStream<T>): sflow<T>;
  concat(fn: (s: WritableStream<T>) => void | any): sflow<T>;
  concat(stream?: ReadableStream<T>): sflow<T>;
  limit(...args: Parameters<typeof limits<T>>): sflow<T>;
  head(...args: Parameters<typeof heads<T>>): sflow<T>;
  map<R>(...args: Parameters<typeof maps<T, R>>): sflow<R>;
  log(...args: Parameters<typeof logs<T>>): sflow<T>;
  peek(...args: Parameters<typeof peeks<T>>): sflow<T>;
  riffle(...args: Parameters<typeof riffles<T>>): sflow<T>;
  forEach(...args: Parameters<typeof forEachs<T>>): sflow<T>;
  pMap<R>(fn: (x: T, i: number) => Awaitable<R>): sflow<R>;
  pMap<R>(
    fn: (x: T, i: number) => Awaitable<R>,
    options?: { concurrency?: number }
  ): sflow<R>;
  asyncMap<R>(fn: (x: T, i: number) => Awaitable<R>): sflow<R>;
  asyncMap<R>(
    fn: (x: T, i: number) => Awaitable<R>,
    options?: { concurrency?: number }
  ): sflow<R>;
  reduce(fn: (state: T | undefined, x: T, i: number) => Awaitable<T>): sflow<T>; // fn overload must come first
  reduce(fn: Reducer<T, T>, initialState: T): sflow<T>;
  reduce<S>(
    fn: (state: S | undefined, x: T, i: number) => Awaitable<S>
  ): sflow<S>; // fn overload must come first
  reduce<S>(fn: Reducer<S, T>, initialState: S): sflow<S>;
  reduceEmit(fn: EmitReducer<T, T, T>): sflow<T>;
  reduceEmit<R>(fn: EmitReducer<T, T, R>): sflow<R>;
  reduceEmit<S, R>(fn: EmitReducer<S, T, R>, state: S): sflow<R>;
  skip: (...args: Parameters<typeof skips<T>>) => sflow<T>;
  slice: (...args: Parameters<typeof slices<T>>) => sflow<T>;
  tail: (...args: Parameters<typeof tails<T>>) => sflow<T>;
  uniq: (...args: Parameters<typeof uniqs<T>>) => sflow<T>;
  uniqBy: <K>(...args: Parameters<typeof uniqBys<T, K>>) => sflow<T>;
  /** @deprecated use fork, forkTo */
  tees(fn: (s: sflow<T>) => void | any): sflow<T>; // fn overload must come first
  /** @deprecated use fork, forkTo */
  tees(stream: WritableStream<T>): sflow<T>;
  forkTo(fn: (s: sflow<T>) => void | any): sflow<T>; // fn overload must come first
  forkTo(stream: WritableStream<T>): sflow<T>;
  /** fork */
  fork(): sflow<T>;
  throttle: (...args: Parameters<typeof throttles<T>>) => sflow<T>;
  // prevents
  abort(...args: Parameters<typeof terminates<T>>): sflow<T>;
  terminateSignal(...args: Parameters<typeof terminates<T>>): sflow<T>;
  preventAbort: () => sflow<T>;
  preventClose: () => sflow<T>;
  preventCancel: () => sflow<T>;
  // transform
  onStart: (start: TransformerStartCallback<T>) => sflow<T>;
  onTransform: <R>(transform: TransformerTransformCallback<T, R>) => sflow<R>;
  onFlush: (flush: TransformerFlushCallback<T>) => sflow<T>;
  // to promises
  done: () => Promise<void>;
  end: (pipeTo?: WritableStream<T>) => Promise<void>;
  run: () => Promise<void>;
  /** alias of pipeTo */
  to: (pipeTo?: WritableStream<T>) => Promise<void>;
  toArray: () => Promise<T[]>;
  toEnd: () => Promise<void>;
  toNil: () => Promise<void>;
  /** Count stream items, dropping the items themselves */
  toCount: () => Promise<number>;
  /** Get the first item from the stream, then terminate the stream */
  toFirst: () => Promise<T>;
  /** Get the first item matching the predicate, then terminate the stream */
  toFirstMatch: (
    predicate: (value: T, index: number) => Awaitable<any>
  ) => Promise<T | undefined>;
  /**
   * Get exactly one item from the stream.
   * Throws if more than 1 item is emitted;
   * returns undefined if no item is emitted;
   * otherwise returns the item.
   */
  toExactlyOne: (options?: { required?: boolean }) => Promise<T | undefined>;
  /**
   * Get one item from the stream.
   * Throws if more than 1 item is emitted;
   * returns undefined if no item is emitted;
   * otherwise returns the item.
   * @deprecated use toExactlyOne
   */
  toOne: (options?: { required?: boolean }) => Promise<T | undefined>;
  /**
   * Get one item from the stream.
   * Throws if more than 1 item is emitted;
   * throws if no item is emitted;
   * otherwise returns the item.
   */
  toAtLeastOne: (options?: { required?: boolean }) => Promise<T>;
  /** Returns a promise-like handle that always yields the latest value of the stream */
  toLatest: () => ReturnType<typeof toLatests<T>>;
  /** Get the last item from the stream, ignoring the others */
  toLast: () => Promise<T>;
  toLog(...args: Parameters<typeof logs<T>>): Promise<void>;
}
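// Minimal usage sketch of the BaseFlow surface (illustrative, not from the
// original source; assumes an array is a valid FlowSource, as toStream
// suggests, and that maps takes an (x, i) callback like pMap does):
//
//   const out = await sflow([1, 2, 3, 4])
//     .filter((x) => x % 2 === 0) // keep 2, 4
//     .map((x) => x * 2) // 4, 8
//     .toArray(); // resolves to [4, 8]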
type ArrayFlow<T> = T extends ReadonlyArray<any>
  ? {
      /** inverse of chunk */
      flat: (...args: Parameters<typeof flats<T>>) => sflow<T[number]>;
    }
  : {};
type DictionaryFlow<T> = T extends Record<string, any>
  ? {
      unwind<K extends FieldPathByValue<T, ReadonlyArray<any>>>(
        key: K
      ): sflow<Unwinded<T, K>>;
      mapAddField: <K extends string, R>(
        ...args: Parameters<typeof mapAddFields<K, T, R>>
      ) => sflow<Omit<T, K> & { [key in K]: R }>;
    }
  : {};
type StreamsFlow<T> = T extends ReadableStream<infer R>
  ? {
      // merge multiple upstreams
      /** @deprecated use confluencesByBreth */
      confluence(...args: Parameters<typeof confluences<R>>): sflow<R>;
      confluenceByZip(): sflow<R>;
      confluenceByConcat(): sflow<R>;
      confluenceByParallel(): sflow<R>;
      confluenceByAscend(ordFn: (x: R) => Ord): sflow<R>;
      confluenceByDescend(ordFn: (x: R) => Ord): sflow<R>;
      // concat()
    }
  : {};
type TextFlow<T> = T extends string
  ? {
      join: (sep: string) => sflow<string>;
      lines: (
        ...args: Parameters<typeof lines>
      ) => sflow<
        ReturnType<typeof lines> extends TransformStream<any, infer R>
          ? R
          : never
      >;
      match: (
        ...args: Parameters<typeof matchs>
      ) => sflow<
        ReturnType<typeof matchs> extends TransformStream<any, infer R>
          ? R
          : never
      >;
      matchAll: (
        ...args: Parameters<typeof matchAlls>
      ) => sflow<
        ReturnType<typeof matchAlls> extends TransformStream<any, infer R>
          ? R
          : never
      >;
      replace: (
        ...args: Parameters<typeof replaces>
      ) => sflow<
        ReturnType<typeof replaces> extends TransformStream<any, infer R>
          ? R
          : never
      >;
      replaceAll: (
        ...args: Parameters<typeof replaceAlls>
      ) => sflow<
        ReturnType<typeof replaceAlls> extends TransformStream<any, infer R>
          ? R
          : never
      >;
    }
  : {};
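// Sketch of the dictionary helpers above (illustrative, not from the original
// source). unwind expands an array-valued field into one record per element
// (Unwinded semantics assumed); mapAddField's (key, fn) argument shape is an
// assumption about mapAddFields:
//
//   sflow([{ id: 1, tags: ["a", "b"] }])
//     .unwind("tags") // assumed: { id: 1, tags: "a" }, { id: 1, tags: "b" }
//     .mapAddField("tag", (x) => x.tags) // hypothetical usage
//     .toArray();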
type XsvEncodeFlow<T> = T extends Record<string, any>
  ? {
      csvFormat: (
        ...args: Parameters<typeof csvFormats>
      ) => sflow<
        ReturnType<typeof csvFormats> extends TransformStream<any, infer R>
          ? R
          : never
      >;
      tsvFormat: (
        ...args: Parameters<typeof tsvFormats>
      ) => sflow<
        ReturnType<typeof tsvFormats> extends TransformStream<any, infer R>
          ? R
          : never
      >;
    }
  : {};
type XsvDecodeFlow<T> = T extends string
  ? {
      csvParse<S extends string>(
        header: S
      ): sflow<Record<Split<S, ",">[number], any>>;
      csvParse<S extends string[]>(header: S): sflow<Record<S[number], any>>;
      tsvParse<S extends string>(
        header: S
      ): sflow<Record<Split<S, ",">[number], any>>;
      tsvParse<S extends string[]>(header: S): sflow<Record<S[number], any>>;
    }
  : {};
// toResponse
type ToResponse<T> = T extends string | Uint8Array
  ? {
      toResponse: () => Response;
      text: () => Promise<string>;
      json: () => Promise<any>;
      blob: () => Promise<Blob>;
      arrayBuffer: () => Promise<ArrayBuffer>;
    }
  : {};
export type sflowType<T extends sflow<any>> = T extends sflow<infer R>
  ? R
  : never;
export type sflow<T> = ReadableStream<T> &
  AsyncIterableIterator<T> &
  BaseFlow<T> &
  ArrayFlow<T> &
  DictionaryFlow<T> &
  StreamsFlow<T> &
  TextFlow<T> &
  XsvEncodeFlow<T> &
  XsvDecodeFlow<T> &
  ToResponse<T>;
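// Sketch of the XSV typing above (illustrative): csvParse derives the record
// keys from the header argument via Split, so rows below are typed as
// Record<"name" | "age", any>; the runtime row shape is assumed:
//
//   sflow(["alice,30", "bob,41"])
//     .csvParse("name,age")
//     .toArray(); // assumed: [{ name: "alice", age: "30" }, { name: "bob", age: "41" }]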
/** stream flow */
// <T, SRCS extends FlowSource<T>[]>(...streams: SRCS): ReadableStream<
//   SourcesType<SRCS>
// >
export function sflow<T0, SRCS extends FlowSource<T0>[] = FlowSource<T0>[]>(
  ...srcs: SRCS
): sflow<SourcesType<SRCS>> {
  type T = SourcesType<SRCS>;
  let r: ReadableStream<T> =
    srcs.length === 1
      ? (toStream(srcs[0]) as ReadableStream<T>)
      : (concatStream(srcs) as ReadableStream<T>);
  // @ts-ignore todo
  return Object.assign(r, {
    _type: null as T,
    get readable() {
      return r;
    },
    // get writable() {
    //   DIE(new Error("WIP, merge into this stream"));
    //   return new WritableStream();
    // },
    portal: (...args: Parameters<typeof portals>) =>
      sflow(r.pipeThrough(portals(...args))),
    through: (...args: Parameters<typeof _throughs>) =>
      sflow(r.pipeThrough(_throughs(...args))),
    by: (...args: Parameters<typeof _throughs>) =>
      sflow(r.pipeThrough(_throughs(...args))),
    byLazy: <R>(t: TransformStream<T, R>) => _byLazy<T, R>(r, t),
    mapAddField: (
      ...args: Parameters<typeof mapAddFields>
      // @ts-ignore
    ) => sflow(r.pipeThrough(mapAddFields(...args))),
    cacheSkip: (...args: Parameters<typeof cacheSkips>) =>
      sflow(r).byLazy(cacheSkips(...args)),
    cacheList: (...args: Parameters<typeof cacheLists>) =>
      sflow(r).byLazy(cacheLists(...args)),
    cacheTail: (...args: Parameters<typeof cacheTails>) =>
      sflow(r).byLazy(cacheTails(...args)),
    chunkBy: (...args: Parameters<typeof chunkBys>) =>
      sflow(r.pipeThrough(chunkBys(...args))),
    chunkIf: (...args: Parameters<typeof chunkIfs>) =>
      sflow(r.pipeThrough(chunkIfs(...args))),
    buffer: (...args: Parameters<typeof chunks>) =>
      sflow(r.pipeThrough(chunks(...args))),
    chunk: (...args: Parameters<typeof chunks>) =>
      sflow(r.pipeThrough(chunks(...args))),
    convolve: (...args: Parameters<typeof convolves>) =>
      sflow(r.pipeThrough(convolves(...args))),
    abort: (...args: Parameters<typeof terminates>) =>
      sflow(r.pipeThrough(terminates(...args))),
    chunkInterval: (...args: Parameters<typeof chunkIntervals>) =>
      sflow(r.pipeThrough(chunkIntervals(...args))),
    /** @deprecated */
    interval: (...args: Parameters<typeof chunkIntervals>) =>
      sflow(r.pipeThrough(chunkIntervals(...args))),
    debounce: (...args: Parameters<typeof debounces>) =>
      sflow(r.pipeThrough(debounces(...args))),
    filter: (...args: Parameters<typeof filters>) =>
      sflow(r.pipeThrough(filters(...args))),
    find: (...args: Parameters<typeof finds>) =>
      sflow(r.pipeThrough(finds(...args))),
    flatMap: (...args: Parameters<typeof flatMaps>) =>
      sflow(r.pipeThrough(flatMaps(...args))),
    flat: (
      ...args: Parameters<typeof flats>
      // @ts-expect-error array only
    ) => sflow(r).by(flats(...args)),
    join: (...args: Parameters<typeof riffles>) =>
      sflow(r.pipeThrough(riffles(...args))),
    match: (
      ...args: Parameters<typeof matchs>
      // @ts-expect-error string only
    ) => sflow(r.pipeThrough(matchs(...args))),
    matchAll: (
      ...args: Parameters<typeof matchAlls>
      // @ts-expect-error string only
    ) => sflow(r.pipeThrough(matchAlls(...args))),
    replace: (
      ...args: Parameters<typeof replaces>
      // @ts-expect-error string only
    ) => sflow(r.pipeThrough(replaces(...args))),
    replaceAll: (
      ...args: Parameters<typeof replaceAlls>
      // @ts-expect-error string only
    ) => sflow(r.pipeThrough(replaceAlls(...args))),
    // stream merging
    merge: (...args: FlowSource<T>[]) => sflow(r.pipeThrough(merges(...args))),
    concat: (srcs: FlowSource<FlowSource<T>>) =>
      // @ts-ignore streams only
      sflow(r.pipeThrough(concats(srcs))),
    confluence: (
      ...args: Parameters<typeof confluences>
      // @ts-ignore streams only
    ) => sflow(r.pipeThrough(confluences(...args))),
    confluenceByZip: () =>
      sflow(r)
        // @ts-ignore upstream accepts streams only
        .by(confluences()),
    confluenceByConcat: () =>
      sflow(r)
        // @ts-ignore upstream accepts streams only
        .by((srcs) => concatStream(srcs)),
    confluenceByParallel: () =>
      sflow(r)
        // @ts-ignore upstream accepts streams only
        .by((srcs: ReadableStream<FlowSource<T>>) =>
          sflow(srcs)
            .toArray()
            .then((srcs: FlowSource<T>[]) => mergeStream(...srcs))
        )
        // @ts-ignore upstream accepts streams only
        .confluence(),
    confluenceByAscend: (ordFn: (x: T) => Ord) =>
      sflow(r)
        .chunk()
        // @ts-ignore upstream accepts streams only
        .map((srcs) => mergeStreamsByAscend(ordFn, srcs))
        .confluence(),
    confluenceByDescend: (ordFn: (x: T) => Ord) =>
      sflow(r)
        .chunk()
        // @ts-ignore upstream accepts streams only
        .map((srcs) => mergeStreamsByDescend(ordFn, srcs))
        .confluence(),
    limit: (...args: Parameters<typeof limits>) =>
      sflow(r).byLazy(limits(...args)),
    head: (...args: Parameters<typeof heads>) =>
      sflow(r.pipeThrough(heads(...args))),
    map: (...args: Parameters<typeof maps>) =>
      sflow(r.pipeThrough(maps(...args))),
    log: (...args: Parameters<typeof logs>) =>
      sflow(r.pipeThrough(logs(...args))),
    uniq: (...args: Parameters<typeof uniqs>) =>
      sflow(r.pipeThrough(uniqs(...args))),
    uniqBy: (...args: Parameters<typeof uniqBys>) =>
      sflow(r.pipeThrough(uniqBys(...args))),
    unwind: (
      ...args: Parameters<typeof unwinds>
      // @ts-ignore
    ) => sflow(r.pipeThrough(unwinds(...args))),
    asyncMap: (...args: Parameters<typeof asyncMaps>) =>
      sflow(r.pipeThrough(asyncMaps(...args))),
    pMap: (...args: Parameters<typeof pMaps>) =>
      sflow(r.pipeThrough(pMaps(...args))),
    peek: (...args: Parameters<typeof peeks>) =>
      sflow(r.pipeThrough(peeks(...args))),
    riffle: (...args: Parameters<typeof riffles>) =>
      sflow(r.pipeThrough(riffles(...args))),
    forEach: (...args: Parameters<typeof forEachs>) =>
      sflow(r.pipeThrough(forEachs(...args))),
    reduce: (...args: Parameters<typeof reduces>) =>
      sflow(r.pipeThrough(reduces(...args))),
    reduceEmit: (...args: Parameters<typeof reduceEmits>) =>
      sflow(r.pipeThrough(reduceEmits(...args))),
    skip: (...args: Parameters<typeof skips>) =>
      sflow(r.pipeThrough(skips(...args))),
    slice: (...args: Parameters<typeof slices>) =>
      sflow(r.pipeThrough(slices(...args))),
    tail: (...args: Parameters<typeof tails>) =>
      sflow(r.pipeThrough(tails(...args))),
    tees: (...args: Parameters<typeof _tees>) =>
      sflow(r.pipeThrough(_tees(...args))),
    forkTo: (...args: Parameters<typeof _tees>) =>
      sflow(r.pipeThrough(_tees(...args))),
    fork: () => {
      let b;
      [r, b] = r.tee();
      return sflow(b);
    },
    throttle: (...args: Parameters<typeof throttles>) =>
      sflow(r.pipeThrough(throttles(...args))),
    // line-based data streams
    csvFormat: (
      ...args: Parameters<typeof csvFormats>
      // @ts-expect-error xsv
    ) => sflow(r.pipeThrough(csvFormats(...args))),
    tsvFormat: (
      ...args: Parameters<typeof tsvFormats>
      // @ts-expect-error xsv
    ) => sflow(r.pipeThrough(tsvFormats(...args))),
    csvParse: (
      ...args: Parameters<typeof csvParses>
      // @ts-expect-error xsv
    ) => sflow(r.pipeThrough(csvParses(...args))),
    tsvParse: (
      ...args: Parameters<typeof tsvParses>
      // @ts-expect-error xsv
    ) => sflow(r.pipeThrough(tsvParses(...args))),
    // prevents
    /** prevent upstream abort, ignore upstream errors */
    preventAbort: () =>
      sflow(r.pipeThrough(throughs(), { preventAbort: true })),
    /** prevent upstream close */
    preventClose: () =>
      sflow(r.pipeThrough(throughs(), { preventClose: true })),
    /** prevent downstream cancel, ignore downstream errors */
    preventCancel: () =>
      sflow(r.pipeThrough(throughs(), { preventCancel: true })),
    onStart: (start: TransformerStartCallback<T>) =>
      sflow(r).by(new TransformStream({ start })),
    onTransform: <R>(transform: TransformerTransformCallback<T, R>) =>
      sflow(r).by(new TransformStream({ transform })),
    onFlush: (flush: TransformerFlushCallback<T>) =>
      sflow(r).by(new TransformStream({ flush })),
    // to promises
    done: () => r.pipeTo(nils<T>()),
    end: (dst = nils<T>()) => r.pipeTo(dst),
    to: (dst = nils<T>()) => r.pipeTo(dst),
    run: () => r.pipeTo(nils<T>()),
    toEnd: () => r.pipeTo(nils<T>()),
    toNil: () => r.pipeTo(nils<T>()),
    toArray: () => wseToArray(r),
    toCount: async () => {
      let i = 0;
      const d = r.getReader();
      while (!(await d.read()).done) i++;
      return i;
    },
    // toCount: async () =>
    //   (async function () {
    //     let i = 0;
    //     await r.pipeTo(new WritableStream({ write: () => void i++ }));
    //     return i;
    //   })(),
    // toCount: async () => (await wseToArray(r)).length,
    // toCount: async () =>
    //   (await sflow(r)
    //     .map((_, i) => i + 1)
    //     .toLast()) ?? 0,
    // TODO: optimize memory usage
    toFirst: () => wseToPromise(sflow(r).limit(1, { terminate: true })),
    toFirstMatch: (predicate: (value: T, index: number) => Awaitable<any>) =>
      wseToPromise(sflow(r).find(predicate)),
    toLast: () => wseToPromise(sflow(r).tail(1)),
    toExactlyOne: async () => {
      const a = await wseToArray(r);
      if (a.length > 1) DIE(`Expect exactly 1 Item, but got ${a.length}`);
      return a[0];
    },
    toOne: async () => {
      const a = await wseToArray(r);
      if (a.length > 1) DIE(`Expect only 1 Item, but got ${a.length}`);
      return a[0];
    },
    toAtLeastOne: async () => {
      const a = await wseToArray(r);
      if (a.length > 1) DIE(`Expect only 1 Item, but got ${a.length}`);
      if (a.length < 1) DIE(`Expect at least 1 Item, but got ${a.length}`);
      return a[0];
    },
    toLatest: () => toLatests<T>(sflow(r)),
    /** call console.log on every item */
    toLog: (...args: Parameters<typeof logs<T>>) =>
      sflow(r.pipeThrough(logs(...args))).done(),
    // string stream processing
    lines: (
      ...args: Parameters<typeof lines>
      // @ts-expect-error works on string only
    ) => sflow(r.pipeThrough(lines(...args))),
    // as response (only ReadableStream<string | Uint8Array>)
    toResponse: (init?: ResponseInit) => new Response(r, init),
    text: (init?: ResponseInit) =>
      new Response(
        (r as ReadableStream<string | Uint8Array>).pipeThrough(
          new PolyfillTextEncoderStream()
        ),
        init
      ).text(),
    json: (init?: ResponseInit) =>
      new Response(
        (r as ReadableStream<string | Uint8Array>).pipeThrough(
          new PolyfillTextEncoderStream()
        ),
        init
      ).json(),
    blob: (init?: ResponseInit) => new Response(sflow(r), init).blob(),
    arrayBuffer: (init?: ResponseInit) => new Response(r, init).arrayBuffer(),
    // as iterator
    // [Symbol.asyncDispose]: async () => await r.pipeTo(nils()),
    [Symbol.asyncIterator]: streamAsyncIterator<T>,
  });
}
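// Illustrative sketch (not from the original source): sflow() returns the
// ReadableStream itself, augmented with the operators above, so a flow is
// also an async iterable and can back a Response directly:
//
//   for await (const chunk of sflow(["hello ", "world"])) {
//     console.log(chunk); // "hello ", then "world"
//   }
//   const res = sflow(["hello"]).toResponse(); // Response streaming "hello"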
export const _tees: {
  <T>(fn: (s: sflow<T>) => void | any): TransformStream<T, T>;
  <T>(stream?: WritableStream<T>): TransformStream<T, T>;
} = (arg) => {
  if (!arg) return new TransformStream();
  if (arg instanceof WritableStream) return tees((s) => s.pipeTo(arg));
  const fn = arg;
  const { writable, readable } = new TransformStream();
  const [a, b] = readable.tee();
  // @ts-ignore
  fn(sflow(a));
  return { writable, readable: b };
};
export const _throughs: {
  <T>(stream?: TransformStream<T, T>): TransformStream<T, T>;
  <T, R>(stream: TransformStream<T, R>): TransformStream<T, R>;
  <T, R>(fn: (s: sflow<T>) => FlowSource<R>): TransformStream<T, R>;
} = (arg: any) => {
  if (!arg) return new TransformStream();
  if (typeof arg !== "function") return throughs((s) => s.pipeThrough(arg));
  const fn = arg;
  const { writable, readable } = new TransformStream();
  return { writable, readable: sflow(fn(sflow(readable))) };
};
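// Sketch of what _throughs and _tees enable on the public surface
// (illustrative, not from the original source): by/through accept either a
// TransformStream or a function over the flow, and forkTo tees off a side
// branch without consuming the main flow:
//
//   sflow([1, 2, 3])
//     .by(
//       new TransformStream<number, number>({
//         transform: (x, ctrl) => ctrl.enqueue(x + 1),
//       })
//     )
//     .forkTo((side) => side.toCount()) // side branch sees 2, 3, 4
//     .toArray();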
/**
 * byLazy is a lazy version of by: it only pulls from upstream when downstream
 * pulls.
 *
 * Warning: does not work with empty streams yet.
 */
export function _byLazy<T, R>(
  r: ReadableStream<T>,
  t: TransformStream<T, R>
): sflow<R> {
  const reader = r.getReader();
  const tw = t.writable.getWriter();
  const tr = t.readable.getReader();
  return sflow<R>(
    new ReadableStream<R>(
      {
        start: async (ctrl) => {
          (async function () {
            while (true) {
              const { done, value } = await tr.read();
              if (done) return ctrl.close();
              ctrl.enqueue(value);
            }
          })();
        },
        pull: async (ctrl) => {
          const { done, value } = await reader.read();
          if (done) return tw.close();
          await tw.write(value);
        },
        cancel: async (reason) => {
          reader.cancel(reason);
          tr.cancel(reason);
        },
      },
      { highWaterMark: 0 }
    )
  );
}
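// Illustrative sketch of the laziness contract (not from the original
// source): with highWaterMark 0, values are only pulled from upstream when
// the consumer reads, so an expensive source is not drained ahead of demand
// (expensiveSource is hypothetical):
//
//   const firstTwo = await sflow(expensiveSource)
//     .byLazy(new TransformStream<number, number>()) // identity transform
//     .limit(2, { terminate: true })
//     .toArray();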