sflow

sflow is a powerful, highly extensible library for processing and manipulating streams of data. Inspired by the functional programming paradigm, it provides a rich set of utilities for transforming streams, including chunking, filtering, mapping, merging, and reducing.

1,663 lines (1,589 loc) 67.6 kB
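
For orientation before the bundled source: sflow wraps a ReadableStream and exposes chainable operators, each implemented as a TransformStream in the modules below. The following is a minimal usage sketch in TypeScript; the named export `sflow` and the exact call shapes are inferred from the source that follows (src/sflow.ts and the operators it pipes through), not from official documentation.

// Minimal sketch, assuming `sflow` is exposed as a named export
// (inferred from the src/sflow.ts section of the bundle below).
import { sflow } from "sflow";

const grouped = await sflow([1, 2, 3, 4, 5]) // any iterable becomes a stream via from()
  .map((n) => n * 2)     // maps():    2, 4, 6, 8, 10
  .filter((n) => n > 4)  // filters(): 6, 8, 10
  .chunk(2)              // chunks():  [6, 8], [10]
  .toArray();            // drains the stream into an array

console.log(grouped); // [[6, 8], [10]]

Each operator returns a new sflow-wrapped ReadableStream, so chains stay lazy until a terminal call such as toArray() drains the pipeline.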
(function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : typeof define === 'function' && define.amd ? define(['exports'], factory) : (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sflow = {})); })(this, (function (exports) { 'use strict'; var __create = Object.create; var __getProtoOf = Object.getPrototypeOf; var __defProp = Object.defineProperty; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __toESM = (mod, isNodeMode, target) => { target = mod != null ? __create(__getProtoOf(mod)) : {}; const to = __defProp(target, "default", { value: mod, enumerable: true }) ; for (let key of __getOwnPropNames(mod)) if (!__hasOwnProp.call(to, key)) __defProp(to, key, { get: () => mod[key], enumerable: true }); return to; }; var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports); var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true, configurable: true, set: (newValue) => all[name] = () => newValue }); }; // node_modules/unwind-array/src/index.js var require_src = __commonJS((exports, module) => { var unwind = (dataObject, options) => { const unwindRecursive = (dataObject2, path, currPath) => { const pathArr = path.split("."); if (!currPath) { currPath = pathArr[0]; } const result = []; let added = false; const addObject = (objectTempUnwind, objectKey) => { Object.keys(objectTempUnwind).forEach((objectTempUnwindKey) => { const newObjectCopy = {}; Object.keys(dataObject2).forEach((dataObjectKey) => { newObjectCopy[dataObjectKey] = dataObject2[dataObjectKey]; }); newObjectCopy[objectKey] = objectTempUnwind[objectTempUnwindKey]; added = true; result.push(newObjectCopy); }); }; Object.keys(dataObject2).forEach((objectKey) => { if (currPath === objectKey) { if (dataObject2[objectKey] instanceof Array) { if (dataObject2[objectKey].length === 0 && options.preserveEmptyArray !== true) { delete dataObject2[objectKey]; } else { Object.keys(dataObject2[objectKey]).forEach((objectElementKey) => { addObject(unwindRecursive(dataObject2[objectKey][objectElementKey], path.replace(`${currPath}.`, "")), objectKey); }); } } else { addObject(unwindRecursive(dataObject2[objectKey], path.replace(`${currPath}.`, "")), objectKey); } } }); if (!added) { result.push(dataObject2); } return result; }; return unwindRecursive(dataObject, options.path); }; module.exports = { unwind }; }); // node_modules/phpdie/dist/index.js var phpdie_default = DIE; function DIE(reason, ...slots) { throw errorFormat(reason, ...slots); } function errorFormat(reason, ...slots) { if (typeof reason === "string") { return reason.trim(); } if (Array.isArray(reason)) { return reason.map((e, i) => e + (slots[i] ?? "")).join(""); } return reason; } // src/andIgnoreError.ts function andIgnoreError(regex) { return (error) => error?.message?.match(regex) ? 
null : phpdie_default(error); } // node_modules/polyfill-text-encoder-stream/dist/index.js class PolyfillTextEncoderStream { _encoder = new TextEncoder; _reader = null; ready = Promise.resolve(); closed = false; readable = new ReadableStream({ start: (controller) => { this._reader = controller; } }); writable = new WritableStream({ write: async (chunk) => { if (typeof chunk !== "string") { this._reader.enqueue(chunk); return; } if (chunk != null && this._reader) { const encoded = this._encoder.encode(chunk); this._reader.enqueue(encoded); } }, close: () => { this._reader?.close(); this.closed = true; }, abort: (reason) => { this._reader?.error(reason); this.closed = true; } }); } // src/asyncMaps.ts var asyncMaps = (fn, options = {}) => { let i = 0; const tasks = new Map; return new TransformStream({ transform: async (chunk, ctrl) => { const id = i++; tasks.set(id, (async () => fn(chunk, id))().then((data) => ({ id, data }))); if (tasks.size >= (options.concurrency ?? Infinity)) { const { id: id2, data } = await Promise.race(tasks.values()); tasks.delete(id2); ctrl.enqueue(data); } }, flush: async (ctrl) => { while (tasks.size) { const { id, data } = await Promise.race(tasks.values()); tasks.delete(id); ctrl.enqueue(data); } } }); }; // src/never.ts var never = () => new Promise(() => null); // src/cacheLists.ts function cacheLists(store, _options) { const { key = new Error().stack ?? phpdie_default("missing cache key") } = typeof _options === "string" ? { key: _options } : _options ?? {}; const chunks = []; const cacheHitPromise = store.has?.(key) || store.get(key); let hitflag = false; return new TransformStream({ start: async (ctrl) => { if (!await cacheHitPromise) return; const cached = await store.get(key); if (!cached) return; cached.map((c) => ctrl.enqueue(c)); hitflag = true; }, transform: async (chunk, ctrl) => { if (await cacheHitPromise || hitflag) { ctrl.terminate(); return never(); } chunks.push(chunk); ctrl.enqueue(chunk); }, flush: async () => await store.set(key, chunks) }); } // src/cacheSkips.ts function cacheSkips(store, _options) { const { key = new Error().stack ?? phpdie_default("missing cache key"), windowSize = 1 } = typeof _options === "string" ? { key: _options } : _options ?? {}; const chunks = []; const cachePromise = store.get(key); return new TransformStream({ transform: async (chunk, ctrl) => { const cache = await cachePromise; const chunked = JSON.stringify(chunk); const inf404 = (idx) => idx == null || idx < 0 ? Infinity : idx; const hitCache = (item) => JSON.stringify(item) === chunked; const cachedContents = cache?.slice(inf404(cache.findIndex(hitCache))); if (cachedContents?.length) { await store.set(key, [...chunks, ...cachedContents].slice(0, windowSize)); ctrl.terminate(); return await never(); } chunks.push(chunk); ctrl.enqueue(chunk); }, flush: async () => { await store.set(key, chunks.slice(0, windowSize)); } }); } // node_modules/rambda/src/_internals/cloneList.js var cloneList = (list) => Array.prototype.slice.call(list); // node_modules/rambda/src/_internals/isArray.js var { isArray } = Array; // node_modules/rambda/src/type.js function type(input) { if (input === null) { return "Null"; } else if (input === undefined) { return "Undefined"; } else if (Number.isNaN(input)) { return "NaN"; } const typeResult = Object.prototype.toString.call(input).slice(8, -1); return typeResult === "AsyncFunction" ? 
"Promise" : typeResult; } // node_modules/rambda/src/equals.js function _indexOf(valueToFind, list) { if (!isArray(list)) throw new Error(`Cannot read property 'indexOf' of ${list}`); const typeOfValue = type(valueToFind); if (!["Array", "NaN", "Object", "RegExp"].includes(typeOfValue)) return list.indexOf(valueToFind); let index = -1; let foundIndex = -1; const { length } = list; while (++index < length && foundIndex === -1) if (equals(list[index], valueToFind)) foundIndex = index; return foundIndex; } function _arrayFromIterator(iter) { const list = []; let next; while (!(next = iter.next()).done) list.push(next.value); return list; } function _compareSets(a, b) { if (a.size !== b.size) return false; const aList = _arrayFromIterator(a.values()); const bList = _arrayFromIterator(b.values()); const filtered = aList.filter((aInstance) => _indexOf(aInstance, bList) === -1); return filtered.length === 0; } function compareErrors(a, b) { if (a.message !== b.message) return false; if (a.toString !== b.toString) return false; return a.toString() === b.toString(); } function parseDate(maybeDate) { if (!maybeDate.toDateString) return [false]; return [true, maybeDate.getTime()]; } function parseRegex(maybeRegex) { if (maybeRegex.constructor !== RegExp) return [false]; return [true, maybeRegex.toString()]; } function equals(a, b) { if (arguments.length === 1) return (_b) => equals(a, _b); if (Object.is(a, b)) return true; const aType = type(a); if (aType !== type(b)) return false; if (aType === "Function") return a.name === undefined ? false : a.name === b.name; if (["NaN", "Null", "Undefined"].includes(aType)) return true; if (["BigInt", "Number"].includes(aType)) { if (Object.is(-0, a) !== Object.is(-0, b)) return false; return a.toString() === b.toString(); } if (["Boolean", "String"].includes(aType)) return a.toString() === b.toString(); if (aType === "Array") { const aClone = Array.from(a); const bClone = Array.from(b); if (aClone.toString() !== bClone.toString()) return false; let loopArrayFlag = true; aClone.forEach((aCloneInstance, aCloneIndex) => { if (loopArrayFlag) { if (aCloneInstance !== bClone[aCloneIndex] && !equals(aCloneInstance, bClone[aCloneIndex])) loopArrayFlag = false; } }); return loopArrayFlag; } const aRegex = parseRegex(a); const bRegex = parseRegex(b); if (aRegex[0]) return bRegex[0] ? aRegex[1] === bRegex[1] : false; else if (bRegex[0]) return false; const aDate = parseDate(a); const bDate = parseDate(b); if (aDate[0]) return bDate[0] ? aDate[1] === bDate[1] : false; else if (bDate[0]) return false; if (a instanceof Error) { if (!(b instanceof Error)) return false; return compareErrors(a, b); } if (aType === "Set") return _compareSets(a, b); if (aType === "Object") { const aKeys = Object.keys(a); if (aKeys.length !== Object.keys(b).length) return false; let loopObjectFlag = true; aKeys.forEach((aKeyInstance) => { if (loopObjectFlag) { const aValue = a[aKeyInstance]; const bValue = b[aKeyInstance]; if (aValue !== bValue && !equals(aValue, bValue)) loopObjectFlag = false; } }); return loopObjectFlag; } return false; } // node_modules/rambda/src/sortBy.js function sortBy(sortFn, list) { if (arguments.length === 1) return (_list) => sortBy(sortFn, _list); const clone = cloneList(list); return clone.sort((a, b) => { const aSortResult = sortFn(a); const bSortResult = sortFn(b); if (aSortResult === bSortResult) return 0; return aSortResult < bSortResult ? -1 : 1; }); } // src/cacheTails.ts function cacheTails(store, _options) { const { key = new Error().stack ?? 
phpdie_default("missing cache key") } = typeof _options === "string" ? { key: _options } : _options ?? {}; const chunks = []; const cachePromise = Promise.withResolvers(); const t = new TransformStream; const w = t.writable.getWriter(); const writable = new WritableStream({ start: async () => cachePromise.resolve(await store.get(key) ?? []), write: async (chunk, ctrl) => { const cache = await cachePromise.promise; if (cache && equals(chunk, cache[0])) { await store.set(key, [...chunks, ...cache]); for await (const item of cache) await w.write(item); await w.close(); ctrl.error(new Error("cached")); return await never(); } chunks.push(chunk); await w.write(chunk); }, close: async () => { await store.set(key, [...chunks]); await w.close(); }, abort: () => w.abort() }); return { writable, readable: t.readable }; } // src/chunkBys.ts function chunkBys(compareFn) { const chunks = []; let lastOrder; return new TransformStream({ transform: async (chunk, ctrl) => { const order = await compareFn(chunk); if (lastOrder && lastOrder !== order) ctrl.enqueue(chunks.splice(0, Infinity)); chunks.push(chunk); lastOrder = order; }, flush: async (ctrl) => void (chunks.length && ctrl.enqueue(chunks)) }); } // src/chunkIfs.ts function chunkIfs(predicate, { inclusive = false } = {}) { const chunks = []; let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => { const cond = await predicate(chunk, i++, chunks); if (!inclusive && !cond) chunks.length && ctrl.enqueue(chunks.splice(0, Infinity)); chunks.push(chunk); if (!cond) ctrl.enqueue(chunks.splice(0, Infinity)); }, flush: async (ctrl) => void (chunks.length && ctrl.enqueue(chunks)) }); } // src/chunkIntervals.ts function chunkIntervals(interval = 0) { const chunks = []; let id = null; return new TransformStream({ start: (ctrl) => { id = setInterval(() => ctrl.enqueue(chunks.splice(0, Infinity)), interval); }, transform: async (chunk) => { chunks.push(chunk); }, flush: async (ctrl) => { if (chunks.length) ctrl.enqueue(chunks.splice(0, Infinity)); id !== null && clearInterval(id); } }); } // src/chunks.ts function chunks(n = Infinity) { const chunks2 = []; if (n <= 0) throw new Error("Buffer size must be greater than 0"); return new TransformStream({ transform: async (chunk, ctrl) => { chunks2.push(chunk); if (chunks2.length >= n) ctrl.enqueue(chunks2.splice(0, Infinity)); }, flush: async (ctrl) => void (chunks2.length && ctrl.enqueue(chunks2)) }); } // node_modules/web-streams-extensions/dist/esm/operators/map.js function map(select) { let reader = null; async function flush(controller) { try { while (controller.desiredSize > 0 && reader != null) { let next = await reader.read(); if (next.done) { controller.close(); reader = null; } else { let mapped = await select(next.value); if (mapped !== undefined) controller.enqueue(mapped); } } } catch (err) { controller.error(err); } } return function(src, opts) { return new ReadableStream({ start(controller) { reader = src.getReader(); return flush(controller); }, pull(controller) { return flush(controller); }, cancel(reason) { if (reader) { reader.cancel(reason); reader.releaseLock(); reader = null; } } }, opts); }; } // node_modules/web-streams-extensions/dist/esm/_readable-like.js function isReadableLike(obj) { return obj["readable"] != null; } // node_modules/web-streams-extensions/dist/esm/from.js function from(src) { let it; async function flush(controller) { try { while (controller.desiredSize > 0 && it != null) { let next = await it.next(); if (next.done) { it = null; controller.close(); } 
else { controller.enqueue(next.value); } } } catch (err) { controller.error(err); } } if (isReadableLike(src)) { return src.readable; } return new ReadableStream({ async start(controller) { let iterable; if (typeof src == "function") { src = src(); } if (Symbol.asyncIterator && src[Symbol.asyncIterator]) iterable = src[Symbol.asyncIterator].bind(src); else if (src[Symbol.iterator]) iterable = src[Symbol.iterator].bind(src); else { let value = await Promise.resolve(src); controller.enqueue(value); controller.close(); return; } it = iterable(); return flush(controller); }, async pull(controller) { return flush(controller); }, async cancel(reason) { if (reason && it && it.throw) { it.throw(reason); } else if (it && it.return) { await it.return(); } it = null; } }); } // node_modules/web-streams-extensions/dist/esm/operators/through.js function through(dst) { return function(src) { return src.pipeThrough(dst); }; } // node_modules/web-streams-extensions/dist/esm/pipe.js function pipe(src, ...ops) { if (isReadableLike(src)) { src = src.readable; } return ops.map((x) => isTransform(x) ? through(x) : x).reduce((p, c) => { return c(p, { highWaterMark: 1 }); }, src); } function isTransform(x) { return x["readable"] != null && x["writable"] != null; } // node_modules/web-streams-extensions/dist/esm/utils/signal.js class Gate { _count; _queue = []; constructor(_count) { this._count = _count; } async wait() { if (this._count > 0) { --this._count; return Promise.resolve(); } return new Promise((r) => { let cb = () => { this._queue.splice(this._queue.indexOf(cb), 1); --this._count; r(); }; this._queue.push(cb); }); } increment() { ++this._count; this.clearQueue(); } setCount(count) { this._count = count; this.clearQueue(); } clearQueue() { while (this._count > 0 && this._queue.length > 0) { this._queue.shift()(); } } } class BlockingQueue { _pushers = []; _pullers = []; constructor() {} async push(value) { return new Promise((r) => { this._pushers.unshift(() => { r(); return value; }); this.dequeue(); }); } async pull() { return new Promise((r) => { this._pullers.unshift((value) => { r(value); }); this.dequeue(); }); } dequeue() { while (this._pullers.length > 0 && this._pushers.length > 0) { let puller = this._pullers.pop(); let pusher = this._pushers.pop(); puller(pusher()); } } } // node_modules/web-streams-extensions/dist/esm/operators/schedule.js function schedule(scheduler) { let reader = null; async function flush(controller) { try { while (controller.desiredSize > 0 && reader != null) { let next = await reader.read(); if (next.done) { controller.close(); reader = null; } else { await scheduler.nextTick(); controller.enqueue(next.value); } } } catch (err) { controller.error(err); } } return function(src, opts) { return new ReadableStream({ start(controller) { reader = src.getReader(); return flush(controller); }, pull(controller) { return flush(controller); }, cancel(reason) { if (reader) { reader.cancel(reason); reader.releaseLock(); reader = null; } } }, opts); }; } // node_modules/web-streams-extensions/dist/esm/operators/on.js function on(callbacks) { let reader = null; async function flush(controller) { try { while (controller.desiredSize > 0 && reader != null) { let next = await reader.read(); if (next.done) { controller.close(); reader = null; if (callbacks.complete) callbacks.complete(); } else { controller.enqueue(next.value); } } } catch (err) { controller.error(err); if (callbacks.error) callbacks.error(err); } } return function(src, opts) { return new ReadableStream({ 
start(controller) { reader = src.getReader(); if (callbacks.start) callbacks.start(); return flush(controller); }, pull(controller) { return flush(controller); }, cancel(reason) { if (reader) { reader.cancel(reason); reader.releaseLock(); reader = null; if (callbacks.complete) callbacks.complete(reason); } } }, opts); }; } // node_modules/web-streams-extensions/dist/esm/to-promise.js async function toPromise(src) { let res = undefined; if (isReadableLike(src)) { src = src.readable; } let reader = src.getReader(); let done = false; while (done == false) { let next = await reader.read(); done = next.done; if (!done) res = next.value; } return res; } // node_modules/web-streams-extensions/dist/esm/operators/merge.js function merge(concurrent = Infinity) { if (concurrent == 0) throw Error("zero is an invalid concurrency limit"); return function(src) { let outerGate = new Gate(concurrent); let innerQueue = new BlockingQueue; let errored = null; return new ReadableStream({ start(outerController) { let reading = []; let readingDone = false; toPromise(pipe(src, schedule({ nextTick: async () => { await outerGate.wait(); } }), map((innerStream) => { if (!(innerStream instanceof ReadableStream)) { innerStream = from(innerStream); } reading.push(innerStream); pipe(innerStream, map(async (value) => { await innerQueue.push({ done: false, value }); }), on({ error(err) { outerController.error(err); }, complete() { outerGate.increment(); reading.splice(reading.indexOf(innerStream), 1); if (reading.length == 0 && readingDone) { innerQueue.push({ done: true }); } } })); }), on({ error(err) { outerController.error(err); errored = err; }, complete() { readingDone = true; } }))).catch((err) => { outerController.error(err); }); }, async pull(controller) { while (controller.desiredSize > 0) { let next = await innerQueue.pull(); if (errored) { controller.error(errored); } if (next.done) { controller.close(); } else { controller.enqueue(next.value); } } }, cancel(reason) { } }); }; } // node_modules/web-streams-extensions/dist/esm/to-array.js async function toArray(src) { let res = []; if (isReadableLike(src)) { src = src.readable; } let reader = src.getReader(); try { let done = false; while (done == false) { let next = await reader.read(); done = next.done; if (!done) res.push(next.value); } } finally { reader.releaseLock(); } return res; } // src/froms.ts var toStream = (src) => src instanceof ReadableStream ? src : from(src ?? []); // src/maps.ts function maps(fn, options) { const concurrency = options?.concurrency ?? 1; if (concurrency === 1) { let i2 = 0; return new TransformStream({ transform: async (chunk, ctrl) => { const ret = fn(chunk, i2++); const val = ret instanceof Promise ? 
await ret : ret; ctrl.enqueue(val); } }); } let i = 0; const promises = []; return new TransformStream({ transform: async (chunk, ctrl) => { promises.push(fn(chunk, i++)); if (promises.length >= concurrency) { ctrl.enqueue(await promises.shift()); } }, flush: async (ctrl) => { while (promises.length) { ctrl.enqueue(await promises.shift()); } } }); } // src/nils.ts function nils() { return new WritableStream; } function nil() { return null; } // src/concats.ts var concats = (srcs) => { if (!srcs) return new TransformStream; const upstream = new TransformStream; return { writable: upstream.writable, readable: concatStream([upstream.readable, concatStream(srcs)]) }; }; var concatStream = (srcs) => { if (!srcs) return new ReadableStream({ start: (c) => c.close() }); const t = new TransformStream; const w = t.writable.getWriter(); toStream(srcs).pipeThrough(maps(toStream)).pipeThrough(maps(async (s) => { const r = s.getReader(); while (true) { const { value, done } = await r.read(); if (done) break; await w.write(value); } })).pipeTo(nils()).then(() => w.close()).catch((reason) => w.abort(reason)); return t.readable; }; // src/confluences.ts var confluences = ({ order = "breadth" } = {}) => { if (order !== "breadth") phpdie_default("not implemented"); const { writable, readable: sources } = new TransformStream; const srcsQueue = []; const readable = new ReadableStream({ async pull(ctrl) { while (true) { const src = await (async () => { const r2 = sources.getReader(); const { done: done2, value: src2 } = await r2.read(); r2.releaseLock(); if (done2) return srcsQueue.shift(); return src2; })(); if (!src) return ctrl.close(); const r = src.getReader(); const { done, value } = await r.read(); r.releaseLock(); if (done) continue; srcsQueue.push(src); ctrl.enqueue(value); return; } } }); return { writable, readable }; }; // src/convolves.ts function convolves(n) { const buffer2 = []; return new TransformStream({ transform(chunk, controller) { buffer2.push(chunk); if (buffer2.length > n) buffer2.shift(); if (buffer2.length === n) controller.enqueue([...buffer2]); }, flush(controller) { while (buffer2.length > 1) { buffer2.shift(); if (buffer2.length === n) controller.enqueue([...buffer2]); } } }); } // src/debounces.ts function debounces(t) { let id = null; return new TransformStream({ transform: async (chunk, ctrl) => { if (id) clearTimeout(id); id = setTimeout(() => { ctrl.enqueue(chunk); id = null; }, t); }, flush: async () => { while (id) await new Promise((r) => setTimeout(r, t / 2)); } }); } // src/filters.ts var filters = (fn) => { let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => { if (fn) { const shouldEnqueue = await fn(chunk, i++); if (shouldEnqueue) ctrl.enqueue(chunk); } else { const isNull = chunk === undefined || chunk === null; if (!isNull) ctrl.enqueue(chunk); } } }); }; // src/finds.ts function finds(predicate) { let index = 0; let found = false; return new TransformStream({ async transform(chunk, controller) { if (found) return; const shouldEmit = await predicate(chunk, index++); if (shouldEmit) { found = true; controller.enqueue(chunk); controller.terminate(); } } }); } // src/flatMaps.ts function flatMaps(fn) { let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => { const ret = fn(chunk, i++); const val = ret instanceof Promise ? 
await ret : ret; val.map((e) => ctrl.enqueue(e)); } }); } // src/flats.ts function flats() { return exports_sf.composers(exports_sf.filters((e) => e.length)).by(new TransformStream({ transform: async (a, ctrl) => { a.map((e) => ctrl.enqueue(e)); } })); } // src/forEachs.ts function forEachs(fn, options) { const concurrency = options?.concurrency ?? 1; if (concurrency === 1) { let i2 = 0; return new TransformStream({ transform: async (chunk, ctrl) => { const ret = fn(chunk, i2++); ret instanceof Promise && await ret; ctrl.enqueue(chunk); } }); } let i = 0; const promises = []; const chunks2 = []; return new TransformStream({ transform: async (chunk, ctrl) => { promises.push(fn(chunk, i++)); chunks2.push(chunk); if (promises.length >= concurrency) { await promises.shift(); const chunk2 = chunks2.shift(); if (chunk2 === undefined) throw new Error("chunks.shift() returned undefined"); ctrl.enqueue(chunk2); } }, flush: async (ctrl) => { while (promises.length) { await promises.shift(); const chunk = chunks2.shift(); if (chunk === undefined) throw new Error("chunks.shift() returned undefined"); ctrl.enqueue(chunk); } } }); } // src/heads.ts function heads(n = 1) { return new TransformStream({ transform: async (chunk, ctrl) => { return n-- > 0 ? ctrl.enqueue(chunk) : await never(); } }); } // src/limits.ts function limits(n, { terminate = true } = {}) { return new TransformStream({ transform: async (chunk, ctrl) => { ctrl.enqueue(chunk); if (--n === 0) { terminate && ctrl.terminate(); return never(); } }, flush: () => {} }, { highWaterMark: 1 }, { highWaterMark: 0 }); } // src/throughs.ts var throughs = (arg) => { if (!arg) return new TransformStream; if (typeof arg !== "function") return throughs((s) => s.pipeThrough(arg)); const fn = arg; const { writable, readable } = new TransformStream; return { writable, readable: fn(readable) }; }; // src/lines.ts var lines = ({ EOL = "KEEP" } = {}) => { const CRLFMap = { KEEP: "$1", LF: ` `, CRLF: `\r `, NONE: "" }; return throughs((r) => r.pipeThrough(flatMaps((s) => s.split(/(?<=\n)/g))).pipeThrough(chunkIfs((ch) => ch.indexOf(` `) === -1, { inclusive: true })).pipeThrough(maps((chunks2) => chunks2.join("").replace(/(\r?\n?)$/, CRLFMap[EOL])))); }; // src/unpromises.ts function unpromises(promise) { const tr = new TransformStream; (async () => { const s = await promise; await s.pipeTo(tr.writable); })().catch((error) => { tr.readable.cancel(error).catch(() => { throw error; }); }).then(); return tr.readable; } // src/bys.ts function bys(arg) { if (!arg) return new TransformStream; if (typeof arg !== "function") return bys((s) => s.pipeThrough(arg)); const fn = arg; const { writable, readable } = new TransformStream; return { writable, readable: unpromises(fn(readable)) }; } // src/peeks.ts function peeks(fn) { let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => { ctrl.enqueue(chunk); const ret = fn(chunk, i++); ret instanceof Promise ? await ret : ret; } }); } // src/logs.ts function logs(mapFn = (s, _i) => s) { return bys(peeks(async (e, i) => { const ret = mapFn(e, i); const val = ret instanceof Promise ? await ret : ret; console.log(typeof val === "string" ? 
val.replace(/\n$/, "") : val); })); } // src/mapAddFields.ts function mapAddFields(key, fn) { let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => ctrl.enqueue({ ...chunk, [key]: await fn(chunk, i++) }) }); } // src/mapMixins.ts function mapMixins(fn) { let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => ctrl.enqueue({ ...chunk, ...await fn(chunk, i++) }) }); } // src/streamAsyncIterator.ts async function* streamAsyncIterator() { const reader = this.getReader(); try { while (true) { const { done, value } = await reader.read(); if (done) return; yield value; } } finally { reader.releaseLock(); } } // src/mergeStream.ts var mergeStream = (...srcs) => { if (!srcs.length) return new ReadableStream({ start: (c) => c.close() }); if (srcs.length === 1) return toStream(srcs[0]); const t = new TransformStream; const w = t.writable.getWriter(); const streams = srcs.map(toStream); Promise.all(streams.map(async (s) => { for await (const chunk of Object.assign(s, { [Symbol.asyncIterator]: streamAsyncIterator })) await w.write(chunk); })).then(async () => w.close()).catch((error) => { console.error(error); return Promise.all([ t.writable.abort(error), ...streams.map((e) => e.cancel(error)) ]); }); return t.readable; }; // src/emptyStream.ts var emptyStream = () => new ReadableStream({ start: (c) => c.close() }); // src/mergeStreamsBy.ts function mergeStreamsBy(transform, sources) { if (!sources) return (srcs) => mergeStreamsBy(transform, srcs); if (!sources.length) return emptyStream(); const streams = sources.map((s) => toStream(s)); const readers = streams.map((stream) => stream.getReader()); let slots = streams.map(() => null); return new ReadableStream({ pull: async (ctrl) => { await Promise.all(readers.map(async (reader, i) => slots[i] ??= await reader.read())); slots = await transform([...slots], ctrl); if (slots.length !== streams.length) phpdie_default("slot length mismatch"); } }); } function mergeStreamsByAscend(ordFn, sources) { if (!sources) return (sources2) => mergeStreamsByAscend(ordFn, sources2); let lastEmit = null; return mergeStreamsBy(async (slots, ctrl) => { const cands = slots.filter((e) => e?.done === false).map((e) => e?.value); if (!cands.length) { ctrl.close(); return []; } const peak = sortBy(ordFn, cands)[0]; const index = slots.findIndex((e) => e?.done === false && e?.value === peak); if (lastEmit && lastEmit.value !== sortBy(ordFn, [lastEmit.value, peak])[0] && ordFn(lastEmit.value) !== ordFn(peak)) throw new Error("MergeStreamError: one of sources is not ordered by ascending", { cause: { prevOrd: ordFn(lastEmit.value), currOrd: ordFn(peak), prev: lastEmit.value, curr: peak } }); lastEmit = { value: peak }; ctrl.enqueue(peak); return slots.toSpliced(index, 1, null); }, sources); } function mergeStreamsByDescend(ordFn, sources) { if (!sources) return (srcs) => mergeStreamsByDescend(ordFn, srcs); let lastEmit = null; return mergeStreamsBy(async (slots, ctrl) => { const cands = slots.filter((e) => e?.done === false).map((e) => e?.value); if (!cands.length) { ctrl.close(); return []; } const peak = sortBy(ordFn, cands).toReversed()[0]; const index = slots.findIndex((e) => e?.done === false && e?.value === peak); if (lastEmit && lastEmit.value !== sortBy(ordFn, [lastEmit.value, peak]).toReversed()[0] && ordFn(lastEmit.value) !== ordFn(peak)) phpdie_default(new Error("MergeStreamError: one of sources is not ordered by descending", { cause: { prevOrd: ordFn(lastEmit.value), currOrd: ordFn(peak), prev: lastEmit.value, curr: peak } })); 
lastEmit = { value: peak }; ctrl.enqueue(peak); return slots.toSpliced(index, 1, null); }, sources); } // src/wseMerges.ts var wseMerges = merge; // src/parallels.ts var parallels = (...srcs) => wseMerges()(from(srcs)); // src/merges.ts var merges = (...srcs) => { if (!srcs.length) return new TransformStream; const upstream = new TransformStream; return { writable: upstream.writable, readable: parallels(upstream.readable, ...srcs.map(toStream)) }; }; // src/pMaps.ts var pMaps = (fn, options = {}) => { let i = 0; const promises = []; return new TransformStream({ transform: async (chunk, ctrl) => { promises.push(fn(chunk, i++)); if (promises.length >= (options.concurrency ?? Infinity)) ctrl.enqueue(await promises.shift()); }, flush: async (ctrl) => { while (promises.length) ctrl.enqueue(await promises.shift()); } }); }; // src/portals.ts var portals = (arg) => { if (!arg) return new TransformStream; if (typeof arg !== "function") return throughs((s) => s.pipeThrough(arg)); const fn = arg; const { writable, readable } = new TransformStream; return { writable, readable: fn(readable) }; }; // src/reduceEmits.ts var reduceEmits = (fn, _state) => { let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => { const { next, emit } = await fn(_state, chunk, i++); _state = next; ctrl.enqueue(emit); } }); }; // src/reduces.ts var reduces = (fn, state) => { let i = 0; return new TransformStream({ transform: async (chunk, ctrl) => { const ret = fn(state, chunk, i++); const val = ret instanceof Promise ? await ret : ret; state = await val; ctrl.enqueue(state); } }); }; // src/riffles.ts function riffles(sep) { let last2; return new TransformStream({ transform: (chunk, ctrl) => { if (last2 !== undefined) { ctrl.enqueue(last2); ctrl.enqueue(sep); } last2 = chunk; }, flush: (ctrl) => ctrl.enqueue(last2) }); } // src/skips.ts function skips(n = 1) { return new TransformStream({ transform: async (chunk, ctrl) => { if (n <= 0) ctrl.enqueue(chunk); else n--; } }); } // src/slices.ts function slices(start = 0, end = Infinity) { const count = end - start; const { readable, writable } = new TransformStream; return { writable, readable: readable.pipeThrough(skips(start)).pipeThrough(limits(count)) }; } // node_modules/string-replace-async/index.js function replaceAsync(string, searchValue, replacer) { try { if (typeof replacer === "function") { var values = []; String.prototype.replace.call(string, searchValue, function() { values.push(replacer.apply(undefined, arguments)); return ""; }); return Promise.all(values).then(function(resolvedValues) { return String.prototype.replace.call(string, searchValue, function() { return resolvedValues.shift(); }); }); } else { return Promise.resolve(String.prototype.replace.call(string, searchValue, replacer)); } } catch (error) { return Promise.reject(error); } } // src/strings.ts var matchs = (matcher) => { return new TransformStream({ transform: (chunk, ctrl) => ctrl.enqueue(chunk.match(matcher)) }); }; var matchAlls = (matcher) => { return new TransformStream({ transform: (chunk, ctrl) => ctrl.enqueue(chunk.matchAll(matcher)) }); }; var replaces = (searchValue, replacement) => { return maps((s) => typeof replacement === "string" ? s.replace(searchValue, replacement) : replaceAsync(s, searchValue, replacement)); }; var replaceAlls = (searchValue, replacement) => { return maps((s) => typeof replacement === "string" ? 
s.replaceAll(searchValue, replacement) : replaceAsync(s, searchValue, replacement)); }; // src/tails.ts function tails(n = 1) { const chunks2 = []; return new TransformStream({ transform: (chunk) => { chunks2.push(chunk); if (chunks2.length > n) chunks2.shift(); }, flush: (ctrl) => { chunks2.map((e) => ctrl.enqueue(e)); } }); } // src/takeWhiles.ts function takeWhiles(fn, { terminate = true } = {}) { let i = 0; let stopped = false; return new TransformStream({ transform: async (chunk, ctrl) => { if (stopped) return; const shouldContinue = await fn(chunk, i++); if (shouldContinue) { ctrl.enqueue(chunk); } else { stopped = true; if (terminate) { ctrl.terminate(); return never(); } } }, flush: () => {} }, { highWaterMark: 1 }, { highWaterMark: 0 }); } // src/tees.ts var tees = (arg) => { if (!arg) return new TransformStream; if (arg instanceof WritableStream) return tees((s) => s.pipeTo(arg)); const fn = arg; const { writable, readable } = new TransformStream; const [a, b] = readable.tee(); fn(a); return { writable, readable: b }; }; // src/terminates.ts function terminates(signal) { return throughs((r) => r.pipeThrough(new TransformStream, { signal })); } // src/throttles.ts function throttles(interval, { drop = false, keepLast = true } = {}) { let timerId = null; let cdPromise = Promise.withResolvers(); let lasts = []; return new TransformStream({ transform: async (chunk, ctrl) => { if (timerId) { if (keepLast) lasts = [chunk]; if (drop) return; await cdPromise.promise; } lasts = []; ctrl.enqueue(chunk); [cdPromise, timerId] = [ Promise.withResolvers(), setTimeout(() => { timerId = null; cdPromise.resolve(); }, interval) ]; }, flush: async (ctrl) => { while (timerId) await new Promise((r) => setTimeout(r, interval / 2)); lasts.map((e) => ctrl.enqueue(e)); } }); } // src/toLatest.ts function toLatests(r) { let latest; let nextPromise = Promise.withResolvers(); r.pipeTo(new WritableStream({ write: (value) => { latest = value; nextPromise.resolve(value); nextPromise = Promise.withResolvers(); }, close: () => { nextPromise.resolve(undefined); } })); return { get latest() { if (latest === undefined) { return nextPromise.promise; } return Promise.resolve(latest); }, get next() { return nextPromise.promise; } }; } // src/uniqs.ts var uniqs = () => { const set = new Set; return throughs((s) => s.pipeThrough(filters((x) => { if (set.has(x)) return false; set.add(x); return true; }))); }; var uniqBys = (keyFn) => { const set = new Set; return throughs((s) => s.pipeThrough(filters(async (x) => { const key = await keyFn(x); if (set.has(key)) return false; set.add(key); return true; }))); }; // src/unwinds.ts var import_unwind_array = __toESM(require_src()); function unwinds(key) { return flatMaps((e) => import_unwind_array.unwind(e, { path: key })); } // src/sflow.ts function sflow(...srcs) { let r = srcs.length === 1 ? 
toStream(srcs[0]) : concatStream(srcs); return Object.assign(r, { _type: null, get readable() { return r; }, portal: (...args) => sflow(r.pipeThrough(portals(...args))), through: (...args) => sflow(r.pipeThrough(_throughs(...args))), by: (...args) => sflow(r.pipeThrough(_throughs(...args))), byLazy: (t) => _byLazy(r, t), cacheSkip: (...args) => sflow(r).byLazy(cacheSkips(...args)), cacheList: (...args) => sflow(r).byLazy(cacheLists(...args)), cacheTail: (...args) => sflow(r).byLazy(cacheTails(...args)), chunkBy: (...args) => sflow(r.pipeThrough(chunkBys(...args))), chunkIf: (...args) => sflow(r.pipeThrough(chunkIfs(...args))), buffer: (...args) => sflow(r.pipeThrough(chunks(...args))), chunk: (...args) => sflow(r.pipeThrough(chunks(...args))), convolve: (...args) => sflow(r.pipeThrough(convolves(...args))), abort: (...args) => sflow(r.pipeThrough(terminates(...args))), chunkInterval: (...args) => sflow(r.pipeThrough(chunkIntervals(...args))), interval: (...args) => sflow(r.pipeThrough(chunkIntervals(...args))), debounce: (...args) => sflow(r.pipeThrough(debounces(...args))), filter: (...args) => sflow(r.pipeThrough(filters(...args))), find: (...args) => sflow(r.pipeThrough(finds(...args))), flatMap: (...args) => sflow(r.pipeThrough(flatMaps(...args))), flat: (...args) => sflow(r).by(flats(...args)), join: (...args) => sflow(r.pipeThrough(riffles(...args))), match: (...args) => sflow(r.pipeThrough(matchs(...args))), matchAll: (...args) => sflow(r.pipeThrough(matchAlls(...args))), replace: (...args) => sflow(r.pipeThrough(replaces(...args))), replaceAll: (...args) => sflow(r.pipeThrough(replaceAlls(...args))), merge: (...args) => sflow(r.pipeThrough(merges(...args))), concat: (srcs2) => sflow(r.pipeThrough(concats(srcs2))), confluence: (...args) => sflow(r.pipeThrough(confluences(...args))), confluenceByZip: () => sflow(r).by(confluences()), confluenceByConcat: () => sflow(r).by((srcs2) => concatStream(srcs2)), confluenceByParallel: () => sflow(r).by((srcs2) => sflow(srcs2).toArray().then((srcs3) => mergeStream(...srcs3))).confluence(), confluenceByAscend: (ordFn) => sflow(r).chunk().map((srcs2) => mergeStreamsByAscend(ordFn, srcs2)).confluence(), confluenceByDescend: (ordFn) => sflow(r).chunk().map((srcs2) => mergeStreamsByDescend(ordFn, srcs2)).confluence(), limit: (...args) => sflow(r).byLazy(limits(...args)), head: (...args) => sflow(r.pipeThrough(heads(...args))), map: (...args) => sflow(r.pipeThrough(maps(...args))), mapAddField: (...args) => sflow(r.pipeThrough(mapAddFields(...args))), mapMixin: (...args) => sflow(r.pipeThrough(mapMixins(...args))), log: (...args) => sflow(r.pipeThrough(logs(...args))), uniq: (...args) => sflow(r.pipeThrough(uniqs(...args))), uniqBy: (...args) => sflow(r.pipeThrough(uniqBys(...args))), unwind: (...args) => sflow(r.pipeThrough(unwinds(...args))), asyncMap: (...args) => sflow(r.pipeThrough(asyncMaps(...args))), pMap: (...args) => sflow(r.pipeThrough(pMaps(...args))), peek: (...args) => sflow(r.pipeThrough(peeks(...args))), riffle: (...args) => sflow(r.pipeThrough(riffles(...args))), forEach: (...args) => sflow(r.pipeThrough(forEachs(...args))), reduce: (...args) => sflow(r.pipeThrough(reduces(...args))), reduceEmit: (...args) => sflow(r.pipeThrough(reduceEmits(...args))), skip: (...args) => sflow(r.pipeThrough(skips(...args))), slice: (...args) => sflow(r.pipeThrough(slices(...args))), tail: (...args) => sflow(r.pipeThrough(tails(...args))), takeWhile: (...args) => sflow(r.pipeThrough(takeWhiles(...args))), tees: (...args) => 
sflow(r.pipeThrough(_tees(...args))), forkTo: (...args) => sflow(r.pipeThrough(_tees(...args))), fork: () => { let b; [r, b] = r.tee(); return sflow(b); }, throttle: (...args) => sflow(r.pipeThrough(throttles(...args))), preventAbort: () => sflow(r.pipeThrough(throughs(), { preventAbort: true })), preventClose: () => sflow(r.pipeThrough(throughs(), { preventClose: true })), preventCancel: () => sflow(r.pipeThrough(throughs(), { preventCancel: true })), onStart: (start) => sflow(r).by(new TransformStream({ start })), onTransform: (transform) => sflow(r).by(new TransformStream({ transform })), onFlush: (flush) => sflow(r).by(new Trans