sflow
sflow is a powerful and highly-extensible library for processing and manipulating streams of data. Inspired by the functional programming paradigm, it provides a rich set of utilities for transforming streams, including chunking, filtering, mapping, merging, throttling, and caching.
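A minimal, illustrative example of the fluent API this bundle exports (assuming a runtime with WHATWG streams, e.g. Bun, Deno, or Node 18+):

import sflow from "sflow";

const result = await sflow([1, 2, 3, 4, 5])
  .map((n) => n * 2)
  .filter((n) => n > 4)
  .toArray(); // => [6, 8, 10]
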
var __defProp = Object.defineProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, {
get: all[name],
enumerable: true,
configurable: true,
set: (newValue) => all[name] = () => newValue
});
};
// src/andIgnoreError.ts
import DIE from "phpdie";
function andIgnoreError(regex) {
return (error) => error?.message?.match(regex) ? null : DIE(error);
}
// src/sflow.ts
import DIE7 from "phpdie";
import PolyfillTextEncoderStream from "polyfill-text-encoder-stream";
// src/asyncMaps.ts
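// asyncMaps maps each chunk through an async `fn`, keeping at most
// `options.concurrency` tasks in flight; results are emitted in completion
// order, not input order (use pMaps below for order-preserving concurrency).
// Illustrative: sflow(urls).asyncMap((u) => fetch(u), { concurrency: 4 }).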
var asyncMaps = (fn, options = {}) => {
let i = 0;
let tasks = new Map;
return new TransformStream({
transform: async (chunk, ctrl) => {
const id = i++;
tasks.set(id, async function() {
return fn(chunk, id);
}().then((data) => ({ id, data })));
if (tasks.size >= (options.concurrency ?? Infinity)) {
const { id: id2, data } = await Promise.race(tasks.values());
tasks.delete(id2);
ctrl.enqueue(data);
}
},
flush: async (ctrl) => {
while (tasks.size) {
const { id, data } = await Promise.race(tasks.values());
tasks.delete(id);
ctrl.enqueue(data);
}
}
});
};
// src/cacheLists.ts
import DIE2 from "phpdie";
// src/never.ts
var never = () => new Promise(() => null);
// src/cacheLists.ts
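// cacheLists caches an entire stream in `store` under `key`: on a cache hit
// the cached chunks are replayed and the upstream is ignored; on a miss the
// chunks pass through and the full list is written to the store on flush.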
function cacheLists(store, _options) {
const { key = new Error().stack ?? DIE2("missing cache key") } = typeof _options === "string" ? { key: _options } : _options ?? {};
const chunks = [];
const cacheHitPromise = store.has?.(key) || store.get(key);
let hitflag = false;
return new TransformStream({
start: async (ctrl) => {
if (!await cacheHitPromise)
return;
const cached = await store.get(key);
if (!cached)
return;
cached.map((c) => ctrl.enqueue(c));
hitflag = true;
},
transform: async (chunk, ctrl) => {
if (await cacheHitPromise || hitflag) {
ctrl.terminate();
return never();
}
chunks.push(chunk);
ctrl.enqueue(chunk);
},
flush: async () => await store.set(key, chunks)
});
}
// src/cacheSkips.ts
import DIE3 from "phpdie";
function cacheSkips(store, _options) {
const {
key = new Error().stack ?? DIE3("missing cache key"),
windowSize = 1
} = typeof _options === "string" ? { key: _options } : _options ?? {};
const chunks = [];
const cachePromise = store.get(key);
return new TransformStream({
transform: async (chunk, ctrl) => {
const cache = await cachePromise;
const chunked = JSON.stringify(chunk);
const inf404 = (idx) => idx == null || idx < 0 ? Infinity : idx;
const hitCache = (item) => JSON.stringify(item) === chunked;
const cachedContents = cache?.slice(inf404(cache.findIndex(hitCache)));
if (cachedContents?.length) {
await store.set(key, [...chunks, ...cachedContents].slice(0, windowSize));
ctrl.terminate();
return await never();
}
chunks.push(chunk);
ctrl.enqueue(chunk);
},
flush: async () => {
await store.set(key, chunks.slice(0, windowSize));
}
});
}
// src/cacheTails.ts
import DIE4 from "phpdie";
import { equals } from "rambda";
function cacheTails(store, _options) {
const { key = new Error().stack ?? DIE4("missing cache key") } = typeof _options === "string" ? { key: _options } : _options ?? {};
const chunks = [];
const cachePromise = Promise.withResolvers();
const t = new TransformStream;
const w = t.writable.getWriter();
const writable = new WritableStream({
start: async () => cachePromise.resolve(await store.get(key)),
write: async (chunk, ctrl) => {
const cache = await cachePromise.promise;
if (cache && equals(chunk, cache[0])) {
await store.set(key, [...chunks, ...cache]);
for await (const item of cache)
await w.write(item);
await w.close();
ctrl.error(new Error("cached"));
return await never();
}
chunks.push(chunk);
await w.write(chunk);
},
close: async () => {
await store.set(key, [...chunks]);
await w.close();
},
abort: () => w.abort()
});
return { writable, readable: t.readable };
}
// src/chunkBys.ts
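// chunkBys groups consecutive chunks that map to the same key via `compareFn`,
// emitting each group as an array when the key changes, plus a final group on flush.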
function chunkBys(compareFn) {
let chunks = [];
let lastOrder;
return new TransformStream({
transform: async (chunk, ctrl) => {
const order = await compareFn(chunk);
if (lastOrder && lastOrder !== order)
ctrl.enqueue(chunks.splice(0, Infinity));
chunks.push(chunk);
lastOrder = order;
},
flush: async (ctrl) => void (chunks.length && ctrl.enqueue(chunks))
});
}
// src/chunkIfs.ts
function chunkIfs(predicate, { inclusive = false } = {}) {
let chunks = [];
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
const cond = await predicate(chunk, i++, chunks);
if (!inclusive && !cond)
chunks.length && ctrl.enqueue(chunks.splice(0, Infinity));
chunks.push(chunk);
if (!cond)
ctrl.enqueue(chunks.splice(0, Infinity));
},
flush: async (ctrl) => void (chunks.length && ctrl.enqueue(chunks))
});
}
// src/chunkIntervals.ts
function chunkIntervals(interval = 0) {
let chunks = [];
let id = null;
return new TransformStream({
start: (ctrl) => {
id = setInterval(() => ctrl.enqueue(chunks.splice(0, Infinity)), interval);
},
transform: async (chunk) => {
chunks.push(chunk);
},
flush: async (ctrl) => {
if (chunks.length)
ctrl.enqueue(chunks.splice(0, Infinity));
id !== null && clearInterval(id);
}
});
}
// src/chunks.ts
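// chunks buffers up to `n` chunks and emits them as an array; any remainder is
// emitted on flush. Exposed on the flow API as both .chunk(n) and .buffer(n).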
function chunks(n = Infinity) {
let chunks2 = [];
if (n <= 0)
throw new Error("Buffer size must be greater than 0");
return new TransformStream({
transform: async (chunk, ctrl) => {
chunks2.push(chunk);
if (chunks2.length >= n)
ctrl.enqueue(chunks2.splice(0, Infinity));
},
flush: async (ctrl) => void (chunks2.length && ctrl.enqueue(chunks2))
});
}
// src/wse.ts
import { toArray } from "web-streams-extensions";
import { toPromise } from "web-streams-extensions";
import { merge } from "web-streams-extensions";
import { from } from "web-streams-extensions";
// src/froms.ts
var toStream = (src) => src instanceof ReadableStream ? src : from(src ?? []);
// src/maps.ts
function maps(fn) {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
const ret = fn(chunk, i++);
const val = ret instanceof Promise ? await ret : ret;
ctrl.enqueue(val);
}
});
}
// src/nils.ts
function nils() {
return new WritableStream;
}
function nil() {
return null;
}
// src/concats.ts
var concats = (srcs) => {
if (!srcs)
return new TransformStream;
const upstream = new TransformStream;
return {
writable: upstream.writable,
readable: concatStream([upstream.readable, concatStream(srcs)])
};
};
var concatStream = (srcs) => {
if (!srcs)
return new ReadableStream({ start: (c) => c.close() });
const t = new TransformStream;
const w = t.writable.getWriter();
toStream(srcs).pipeThrough(maps(toStream)).pipeThrough(maps(async (s) => {
const r = s.getReader();
while (true) {
const { value, done } = await r.read();
if (done)
break;
await w.write(value);
}
})).pipeTo(nils()).then(() => w.close()).catch((reason) => w.abort(reason));
return t.readable;
};
// src/confluences.ts
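// confluences flattens a stream of streams: each pull takes the next source
// (newly arrived sources first, then a round-robin queue) and emits one chunk
// from it, giving a breadth-first interleaving. Only order: "breadth" is implemented.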
import DIE5 from "phpdie";
var confluences = ({
order = "breadth"
} = {}) => {
const baseError = new Error;
if (order !== "breadth")
DIE5("not implemented");
const { writable, readable: sources } = new TransformStream;
const srcsQueue = [];
const readable = new ReadableStream({
async pull(ctrl) {
while (true) {
const src = await async function() {
const r2 = sources.getReader();
const { done: done2, value: src2 } = await r2.read();
r2.releaseLock();
if (done2)
return srcsQueue.shift();
return src2;
}();
if (!src)
return ctrl.close();
const r = src.getReader();
const { done, value } = await r.read();
r.releaseLock();
if (done)
continue;
srcsQueue.push(src);
ctrl.enqueue(value);
return;
}
}
});
return { writable, readable };
};
// src/convolves.ts
function convolves(n) {
const buffer = [];
return new TransformStream({
transform(chunk, controller) {
buffer.push(chunk);
if (buffer.length > n)
buffer.shift();
if (buffer.length === n)
controller.enqueue([...buffer]);
},
flush(controller) {
while (buffer.length > 1) {
buffer.shift();
if (buffer.length === n)
controller.enqueue([...buffer]);
}
}
});
}
// src/debounces.ts
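// debounces is a trailing-edge debounce: a chunk is emitted only after `t` ms
// pass without a newer chunk arriving; each newer chunk cancels the pending one.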
function debounces(t) {
let id = null;
return new TransformStream({
transform: async (chunk, ctrl) => {
if (id)
clearTimeout(id);
id = setTimeout(() => {
ctrl.enqueue(chunk);
id = null;
}, t);
},
flush: async () => {
while (id)
await new Promise((r) => setTimeout(r, t / 2));
}
});
}
// src/filters.ts
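// filters keeps chunks for which `fn(chunk, index)` resolves truthy; when
// called without a predicate it simply drops null and undefined chunks.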
var filters = (fn) => {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
if (fn) {
const shouldEnqueue = await fn(chunk, i++);
if (shouldEnqueue)
ctrl.enqueue(chunk);
} else {
const isNull = chunk === undefined || chunk === null;
if (!isNull)
ctrl.enqueue(chunk);
}
}
});
};
// src/finds.ts
function finds(predicate) {
let index = 0;
let found = false;
return new TransformStream({
async transform(chunk, controller) {
if (found)
return;
const shouldEmit = await predicate(chunk, index++);
if (shouldEmit) {
found = true;
controller.enqueue(chunk);
controller.terminate();
}
}
});
}
// src/flatMaps.ts
function flatMaps(fn) {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
const ret = fn(chunk, i++);
const val = ret instanceof Promise ? await ret : ret;
val.map((e) => ctrl.enqueue(e));
}
});
}
// src/flats.ts
function flats() {
return exports_sf.composers(exports_sf.filters((e) => e.length)).by(new TransformStream({
transform: async (a, ctrl) => {
a.map((e) => ctrl.enqueue(e));
}
}));
}
// src/forEachs.ts
function forEachs(fn) {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
const ret = fn(chunk, i++);
ret instanceof Promise && await ret;
ctrl.enqueue(chunk);
}
});
}
// src/heads.ts
function heads(n = 1) {
return new TransformStream({
transform: async (chunk, ctrl) => {
return n-- > 0 ? ctrl.enqueue(chunk) : await never();
}
});
}
// src/limits.ts
function limits(n, { terminate = true } = {}) {
return new TransformStream({
transform: async (chunk, ctrl) => {
ctrl.enqueue(chunk);
if (--n === 0) {
terminate && ctrl.terminate();
return never();
}
},
flush: () => {}
}, { highWaterMark: 1 }, { highWaterMark: 0 });
}
// src/throughs.ts
var throughs = (arg) => {
if (!arg)
return new TransformStream;
if (typeof arg !== "function")
return throughs((s) => s.pipeThrough(arg));
const fn = arg;
const { writable, readable } = new TransformStream;
return { writable, readable: fn(readable) };
};
// src/lines.ts
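// lines re-chunks a text stream into whole lines. EOL controls the trailing
// newline of each emitted line: "KEEP" (default), "LF", "CRLF", or "NONE".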
var lines = ({ EOL = "KEEP" } = {}) => {
  const CRLFMap = {
    KEEP: "$1",
    LF: "\n",
    CRLF: "\r\n",
    NONE: ""
  };
  return throughs((r) => r.pipeThrough(flatMaps((s) => s.split(/(?<=\n)/g))).pipeThrough(chunkIfs((ch) => ch.indexOf("\n") === -1, { inclusive: true })).pipeThrough(maps((chunks2) => chunks2.join("").replace(/(\r?\n?)$/, CRLFMap[EOL]))));
};
// src/unpromises.ts
function unpromises(promise) {
const tr = new TransformStream;
(async function() {
const s = await promise;
await s.pipeTo(tr.writable);
})().catch((error) => {
tr.readable.cancel(error).catch(() => {
throw error;
});
}).then();
return tr.readable;
}
// src/bys.ts
function bys(arg) {
if (!arg)
return new TransformStream;
if (typeof arg !== "function")
return bys((s) => s.pipeThrough(arg));
const fn = arg;
const { writable, readable } = new TransformStream;
return { writable, readable: unpromises(fn(readable)) };
}
// src/peeks.ts
function peeks(fn) {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
ctrl.enqueue(chunk);
const ret = fn(chunk, i++);
const val = ret instanceof Promise ? await ret : ret;
}
});
}
// src/logs.ts
function logs(mapFn = (s, i) => s) {
return bys(peeks(async (e, i) => {
const ret = mapFn(e, i);
const val = ret instanceof Promise ? await ret : ret;
console.log(typeof val === "string" ? val.replace(/\n$/, "") : val);
}));
}
// src/mapAddFields.ts
function mapAddFields(key, fn) {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => ctrl.enqueue({ ...chunk, [key]: await fn(chunk, i++) })
});
}
// src/wseMerges.ts
var wseMerges = merge;
// src/parallels.ts
var parallels = (...srcs) => wseMerges()(from(srcs));
// src/merges.ts
var merges = (...srcs) => {
if (!srcs.length)
return new TransformStream;
const upstream = new TransformStream;
return {
writable: upstream.writable,
readable: parallels(upstream.readable, ...srcs.map(toStream))
};
};
// src/streamAsyncIterator.ts
async function* streamAsyncIterator() {
const reader = this.getReader();
try {
while (true) {
const { done, value } = await reader.read();
if (done)
return;
yield value;
}
} finally {
reader.releaseLock();
}
}
// src/mergeStream.ts
var mergeStream = (...srcs) => {
if (!srcs.length)
return new ReadableStream({ start: (c) => c.close() });
if (srcs.length === 1)
return toStream(srcs[0]);
const t = new TransformStream;
const w = t.writable.getWriter();
const streams = srcs.map(toStream);
Promise.all(streams.map(async (s) => {
for await (const chunk of Object.assign(s, {
[Symbol.asyncIterator]: streamAsyncIterator
}))
await w.write(chunk);
})).then(async () => w.close()).catch((error) => {
console.error(error);
return Promise.all([
t.writable.abort(error),
...streams.map((e) => e.cancel(error))
]);
});
return t.readable;
};
// src/mergeStreamsBy.ts
import DIE6 from "phpdie";
import { sortBy } from "rambda";
// src/emptyStream.ts
var emptyStream = () => new ReadableStream({ start: (c) => c.close() });
// src/mergeStreamsBy.ts
function mergeStreamsBy(transform, sources) {
if (!sources)
return (srcs) => mergeStreamsBy(transform, srcs);
if (!sources.length)
return emptyStream();
const streams = sources.map((s) => toStream(s));
const readers = streams.map((stream) => stream.getReader());
let slots = streams.map(() => null);
return new ReadableStream({
pull: async (ctrl) => {
const results = await Promise.all(readers.map(async (reader, i) => slots[i] ??= await reader.read()));
slots = await transform([...slots], ctrl);
if (slots.length !== streams.length)
DIE6("slot length mismatch");
}
});
}
function mergeStreamsByAscend(ordFn, sources) {
if (!sources)
return (sources2) => mergeStreamsByAscend(ordFn, sources2);
let lastEmit = null;
return mergeStreamsBy(async (slots, ctrl) => {
const cands = slots.filter((e) => e?.done === false).map((e) => e.value);
if (!cands.length) {
ctrl.close();
return [];
}
const peak = sortBy(ordFn, cands)[0];
const index = slots.findIndex((e) => e?.done === false && e?.value === peak);
if (lastEmit && lastEmit.value !== sortBy(ordFn, [lastEmit.value, peak])[0] && ordFn(lastEmit.value) !== ordFn(peak))
throw new Error("MergeStreamError: one of sources is not ordered by ascending", {
cause: {
prevOrd: ordFn(lastEmit.value),
currOrd: ordFn(peak),
prev: lastEmit.value,
curr: peak
}
});
lastEmit = { value: peak };
ctrl.enqueue(peak);
return slots.toSpliced(index, 1, null);
}, sources);
}
function mergeStreamsByDescend(ordFn, sources) {
if (!sources)
return (srcs) => mergeStreamsByDescend(ordFn, srcs);
let lastEmit = null;
return mergeStreamsBy(async (slots, ctrl) => {
const cands = slots.filter((e) => e?.done === false).map((e) => e.value);
if (!cands.length) {
ctrl.close();
return [];
}
const peak = sortBy(ordFn, cands).toReversed()[0];
const index = slots.findIndex((e) => e?.done === false && e?.value === peak);
if (lastEmit && lastEmit.value !== sortBy(ordFn, [lastEmit.value, peak]).toReversed()[0] && ordFn(lastEmit.value) !== ordFn(peak))
DIE6(new Error("MergeStreamError: one of sources is not ordered by descending", {
cause: {
prevOrd: ordFn(lastEmit.value),
currOrd: ordFn(peak),
prev: lastEmit.value,
curr: peak
}
}));
lastEmit = { value: peak };
ctrl.enqueue(peak);
return slots.toSpliced(index, 1, null);
}, sources);
}
// src/pMaps.ts
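// pMaps maps chunks through `fn` with up to `options.concurrency` pending
// promises, but unlike asyncMaps it awaits them in FIFO order, so output
// order always matches input order.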
var pMaps = (fn, options = {}) => {
let i = 0;
let promises = [];
return new TransformStream({
transform: async (chunk, ctrl) => {
promises.push(fn(chunk, i++));
if (promises.length >= (options.concurrency ?? Infinity))
ctrl.enqueue(await promises.shift());
},
flush: async (ctrl) => {
while (promises.length)
ctrl.enqueue(await promises.shift());
}
});
};
// src/portals.ts
var portals = (arg) => {
if (!arg)
return new TransformStream;
if (typeof arg !== "function")
return throughs((s) => s.pipeThrough(arg));
const fn = arg;
const { writable, readable } = new TransformStream;
return { writable, readable: fn(readable) };
};
// src/reduceEmits.ts
var reduceEmits = (fn, _state) => {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
const { next, emit } = await fn(_state, chunk, i++);
_state = next;
ctrl.enqueue(emit);
}
});
};
// src/reduces.ts
var reduces = (fn, state) => {
let i = 0;
return new TransformStream({
transform: async (chunk, ctrl) => {
const ret = fn(state, chunk, i++);
const val = ret instanceof Promise ? await ret : ret;
state = await val;
ctrl.enqueue(state);
}
});
};
// src/riffles.ts
function riffles(sep) {
let last;
return new TransformStream({
transform: (chunk, ctrl) => {
if (last !== undefined) {
ctrl.enqueue(last);
ctrl.enqueue(sep);
}
last = chunk;
},
flush: (ctrl) => ctrl.enqueue(last)
});
}
// src/skips.ts
function skips(n = 1) {
return new TransformStream({
transform: async (chunk, ctrl) => {
if (n <= 0)
ctrl.enqueue(chunk);
else
n--;
}
});
}
// src/slices.ts
function slices(start = 0, end = Infinity) {
const count = end - start;
const { readable, writable } = new TransformStream;
return {
writable,
readable: readable.pipeThrough(skips(start)).pipeThrough(limits(count))
};
}
// src/strings.ts
import replaceAsync from "string-replace-async";
var matchs = (matcher) => {
return new TransformStream({
transform: (chunk, ctrl) => ctrl.enqueue(chunk.match(matcher))
});
};
var matchAlls = (matcher) => {
return new TransformStream({
transform: (chunk, ctrl) => ctrl.enqueue(chunk.matchAll(matcher))
});
};
var replaces = (searchValue, replacement) => {
return maps((s) => typeof replacement === "string" ? s.replace(searchValue, replacement) : replaceAsync(s, searchValue, replacement));
};
var replaceAlls = (searchValue, replacement) => {
return maps((s) => typeof replacement === "string" ? s.replaceAll(searchValue, replacement) : replaceAsync(s, searchValue, replacement));
};
// src/tails.ts
function tails(n = 1) {
let chunks2 = [];
return new TransformStream({
transform: (chunk) => {
chunks2.push(chunk);
if (chunks2.length > n)
chunks2.shift();
},
flush: (ctrl) => {
chunks2.map((e) => ctrl.enqueue(e));
}
});
}
// src/tees.ts
var tees = (arg) => {
if (!arg)
return new TransformStream;
if (arg instanceof WritableStream)
return tees((s) => s.pipeTo(arg));
const fn = arg;
const { writable, readable } = new TransformStream;
const [a, b] = readable.tee();
fn(a);
return { writable, readable: b };
};
// src/terminates.ts
function terminates(signal) {
return throughs((r) => r.pipeThrough(new TransformStream, { signal }));
}
// src/throttles.ts
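// throttles emits at most one chunk per `interval` ms. While cooling down,
// `drop: true` discards incoming chunks, otherwise they wait their turn; with
// `keepLast: true` (default) the most recent withheld chunk is emitted on flush.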
function throttles(interval, { drop = false, keepLast = true } = {}) {
let timerId = null;
let cdPromise = Promise.withResolvers();
let lasts = [];
return new TransformStream({
transform: async (chunk, ctrl) => {
if (timerId) {
if (keepLast)
lasts = [chunk];
if (drop)
return;
await cdPromise.promise;
}
lasts = [];
ctrl.enqueue(chunk);
[cdPromise, timerId] = [
Promise.withResolvers(),
setTimeout(() => {
timerId = null;
cdPromise.resolve();
}, interval)
];
},
flush: async (ctrl) => {
while (timerId)
await new Promise((r) => setTimeout(r, interval / 2));
lasts.map((e) => ctrl.enqueue(e));
}
});
}
// src/uniqs.ts
var uniqs = () => {
const set = new Set;
return throughs((s) => s.pipeThrough(filters((x) => {
if (set.has(x))
return false;
set.add(x);
return true;
})));
};
var uniqBys = (keyFn) => {
const set = new Set;
return throughs((s) => s.pipeThrough(filters(async (x) => {
const key = await keyFn(x);
if (set.has(key))
return false;
set.add(key);
return true;
})));
};
// src/unwinds.ts
import { unwind } from "unwind-array";
function unwinds(key) {
return flatMaps((e) => unwind(e, { path: key }));
}
// src/xsvStreams.ts
import { csvFormatBody, csvParse, tsvFormatBody, tsvParse } from "d3";
function csvFormats(header) {
  const _header = typeof header === "string" ? header.split(",") : header;
  return new TransformStream({
    start: (ctrl) => ctrl.enqueue(_header.join(",") + "\n"),
    transform: (chunk, ctrl) => ctrl.enqueue(csvFormatBody([chunk], _header) + "\n")
  });
}
function csvParses(header) {
  const _header = typeof header === "string" ? header.split(",") : header;
  return throughs((r) => r.pipeThrough(lines({ EOL: "LF" })).pipeThrough(skips(1)).pipeThrough(maps((line) => csvParse(_header.join(",") + "\n" + line)[0])));
}
function tsvFormats(header) {
  const sep = "\t";
  const _header = typeof header === "string" ? header.split(sep) : header;
  return new TransformStream({
    start: (ctrl) => ctrl.enqueue(_header.join(sep) + "\n"),
    transform: (chunk, ctrl) => ctrl.enqueue(tsvFormatBody([chunk], _header) + "\n")
  });
}
function tsvParses(header) {
  const _header = typeof header === "string" ? header.split("\t") : header;
  return throughs((r) => r.pipeThrough(lines({ EOL: "LF" })).pipeThrough(skips(1)).pipeThrough(maps((line) => tsvParse(_header.join("\t") + "\n" + line)[0])));
}
// src/toLatest.ts
function toLatests(r) {
let latest;
let nextPromise = Promise.withResolvers();
r.pipeTo(new WritableStream({
write: (value) => {
latest = value;
nextPromise.resolve(value);
nextPromise = Promise.withResolvers();
},
close: () => {
nextPromise.resolve(undefined);
}
}));
return {
get latest() {
if (latest === undefined) {
return nextPromise.promise;
}
return Promise.resolve(latest);
},
get next() {
return nextPromise.promise;
}
};
}
// src/sflow.ts
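// sflow wraps a source (ReadableStream, array, iterable, promise, ...) into a
// chainable flow whose methods pipe through the transforms defined above and
// whose terminal methods (toArray, toFirst, text, json, done, ...) consume it.
// Illustrative chain (names taken from the methods below):
//   await sflow(["a\n", "b", "c\n"]).lines({ EOL: "NONE" }).map((l) => l.toUpperCase()).toArray()
//   // => ["A", "BC"]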
function sflow(...srcs) {
let r = srcs.length === 1 ? toStream(srcs[0]) : concatStream(srcs);
return Object.assign(r, {
_type: null,
get readable() {
return r;
},
portal: (...args) => sflow(r.pipeThrough(portals(...args))),
through: (...args) => sflow(r.pipeThrough(_throughs(...args))),
by: (...args) => sflow(r.pipeThrough(_throughs(...args))),
byLazy: (t) => _byLazy(r, t),
mapAddField: (...args) => sflow(r.pipeThrough(mapAddFields(...args))),
cacheSkip: (...args) => sflow(r).byLazy(cacheSkips(...args)),
cacheList: (...args) => sflow(r).byLazy(cacheLists(...args)),
cacheTail: (...args) => sflow(r).byLazy(cacheTails(...args)),
chunkBy: (...args) => sflow(r.pipeThrough(chunkBys(...args))),
chunkIf: (...args) => sflow(r.pipeThrough(chunkIfs(...args))),
buffer: (...args) => sflow(r.pipeThrough(chunks(...args))),
chunk: (...args) => sflow(r.pipeThrough(chunks(...args))),
convolve: (...args) => sflow(r.pipeThrough(convolves(...args))),
abort: (...args) => sflow(r.pipeThrough(terminates(...args))),
chunkInterval: (...args) => sflow(r.pipeThrough(chunkIntervals(...args))),
interval: (...args) => sflow(r.pipeThrough(chunkIntervals(...args))),
debounce: (...args) => sflow(r.pipeThrough(debounces(...args))),
filter: (...args) => sflow(r.pipeThrough(filters(...args))),
find: (...args) => sflow(r.pipeThrough(finds(...args))),
flatMap: (...args) => sflow(r.pipeThrough(flatMaps(...args))),
flat: (...args) => sflow(r).by(flats(...args)),
join: (...args) => sflow(r.pipeThrough(riffles(...args))),
match: (...args) => sflow(r.pipeThrough(matchs(...args))),
matchAll: (...args) => sflow(r.pipeThrough(matchAlls(...args))),
replace: (...args) => sflow(r.pipeThrough(replaces(...args))),
replaceAll: (...args) => sflow(r.pipeThrough(replaceAlls(...args))),
merge: (...args) => sflow(r.pipeThrough(merges(...args))),
concat: (srcs2) => sflow(r.pipeThrough(concats(srcs2))),
confluence: (...args) => sflow(r.pipeThrough(confluences(...args))),
confluenceByZip: () => sflow(r).by(confluences()),
confluenceByConcat: () => sflow(r).by((srcs2) => concatStream(srcs2)),
confluenceByParallel: () => sflow(r).by((srcs2) => sflow(srcs2).toArray().then((srcs3) => mergeStream(...srcs3))).confluence(),
confluenceByAscend: (ordFn) => sflow(r).chunk().map((srcs2) => mergeStreamsByAscend(ordFn, srcs2)).confluence(),
confluenceByDescend: (ordFn) => sflow(r).chunk().map((srcs2) => mergeStreamsByDescend(ordFn, srcs2)).confluence(),
limit: (...args) => sflow(r).byLazy(limits(...args)),
head: (...args) => sflow(r.pipeThrough(heads(...args))),
map: (...args) => sflow(r.pipeThrough(maps(...args))),
log: (...args) => sflow(r.pipeThrough(logs(...args))),
uniq: (...args) => sflow(r.pipeThrough(uniqs(...args))),
uniqBy: (...args) => sflow(r.pipeThrough(uniqBys(...args))),
unwind: (...args) => sflow(r.pipeThrough(unwinds(...args))),
asyncMap: (...args) => sflow(r.pipeThrough(asyncMaps(...args))),
pMap: (...args) => sflow(r.pipeThrough(pMaps(...args))),
peek: (...args) => sflow(r.pipeThrough(peeks(...args))),
riffle: (...args) => sflow(r.pipeThrough(riffles(...args))),
forEach: (...args) => sflow(r.pipeThrough(forEachs(...args))),
reduce: (...args) => sflow(r.pipeThrough(reduces(...args))),
reduceEmit: (...args) => sflow(r.pipeThrough(reduceEmits(...args))),
skip: (...args) => sflow(r.pipeThrough(skips(...args))),
slice: (...args) => sflow(r.pipeThrough(slices(...args))),
tail: (...args) => sflow(r.pipeThrough(tails(...args))),
tees: (...args) => sflow(r.pipeThrough(_tees(...args))),
forkTo: (...args) => sflow(r.pipeThrough(_tees(...args))),
fork: () => {
let b;
[r, b] = r.tee();
return sflow(b);
},
throttle: (...args) => sflow(r.pipeThrough(throttles(...args))),
csvFormat: (...args) => sflow(r.pipeThrough(csvFormats(...args))),
tsvFormat: (...args) => sflow(r.pipeThrough(tsvFormats(...args))),
csvParse: (...args) => sflow(r.pipeThrough(csvParses(...args))),
tsvParse: (...args) => sflow(r.pipeThrough(tsvParses(...args))),
preventAbort: () => sflow(r.pipeThrough(throughs(), { preventAbort: true })),
preventClose: () => sflow(r.pipeThrough(throughs(), { preventClose: true })),
preventCancel: () => sflow(r.pipeThrough(throughs(), { preventCancel: true })),
onStart: (start) => sflow(r).by(new TransformStream({ start })),
onTransform: (transform) => sflow(r).by(new TransformStream({ transform })),
onFlush: (flush) => sflow(r).by(new TransformStream({ flush })),
done: () => r.pipeTo(nils()),
end: (dst = nils()) => r.pipeTo(dst),
to: (dst = nils()) => r.pipeTo(dst),
run: () => r.pipeTo(nils()),
toEnd: () => r.pipeTo(nils()),
toNil: () => r.pipeTo(nils()),
toArray: () => toArray(r),
toCount: async () => {
let i = 0;
const d = r.getReader();
while (!(await d.read()).done)
i++;
return i;
},
toFirst: () => toPromise(sflow(r).limit(1, { terminate: true })),
toFirstMatch: (predicate) => toPromise(sflow(r).find(predicate)),
toLast: () => toPromise(sflow(r).tail(1)),
toExactlyOne: async () => {
const a = await toArray(r);
a.length === 1 || DIE7(`Expect exactly 1 Item, but got ${a.length}`);
return a[0];
},
toOne: async () => {
const a = await toArray(r);
if (a.length > 1)
DIE7(`Expect only 1 Item, but got ${a.length}`);
return a[0];
},
toAtLeastOne: async () => {
const a = await toArray(r);
if (a.length > 1)
DIE7(`Expect only 1 Item, but got ${a.length}`);
if (a.length < 1)
DIE7(`Expect at least 1 Item, but got ${a.length}`);
return a[0];
},
toLatest: () => toLatests(sflow(r)),
toLog: (...args) => sflow(r.pipeThrough(logs(...args))).done(),
lines: (...args) => sflow(r.pipeThrough(lines(...args))),
toResponse: (init) => new Response(r, init),
text: (init) => new Response(r.pipeThrough(new PolyfillTextEncoderStream), init).text(),
json: (init) => new Response(r.pipeThrough(new PolyfillTextEncoderStream), init).json(),
blob: (init) => new Response(sflow(r), init).blob(),
arrayBuffer: (init) => new Response(r, init).arrayBuffer(),
[Symbol.asyncIterator]: streamAsyncIterator
});
}
var _tees = (arg) => {
if (!arg)
return new TransformStream;
if (arg instanceof WritableStream)
return tees((s) => s.pipeTo(arg));
const fn = arg;
const { writable, readable } = new TransformStream;
const [a, b] = readable.tee();
fn(sflow(a));
return { writable, readable: b };
};
var _throughs = (arg) => {
if (!arg)
return new TransformStream;
if (typeof arg !== "function")
return throughs((s) => s.pipeThrough(arg));
const fn = arg;
const { writable, readable } = new TransformStream;
return { writable, readable: sflow(fn(sflow(readable))) };
};
function _byLazy(r, t) {
const reader = r.getReader();
const tw = t.writable.getWriter();
const tr = t.readable.getReader();
return sflow(new ReadableStream({
start: async (ctrl) => {
(async function() {
while (true) {
const { done, value } = await tr.read();
if (done)
return ctrl.close();
ctrl.enqueue(value);
}
})();
},
pull: async (ctrl) => {
const { done, value } = await reader.read();
if (done)
return tw.close();
await tw.write(value);
},
cancel: async (r2) => {
reader.cancel(r2);
tr.cancel(r2);
}
}, { highWaterMark: 0 }));
}
// src/index.ts
import { default as default4 } from "polyfill-text-decoder-stream";
import { default as default5 } from "polyfill-text-encoder-stream";
// src/chunkOverlaps.ts
function chunkOverlaps({
step,
overlap
}) {
let chunks2 = [];
if (step <= 0)
throw new Error("step must be greater than 0");
if (overlap < 0)
throw new Error("overlap must be greater than or equal to 0");
return new TransformStream({
transform: async (chunk, ctrl) => {
chunks2.push(chunk);
if (chunks2.length >= step + overlap)
ctrl.enqueue([...chunks2.splice(0, step), ...chunks2.slice(0, overlap)]);
},
flush: async (ctrl) => void (chunks2.length && ctrl.enqueue(chunks2))
});
}
// src/chunkTransforms.ts
function chunkTransforms(options) {
let chunks2 = [];
const { start, transform, flush } = options;
return new TransformStream({
start: async (ctrl) => {
if (start)
chunks2 = await start(chunks2, ctrl);
},
transform: async (chunk, ctrl) => {
chunks2.push(chunk);
if (transform)
chunks2 = await transform(chunks2, ctrl);
},
flush: async (ctrl) => {
if (flush)
chunks2 = await flush(chunks2, ctrl);
}
});
}
// src/distributeBys.ts
var distributeBys = (groupFn) => {
const streams = new Map;
const { writable: srcs, readable } = new TransformStream;
const { writable, readable: chunks2 } = new TransformStream;
const w = srcs.getWriter();
chunks2.pipeThrough(pMaps(async (chunk) => {
const ord = await groupFn(chunk);
if (!streams.has(ord))
await async function() {
const t2 = new TransformStream;
await w.write(t2.readable);
const r = { ...t2, writer: t2.writable.getWriter() };
streams.set(ord, r);
return r;
}();
const t = streams.get(ord);
await t.writer.write(chunk);
})).pipeTo(nils()).finally(() => {
w.close();
[...streams.values()].map((e) => e.writer.close());
});
return { writable, readable };
};
// src/mergeAscends.ts
import DIE8 from "phpdie";
import { sortBy as sortBy2 } from "rambda";
var mergeAscends = (ordFn, _srcs) => {
if (!_srcs)
return (srcs) => mergeAscends(ordFn, srcs);
return sflow(new ReadableStream({
pull: async (ctrl) => {
const srcs = await sflow(_srcs).toArray();
const slots = srcs.map(() => {
return;
});
const pendingSlotRemoval = srcs.map(() => {
return;
});
const drains = srcs.map(() => false);
let lastMinValue = undefined;
await Promise.all(srcs.map(async (src, i) => {
for await (const value of sflow(src)) {
while (slots[i] !== undefined) {
if (shiftMinValueIfFull())
continue;
pendingSlotRemoval[i] = Promise.withResolvers();
await pendingSlotRemoval[i].promise;
}
slots[i] = { value };
shiftMinValueIfFull();
}
drains[i] = true;
pendingSlotRemoval.map((e) => e?.resolve());
await Promise.all(pendingSlotRemoval.map((e) => e?.promise));
const allDrain = drains.every(Boolean);
if (allDrain) {
while (slots.some((e) => e !== undefined))
shiftMinValueIfFull();
ctrl.close();
}
function shiftMinValueIfFull() {
const isFull = slots.every((slot, i2) => slot !== undefined || drains[i2]);
if (!isFull)
return false;
const fullSlots = slots.flatMap((e) => e !== undefined ? [e] : []).map((e) => e.value);
const minValue = sortBy2(ordFn, fullSlots)[0];
const minIndex = slots.findIndex((e) => e?.value === minValue);
if (lastMinValue !== undefined) {
const ordered = sortBy2(ordFn, [lastMinValue, minValue]);
ordered[0] === lastMinValue && ordered[1] === minValue || DIE8(`
MergeAscendError: one of source stream is not ascending ordered.
stream index: ${minIndex}
prev: ${ordFn(lastMinValue)}
prev: ${JSON.stringify(lastMinValue)}
curr: ${ordFn(minValue)}
curr: ${JSON.stringify(minValue)}
`);
}
lastMinValue = minValue;
ctrl.enqueue(minValue);
slots[minIndex] = undefined;
pendingSlotRemoval[minIndex]?.resolve();
pendingSlotRemoval[minIndex] = undefined;
return true;
}
}));
}
}, { highWaterMark: 0 }));
};
var mergeDescends = (ordFn, _srcs) => {
if (!_srcs)
return (srcs) => mergeDescends(ordFn, srcs);
return toStream(new ReadableStream({
pull: async (ctrl) => {
const srcs = await sflow(_srcs).toArray();
const slots = srcs.map(() => {
return;
});
const pendingSlotRemoval = srcs.map(() => {
return;
});
const drains = srcs.map(() => false);
let lastMaxValue = undefined;
await Promise.all(srcs.map(async (src, i) => {
for await (const value of toStream(src)) {
while (slots[i] !== undefined) {
if (shiftMaxValueIfFull())
continue;
pendingSlotRemoval[i] = Promise.withResolvers();
await pendingSlotRemoval[i].promise;
}
slots[i] = { value };
shiftMaxValueIfFull();
}
drains[i] = true;
pendingSlotRemoval.map((e) => e?.resolve());
await Promise.all(pendingSlotRemoval.map((e) => e?.promise));
const allDrain = drains.every(Boolean);
if (allDrain) {
while (slots.some((e) => e !== undefined))
shiftMaxValueIfFull();
ctrl.close();
}
function shiftMaxValueIfFull() {
const isFull = slots.every((slot, i2) => slot !== undefined || drains[i2]);
if (!isFull)
return false;
const fullSlots = slots.flatMap((e) => e !== undefined ? [e] : []).map((e) => e.value);
const maxValue = sortBy2(ordFn, fullSlots).toReversed()[0];
const maxIndex = slots.findIndex((e) => e?.value === maxValue);
if (lastMaxValue !== undefined) {
const ordered = sortBy2(ordFn, [maxValue, lastMaxValue]);
ordered[0] === maxValue && ordered[1] === lastMaxValue || DIE8(`
MergeDescendError: one of source stream is not descending ordered.
stream index: ${maxIndex}
prev: ${ordFn(lastMaxValue)}
prev: ${JSON.stringify(lastMaxValue)}
curr: ${ordFn(maxValue)}
curr: ${JSON.stringify(maxValue)}
`);
}
lastMaxValue = maxValue;
ctrl.enqueue(maxValue);
slots[maxIndex] = undefined;
pendingSlotRemoval[maxIndex]?.resolve();
pendingSlotRemoval[maxIndex] = undefined;
return true;
}
}));
}
}, { highWaterMark: 0 }));
};
// src/pageStream.ts
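// pageStream turns a cursor-based paginated fetcher into a stream: starting
// from `initialQuery`, each pull calls `fetcher(cursor)`, which returns
// { data, next }; `data` is enqueued and the stream closes when `next` is null.
// Illustrative sketch (fetchPage is a hypothetical API):
//   pageFlow(1, async (page) => {
//     const items = await fetchPage(page);
//     return { data: items, next: items.length ? page + 1 : null };
//   })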
function pageStream(initialQuery, fetcher) {
let query = null;
return new ReadableStream({
pull: async (ctrl) => {
if (query === null)
query = { value: await initialQuery };
const ret = fetcher(query.value);
const val = ret instanceof Promise ? await ret : ret;
const { data, next } = val;
if (data !== undefined)
ctrl.enqueue(data);
if (next == null)
return ctrl.close();
query.value = next;
}
}, { highWaterMark: 0 });
}
// src/pageFlow.ts
function pageFlow(initialCursor, fetcher) {
return sflow(pageStream(initialCursor, fetcher));
}
// src/rangeStream.ts
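// rangeStream emits integers in [min, max): rangeStream(3) yields 0, 1, 2 and
// rangeStream(2, 5) yields 2, 3, 4; rangeFlow wraps the same range as a flow.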
function rangeStream(...args) {
const [min, max] = args[1] != null ? [args[0], args[1]] : [0, args[0]];
let i = min;
return new ReadableStream({
pull: (ctrl) => {
ctrl.enqueue(i);
if (++i >= max)
ctrl.close();
}
}, { highWaterMark: 0 });
}
function rangeFlow(...args) {
return sflow(rangeStream(...args));
}
// src/sfTemplate.ts
function sfTemplate(tsa, ...args) {
return sflow(...tsa.map((str) => [sflow([str]), args.shift() || []]).flat());
}
var sfT = sfTemplate;
// src/svector.ts
var svector = (...src) => sflow(src);
// src/sf.ts
var exports_sf = {};
__export(exports_sf, {
unwinds: () => unwinds,
unpromises: () => unpromises,
uniqs: () => uniqs,
uniqBys: () => uniqBys,
throughs: () => throughs,
throttles: () => throttles,
tees: () => tees,
tails: () => tails,
svector: () => svector,
sv: () => svector,
streamAsyncIterator: () => streamAsyncIterator,
snoflow: () => sflow,
slices: () => slices,
skips: () => skips,
sflow: () => sflow,
sfTemplate: () => sfTemplate,
sfT: () => sfT,
sf: () => sflow,
replaces: () => replaces,
replaceAlls: () => replaceAlls,
reduces: () => reduces,
rangeStream: () => rangeStream,
rangeFlow: () => rangeFlow,
portals: () => portals,
peeks: () => peeks,
pageStream: () => pageStream,
pageFlow: () => pageFlow,
pMaps: () => pMaps,
nils: () => nils,
nil: () => nil,
merges: () => merges,
mergeStreamsByDescend: () => mergeStreamsByDescend,
mergeStreamsByAscend: () => mergeStreamsByAscend,
mergeStreamsBy: () => mergeStreamsBy,
mergeStream: () => mergeStream,
mergeDescends: () => mergeDescends,
mergeAscends: () => mergeAscends,
matchs: () => matchs,
matchAlls: () => matchAlls,
maps: () => maps,
mapAddFields: () => mapAddFields,
logs: () => logs,
lines: () => lines,
joins: () => merges,
intervals: () => chunkIntervals,
forEachs: () => forEachs,
flats: () => flats,
flatMaps: () => flatMaps,
filters: () => filters,
distributesBy: () => distributeBys,
debounces: () => debounces,
confluences: () => confluences,
concats: () => concats,
concatStream: () => concatStream,
composers: () => composers,
chunks: () => chunks,
chunkTransforms: () => chunkTransforms,
chunkOverlaps: () => chunkOverlaps,
chunkIntervals: () => chunkIntervals,
chunkIfs: () => chunkIfs,
chunkBys: () => chunkBys,
cacheTails: () => cacheTails,
cacheSkips: () => cacheSkips,
cacheLists: () => cacheLists,
bys: () => bys,
buffers: () => chunks,
aborts: () => terminates,
TextEncoderStream: () => default3,
TextDecoderStream: () => default2
});
import { default as default2 } from "polyfill-text-decoder-stream";
import { default as default3 } from "polyfill-text-encoder-stream";
// src/composers.ts
function composers(stream) {
return Object.assign(stream, {
by: (appendStream) => composers({
writable: stream.writable,
readable: stream.readable.pipeThrough(appendStream)
})
});
}
// src/index.ts
var src_default = sflow;
export {
unwinds,
unpromises,
uniqs,
uniqBys,
throughs,
throttles,
tees,
tails,
svector,
svector as sv,
streamAsyncIterator,
sflow as snoflow,
slices,
skips,
sflow,
sfTemplate,
sfT,
exports_sf as sf,
replaces,
replaceAlls,
reduces,
rangeStream,
rangeFlow,
portals,
peeks,
pageStream,
pageFlow,
pMaps,
nils,
nil,
merges,
mergeStreamsByDescend,
mergeStreamsByAscend,
mergeStreamsBy,
mergeStream,
mergeDescends,
mergeAscends,
matchs,
matchAlls,
maps,
mapAddFields,
logs,
lines,
merges as joins,
chunkIntervals as intervals,
forEachs,
flats,
flatMaps,
finds,
filters,
distributeBys as distributesBy,
src_default as default,
debounces,
confluences,
concats,
concatStream,
chunks,
chunkTransforms,
chunkOverlaps,
chunkIntervals,
chunkIfs,
chunkBys,
cacheTails,
cacheSkips,
cacheLists,
bys,
chunks as buffers,
andIgnoreError,
terminates as aborts,
default5 as TextEncoderStream,
default4 as TextDecoderStream
};
//# debugId=35D754EC9CB04BDB64756E2164756E21