UNPKG

tupleson

Version:

A hackable JSON serializer/deserializer

208 lines 6.68 kB
import { TsonError } from "../errors.js";
import { assert } from "../internals/assert.js";
import { isTsonTuple } from "../internals/isTsonTuple.js";
import { mapOrReturn } from "../internals/mapOrReturn.js";
import { TsonAbortError, TsonStreamInterruptedError } from "./asyncErrors.js";
import {
  createReadableStream,
  mapIterable,
  readableStreamToAsyncIterable
} from "./iterableUtils.js";

/**
 * Builds the core async deserializer.
 *
 * Returns an async function that consumes an iterable of parsed tson
 * messages (a head object first, then `[index, value]` stream entries)
 * and reconstructs the original value, wiring async values (promises,
 * iterables, …) to per-index readable streams.
 */
function createTsonDeserializer(opts) {
  // Index registered type handlers by their key; duplicate keys are a
  // configuration error and rejected eagerly.
  const handlersByKey = {};
  for (const handler of opts.types) {
    if (handler.key) {
      if (handlersByKey[handler.key]) {
        throw new Error(`Multiple handlers for key ${handler.key} found`);
      }
      handlersByKey[handler.key] = handler;
    }
  }

  return async (iterable, parseOptions) => {
    // Stream controllers keyed by the async value's index in the payload.
    const controllers = /* @__PURE__ */ new Map();
    // Deserialized async values by index, so a reconnecting stream can
    // hand back the same object instead of creating a second one.
    const cache = /* @__PURE__ */ new Map();
    const iterator = iterable[Symbol.asyncIterator]();

    // Produces a recursive walker bound to one nonce. The walker turns
    // tson tuples back into values via their registered handler; async
    // handlers get a reader plus a close() callback instead of a value.
    const walker = (nonce) => {
      const walk = (value) => {
        if (!isTsonTuple(value, nonce)) {
          // Plain value or container: recurse into children.
          return mapOrReturn(value, walk);
        }

        const [type, serializedValue] = value;
        const transformer = handlersByKey[type];
        assert(transformer, `No transformer found for type ${type}`);

        if (!transformer.async) {
          // Synchronous handler: deserialize the walked payload directly.
          return transformer.deserialize(walk(serializedValue));
        }

        // Async handler: the payload is the stream index.
        const idx = serializedValue;
        if (cache.has(idx)) {
          // Seeing the same index twice is only legal on reconnect.
          assert(
            parseOptions.reconnect,
            "Duplicate index found but reconnect is off"
          );
          return cache.get(idx);
        }

        const [readable, controller] = createReadableStream();
        assert(controller, "Controller not set - this is a bug");
        controllers.set(idx, controller);

        const result = transformer.deserialize({
          close() {
            controller.close();
            controllers.delete(idx);
          },
          reader: readable.getReader()
        });
        cache.set(idx, result);
        return result;
      };
      return walk;
    };

    // Drains `[index, value]` entries from the iterator and feeds each
    // walked value into its stream. A non-array message signals a new
    // head (reconnect), in which case we recurse with a fresh walker.
    async function getStreamedValues(walk) {
      while (true) {
        const next = await iterator.next();
        if (next.done) {
          break;
        }
        const { value } = next;

        if (!Array.isArray(value)) {
          parseOptions.onReconnect?.();
          assert(
            parseOptions.reconnect,
            "Stream got beginning of results but reconnecting is not enabled"
          );
          await getStreamedValues(walker(value.nonce));
          return;
        }

        const [index, result] = value;
        const controller = controllers.get(index);
        const walkedResult = walk(result);
        if (!parseOptions.reconnect) {
          // Without reconnect support, an unknown index is a hard error.
          assert(controller, `No stream found for index ${index}`);
        }
        controller?.enqueue(walkedResult);
      }
    }

    // Reads the head message, walks it into the result value, and kicks
    // off background draining of the streamed values.
    async function init() {
      const next = await iterator.next();
      if (next.done) {
        throw new TsonError("Unexpected end of stream before head");
      }
      const head = next.value;
      const walk = walker(head.nonce);
      try {
        return walk(head.json);
      } finally {
        // Deliberately not awaited: values stream in after the head has
        // been returned. On failure, propagate the interruption into
        // every open stream and notify the caller's error hook.
        getStreamedValues(walk).catch((cause) => {
          const err = new TsonStreamInterruptedError(cause);
          for (const controller of controllers.values()) {
            controller.enqueue(err);
          }
          parseOptions.onStreamError?.(err);
        });
      }
    }

    return await init().catch((cause) => {
      throw new TsonStreamInterruptedError(cause);
    });
  };
}

/**
 * Accumulates string chunks and exposes the complete lines seen so far.
 * A trailing partial line is buffered until its newline arrives.
 */
function lineAccumulator() {
  let buffered = "";
  const lines = [];
  return {
    lines,
    push(chunk) {
      buffered += chunk;
      const parts = buffered.split("\n");
      // The final element is an incomplete line (possibly ""): keep it.
      buffered = parts.pop() ?? "";
      lines.push(...parts);
    }
  };
}

/**
 * Adapts a string iterable (the serialized wire format: a `[head,`
 * opener, one JSON value per line, a `]]` terminator) into an iterable
 * of parsed tson messages for the deserializer.
 */
async function* stringIterableToTsonIterable(iterable) {
  const acc = lineAccumulator();

  // Tiny state machine over the wire format.
  const AWAITING_HEAD = 0;
  const STREAMING_VALUES = 1;
  const ENDED = 2;
  let state = AWAITING_HEAD;

  for await (const chunk of iterable) {
    acc.push(chunk);

    if (state === AWAITING_HEAD && acc.lines.length >= 2) {
      // First line is the opening bracket; second is the head JSON.
      acc.lines.shift();
      const headLine = acc.lines.shift();
      assert(headLine, "No head line found");
      yield JSON.parse(headLine);
      state = STREAMING_VALUES;
    }

    if (state === STREAMING_VALUES) {
      while (acc.lines.length) {
        let line = acc.lines.shift();
        line = line.trimStart();
        // Values are comma-separated; strip a leading separator.
        if (line.startsWith(",")) {
          line = line.slice(1);
        }
        // Skip structural noise between values.
        if (line === "" || line === "[" || line === ",") {
          continue;
        }
        if (line === "]]") {
          state = ENDED;
          continue;
        }
        yield JSON.parse(line);
      }
    }
  }

  assert(state === ENDED, `Stream ended unexpectedly (state ${state})`);
}

/**
 * Creates an async parser over any string iterable (e.g. decoded
 * network chunks). The optional second argument carries parse options
 * such as `reconnect` and error hooks.
 */
function createTsonParseAsync(opts) {
  const instance = createTsonDeserializer(opts);
  return async (iterable, parseOpts) => {
    const messages = stringIterableToTsonIterable(iterable);
    return await instance(messages, parseOpts ?? {});
  };
}

/**
 * Creates a parser that consumes an `EventSource` (server-sent events).
 * Each SSE message carries one JSON-encoded tson message; a `close`
 * event ends the stream, and an optional `AbortSignal` cancels it.
 */
function createTsonParseEventSource(opts) {
  const instance = createTsonDeserializer(opts);
  return async (url, parseOpts = {}) => {
    const [stream, controller] = createReadableStream();
    const eventSource = new EventSource(url);
    const { signal } = parseOpts;

    const onAbort = () => {
      assert(signal);
      eventSource.close();
      controller.error(new TsonAbortError("Stream aborted by user"));
      signal.removeEventListener("abort", onAbort);
    };
    signal?.addEventListener("abort", onAbort);

    eventSource.onmessage = (msg) => {
      controller.enqueue(JSON.parse(msg.data));
    };
    eventSource.addEventListener("close", () => {
      controller.close();
      eventSource.close();
    });

    const iterable = readableStreamToAsyncIterable(stream);
    return await instance(iterable, parseOpts);
  };
}

/**
 * Creates a parser for a fetch `Response` whose body is a tson JSON
 * stream. Bytes are decoded with `opts.textDecoder` (defaulting to a
 * fresh `TextDecoder`) before being fed to the async parser.
 */
function createTsonParseJsonStreamResponse(opts) {
  const instance = createTsonParseAsync(opts);
  const textDecoder = opts.textDecoder ?? new TextDecoder();
  return async (response) => {
    assert(response.body, "Response body is empty");
    const stringIterator = mapIterable(
      readableStreamToAsyncIterable(response.body),
      (v) => textDecoder.decode(v)
    );
    return await instance(stringIterator);
  };
}

export {
  createTsonParseAsync,
  createTsonParseEventSource,
  createTsonParseJsonStreamResponse
};
//# sourceMappingURL=deserializeAsync.mjs.map