tupleson
A hackable JSON serializer/deserializer
// Bundler-generated CommonJS interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Register the public exports on the CommonJS module.
var deserializeAsync_exports = {};
__export(deserializeAsync_exports, {
  createTsonParseAsync: () => createTsonParseAsync,
  createTsonParseEventSource: () => createTsonParseEventSource,
  createTsonParseJsonStreamResponse: () => createTsonParseJsonStreamResponse
});
module.exports = __toCommonJS(deserializeAsync_exports);
var import_errors = require("../errors.js");
var import_assert = require("../internals/assert.js");
var import_isTsonTuple = require("../internals/isTsonTuple.js");
var import_mapOrReturn = require("../internals/mapOrReturn.js");
var import_asyncErrors = require("./asyncErrors.js");
var import_iterableUtils = require("./iterableUtils.js");
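// Async deserialization for tupleson. createTsonDeserializer below is the
// shared core; the exported factories wrap it for three transports: plain
// string iterables (createTsonParseAsync), Server-Sent Events
// (createTsonParseEventSource), and fetch Responses
// (createTsonParseJsonStreamResponse).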
// Builds the shared async deserializer. It walks each parsed chunk, revives
// registered TSON types, and wires async values to per-index readable streams
// that are fed as later chunks arrive.
function createTsonDeserializer(opts) {
  const typeByKey = {};
  for (const handler of opts.types) {
    if (handler.key) {
      if (typeByKey[handler.key]) {
        throw new Error(`Multiple handlers for key ${handler.key} found`);
      }
      typeByKey[handler.key] = handler;
    }
  }
  return async (iterable, parseOptions) => {
    // One stream controller per async value index; the cache lets a
    // reconnecting stream reuse results that were already created.
    const controllers = /* @__PURE__ */ new Map();
    const cache = /* @__PURE__ */ new Map();
    const iterator = iterable[Symbol.asyncIterator]();
    const walker = (nonce) => {
      const walk = (value) => {
        if ((0, import_isTsonTuple.isTsonTuple)(value, nonce)) {
          const [type, serializedValue] = value;
          const transformer = typeByKey[type];
          (0, import_assert.assert)(transformer, `No transformer found for type ${type}`);
          if (!transformer.async) {
            // Synchronous types are revived in place.
            const walkedValue = walk(serializedValue);
            return transformer.deserialize(walkedValue);
          }
          // Async types serialize to an index; their values arrive in later
          // chunks and are pushed through a readable stream.
          const idx = serializedValue;
          if (cache.has(idx)) {
            (0, import_assert.assert)(
              parseOptions.reconnect,
              "Duplicate index found but reconnect is off"
            );
            return cache.get(idx);
          }
          const [readable, controller] = (0, import_iterableUtils.createReadableStream)();
          (0, import_assert.assert)(controller, "Controller not set - this is a bug");
          controllers.set(idx, controller);
          const result = transformer.deserialize({
            close() {
              controller.close();
              controllers.delete(idx);
            },
            reader: readable.getReader()
          });
          cache.set(idx, result);
          return result;
        }
        return (0, import_mapOrReturn.mapOrReturn)(value, walk);
      };
      return walk;
    };
    // Drains the remaining chunks, routing each [index, value] pair to the
    // stream created for that index while walking the head.
    async function getStreamedValues(walk) {
      while (true) {
        const nextValue = await iterator.next();
        if (nextValue.done) {
          break;
        }
        const { value } = nextValue;
        if (!Array.isArray(value)) {
          // A non-array chunk is a new head from a reconnected producer.
          parseOptions.onReconnect?.();
          (0, import_assert.assert)(
            parseOptions.reconnect,
            "Stream got beginning of results but reconnecting is not enabled"
          );
          await getStreamedValues(walker(value.nonce));
          return;
        }
        const [index, result] = value;
        const controller = controllers.get(index);
        const walkedResult = walk(result);
        if (!parseOptions.reconnect) {
          (0, import_assert.assert)(controller, `No stream found for index ${index}`);
        }
        controller?.enqueue(walkedResult);
      }
    }
    // Walks the head chunk into the returned value, then keeps consuming the
    // streamed chunks in the background.
    async function init() {
      const nextValue = await iterator.next();
      if (nextValue.done) {
        throw new import_errors.TsonError("Unexpected end of stream before head");
      }
      const head = nextValue.value;
      const walk = walker(head.nonce);
      try {
        const walked = walk(head.json);
        return walked;
      } finally {
        // If the background drain fails, surface the error to every pending
        // stream and to the onStreamError callback.
        getStreamedValues(walk).catch((cause) => {
          const err = new import_asyncErrors.TsonStreamInterruptedError(cause);
          for (const controller of controllers.values()) {
            controller.enqueue(err);
          }
          parseOptions.onStreamError?.(err);
        });
      }
    }
    return await init().catch((cause) => {
      throw new import_asyncErrors.TsonStreamInterruptedError(cause);
    });
  };
}
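// Sketch of the chunk protocol the deserializer above consumes, inferred from
// init() and getStreamedValues() (shapes are illustrative, not normative):
//
//   { nonce: "<nonce>", json: <value with async leaves as indexed TSON tuples> }  // head chunk
//   [<index>, <payload for the async value registered under that index>]          // every later chunk
//
// Any non-array chunk after the head is treated as a fresh head sent by a
// reconnecting producer.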
// Accumulates incoming string chunks and exposes the complete lines seen so
// far; a trailing partial line stays buffered until its newline arrives.
function lineAccumulator() {
  let accumulator = "";
  const lines = [];
  return {
    lines,
    push(chunk) {
      accumulator += chunk;
      const parts = accumulator.split("\n");
      accumulator = parts.pop() ?? "";
      lines.push(...parts);
    }
  };
}
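// Usage sketch (illustrative only):
//
//   const acc = lineAccumulator();
//   acc.push("foo\nba");
//   acc.lines; // ["foo"] - "ba" is still buffered
//   acc.push("r\n");
//   acc.lines; // ["foo", "bar"]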
// Turns a string iterable carrying the newline-delimited JSON stream into an
// iterable of parsed chunks: the head first, then each streamed value, until
// the closing "]]" line is reached.
async function* stringIterableToTsonIterable(iterable) {
  const acc = lineAccumulator();
  const AWAITING_HEAD = 0;
  const STREAMING_VALUES = 1;
  const ENDED = 2;
  let state = AWAITING_HEAD;
  for await (const str of iterable) {
    acc.push(str);
    if (state === AWAITING_HEAD && acc.lines.length >= 2) {
      // Drop the opening "[" line, then parse the head line.
      acc.lines.shift();
      const headLine = acc.lines.shift();
      (0, import_assert.assert)(headLine, "No head line found");
      const head = JSON.parse(headLine);
      yield head;
      state = STREAMING_VALUES;
    }
    if (state === STREAMING_VALUES) {
      // Each complete line is an optionally comma-prefixed JSON chunk; skip
      // bare delimiters and stop at the closing "]]".
      while (acc.lines.length) {
        let str2 = acc.lines.shift();
        str2 = str2.trimStart();
        if (str2.startsWith(",")) {
          str2 = str2.slice(1);
        }
        if (str2 === "" || str2 === "[" || str2 === ",") {
          continue;
        }
        if (str2 === "]]") {
          state = ENDED;
          continue;
        }
        yield JSON.parse(str2);
      }
    }
  }
  (0, import_assert.assert)(state === ENDED, `Stream ended unexpectedly (state ${state})`);
}
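// Example of the wire format the generator above accepts, reconstructed from
// its parsing logic (payloads are placeholders):
//
//   [
//   {"nonce":"<nonce>","json":<head JSON>}
//   ,[<index>, <payload JSON>]
//   ,[<index>, <payload JSON>]
//   ]]
//
// Line 1 is the opening bracket and line 2 the head; every later line is an
// optionally comma-prefixed JSON chunk, and "]]" terminates the stream.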
// Creates a parser that turns an async iterable of strings (a streamed
// tupleson payload) back into the original value.
function createTsonParseAsync(opts) {
  const instance = createTsonDeserializer(opts);
  return async (iterable, opts2) => {
    const tsonIterable = stringIterableToTsonIterable(iterable);
    return await instance(tsonIterable, opts2 ?? {});
  };
}
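// Usage sketch, assuming `opts.types` contains async-aware handlers; the
// handler name tsonPromise is an assumption about the package's exports, not
// something defined in this file:
//
//   const parse = createTsonParseAsync({ types: [tsonPromise] });
//   const value = await parse(someAsyncIterableOfStrings);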
// Creates a parser that consumes a tupleson stream delivered over Server-Sent
// Events: each message carries one JSON chunk, a custom "close" event ends the
// stream, and an abort signal errors it.
function createTsonParseEventSource(opts) {
  const instance = createTsonDeserializer(opts);
  return async (url, parseOpts = {}) => {
    const [stream, controller] = (0, import_iterableUtils.createReadableStream)();
    const eventSource = new EventSource(url);
    const { signal } = parseOpts;
    const onAbort = () => {
      (0, import_assert.assert)(signal);
      eventSource.close();
      controller.error(new import_asyncErrors.TsonAbortError("Stream aborted by user"));
      signal.removeEventListener("abort", onAbort);
    };
    signal?.addEventListener("abort", onAbort);
    eventSource.onmessage = (msg) => {
      controller.enqueue(JSON.parse(msg.data));
    };
    eventSource.addEventListener("close", () => {
      controller.close();
      eventSource.close();
    });
    const iterable = (0, import_iterableUtils.readableStreamToAsyncIterable)(stream);
    return await instance(iterable, parseOpts);
  };
}
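// Usage sketch (the URL and handler list are illustrative assumptions):
//
//   const parse = createTsonParseEventSource({ types: [tsonPromise] });
//   const ac = new AbortController();
//   const value = await parse("/api/stream", { signal: ac.signal });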
// Creates a parser for a fetch Response whose body is a tupleson JSON stream,
// decoding the byte chunks to text before handing them to the async parser.
function createTsonParseJsonStreamResponse(opts) {
  const instance = createTsonParseAsync(opts);
  const textDecoder = opts.textDecoder ?? new TextDecoder();
  return async (response) => {
    (0, import_assert.assert)(response.body, "Response body is empty");
    const stringIterator = (0, import_iterableUtils.mapIterable)(
      (0, import_iterableUtils.readableStreamToAsyncIterable)(response.body),
      (v) => textDecoder.decode(v)
    );
    const output = await instance(stringIterator);
    return output;
  };
}
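// Usage sketch (the endpoint and handler list are illustrative assumptions):
//
//   const parseResponse = createTsonParseJsonStreamResponse({ types: [tsonPromise] });
//   const response = await fetch("/api/stream");
//   const value = await parseResponse(response);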
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  createTsonParseAsync,
  createTsonParseEventSource,
  createTsonParseJsonStreamResponse
});
//# sourceMappingURL=deserializeAsync.js.map