tupleson
A hackable JSON serializer/deserializer
JavaScript
import { TsonCircularReferenceError } from "../errors.js";
import { assert } from "../internals/assert.js";
import { getDefaultNonce } from "../internals/getNonce.js";
import { mapOrReturn } from "../internals/mapOrReturn.js";
import { TsonStreamInterruptedError } from "./asyncErrors.js";
import { createReadableStream, createServerEvent } from "./iterableUtils.js";
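/**
 * Builds the `walk` function used to serialize a value tree and the combined
 * async `iterator` that later streams the values produced by async type
 * handlers. Returns them as a `[walk, iterator]` tuple.
 */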
function walkerFactory(nonce, types) {
  let asyncIndex = 0;
  const seen = /* @__PURE__ */ new WeakSet();
  const cache = /* @__PURE__ */ new WeakMap();
  const iterators = /* @__PURE__ */ new Map();
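  // Combined async iterator: multiplexes every registered async iterator
  // (keyed by its index) via Promise.race and yields [index, walkedValue]
  // tuples as soon as any of them resolves, until all are exhausted.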
  const iterator = {
    async *[Symbol.asyncIterator]() {
      const nextAsyncIteratorValue = /* @__PURE__ */ new Map();
      while (iterators.size > 0) {
        for (const [idx2, iterator2] of iterators) {
          if (!nextAsyncIteratorValue.has(idx2)) {
            nextAsyncIteratorValue.set(
              idx2,
              iterator2.next().then((result2) => [idx2, result2])
            );
          }
        }
        const nextValues = Array.from(nextAsyncIteratorValue.values());
        const [idx, result] = await Promise.race(nextValues);
        if (result.done) {
          nextAsyncIteratorValue.delete(idx);
          iterators.delete(idx);
          continue;
        } else {
          const iterator2 = iterators.get(idx);
          assert(iterator2, `iterator ${idx} not found`);
          nextAsyncIteratorValue.set(
            idx,
            iterator2.next().then((result2) => [idx, result2])
          );
        }
        const valueTuple = [idx, walk(result.value)];
        yield valueTuple;
      }
    }
  };
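  // Pre-compute a $serialize function for every registered type handler and
  // split the handlers into a by-primitive lookup table plus a list of
  // non-primitive handlers that are probed with their test() function.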
  const handlers = (() => {
    const all = types.map((handler) => {
      const $serialize = handler.serializeIterator ? (value) => {
        const idx = asyncIndex++;
        const iterator2 = handler.serializeIterator({
          value
        });
        iterators.set(idx, iterator2[Symbol.asyncIterator]());
        return [handler.key, idx, nonce];
      } : handler.serialize ? (value, nonce2, walk2) => [
        handler.key,
        walk2(handler.serialize(value)),
        nonce2
      ] : (value, _nonce, walk2) => walk2(value);
      return {
        ...handler,
        $serialize
      };
    });
    const byPrimitive2 = {};
    const nonPrimitive2 = [];
    for (const handler of all) {
      if (handler.primitive) {
        if (byPrimitive2[handler.primitive]) {
          throw new Error(
            `Multiple handlers for primitive ${handler.primitive} found`
          );
        }
        byPrimitive2[handler.primitive] = handler;
      } else {
        nonPrimitive2.push(handler);
      }
    }
    return [nonPrimitive2, byPrimitive2];
  })();
  const [nonPrimitive, byPrimitive] = handlers;
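  // Recursively walk a value, replacing anything a handler matches with a
  // [key, serializedValue, nonce] tuple. The WeakSet/WeakMap pair detects
  // circular references and reuses already-walked results.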
  const walk = (value) => {
    const type = typeof value;
    const isComplex = !!value && type === "object";
    if (isComplex) {
      if (seen.has(value)) {
        const cached = cache.get(value);
        if (!cached) {
          throw new TsonCircularReferenceError(value);
        }
        return cached;
      }
      seen.add(value);
    }
    const cacheAndReturn = (result) => {
      if (isComplex) {
        cache.set(value, result);
      }
      return result;
    };
    const primitiveHandler = byPrimitive[type];
    if (primitiveHandler && (!primitiveHandler.test || primitiveHandler.test(value))) {
      return cacheAndReturn(primitiveHandler.$serialize(value, nonce, walk));
    }
    for (const handler of nonPrimitive) {
      if (handler.test(value)) {
        return cacheAndReturn(handler.$serialize(value, nonce, walk));
      }
    }
    return cacheAndReturn(mapOrReturn(value, walk));
  };
  return [walk, iterator];
}
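/**
 * Creates a serializer. Calling it with a value returns a tuple of:
 * - the synchronously walked head (`{ json, nonce }`), and
 * - an async iterable that yields `[index, walkedValue]` tuples for every
 *   async iterator registered during the walk.
 */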
function createAsyncTsonSerialize(opts) {
  const getNonce = opts.nonce ?? getDefaultNonce;
  return (value) => {
    const nonce = getNonce();
    const [walk, iterator] = walkerFactory(nonce, opts.types);
    return [
      {
        json: walk(value),
        nonce
      },
      iterator
    ];
  };
}
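/**
 * Creates an async generator stringifier: `stringify(value, space)` yields
 * string chunks that together form a JSON array whose first element is the
 * head (`{ json, nonce }`) and whose second element is the list of streamed
 * `[index, walkedValue]` tuples, closed with `]]`.
 */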
function createTsonStreamAsync(opts) {
  const indent = (length) => " ".repeat(length);
  const stringifier = async function* stringify(value, space = 0) {
    const [head, iterator] = createAsyncTsonSerialize(opts)(value);
    yield "[\n";
    yield indent(space * 1) + JSON.stringify(head) + "\n";
    yield indent(space * 1) + ",\n";
    yield indent(space * 1) + "[\n";
    let isFirstStreamedValue = true;
    for await (const value2 of iterator) {
      const prefix = indent(space * 2) + (isFirstStreamedValue ? "" : ",");
      yield prefix + JSON.stringify(value2) + "\n";
      isFirstStreamedValue = false;
    }
    yield "]]\n";
  };
  return stringifier;
}
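/**
 * Creates a `Response` factory that streams the serialized value as
 * server-sent events: one event for the head, one per streamed value, and a
 * final `close` event before the stream is ended.
 */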
function createTsonSSEResponse(opts) {
  const serialize = createAsyncTsonSerialize(opts);
  return (value) => {
    const [readable, controller] = createReadableStream();
    async function iterate() {
      const [head, iterable] = serialize(value);
      controller.enqueue(
        createServerEvent({
          data: head
          //event: "head",
          // id: "0",
          // retry: 0,
        })
      );
      for await (const chunk of iterable) {
        controller.enqueue(
          createServerEvent({
            data: chunk
            // event: "tson",
            // id: "0",
            // retry: 0,
          })
        );
      }
      controller.enqueue(
        createServerEvent({
          data: null,
          event: "close"
          // id: "0",
          // retry: 0,
        })
      );
      controller.close();
      controller.error(
        new TsonStreamInterruptedError(new Error("SSE stream ended"))
      );
    }
    iterate().catch((err) => {
      controller.error(err);
    });
    const res = new Response(readable, {
      headers: {
        "Cache-Control": "no-cache",
        Connection: "keep-alive",
        "Content-Type": "text/event-stream",
        // prevent buffering by nginx
        "X-Accel-Buffering": "no"
      },
      status: 200
    });
    return res;
  };
}
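/**
 * Creates a `Response` factory that streams the output of
 * `createTsonStreamAsync` as a chunked `application/json` body.
 */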
function createTsonSerializeJsonStreamResponse(opts) {
  const serialize = createTsonStreamAsync(opts);
  return (value) => {
    const [readable, controller] = createReadableStream();
    async function iterate() {
      for await (const chunk of serialize(value)) {
        controller.enqueue(chunk);
      }
      controller.close();
    }
    iterate().catch((err) => {
      controller.error(err);
    });
    const res = new Response(readable, {
      headers: {
        "Cache-Control": "no-cache",
        Connection: "keep-alive",
        "Content-Type": "application/json"
      },
      status: 200
    });
    return res;
  };
}
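// Minimal usage sketch (an illustration, not part of this module): with no
// custom type handlers registered, plain JSON values pass through unchanged
// and the stringifier simply streams the resulting JSON text.
//
//   const stringify = createTsonStreamAsync({ types: [] });
//   for await (const chunk of stringify({ hello: "world" })) {
//     process.stdout.write(chunk);
//   }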
export {
  createAsyncTsonSerialize,
  createTsonSSEResponse,
  createTsonSerializeJsonStreamResponse,
  createTsonStreamAsync
};
//# sourceMappingURL=serializeAsync.mjs.map