// tupleson — a hackable JSON serializer/deserializer
// Compiled CommonJS output (serializeAsync); ~253 lines, 8.26 kB, JavaScript.
// esbuild-style CommonJS interop helpers: re-export module members as
// live, enumerable getters so late-bound values are still observed.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define every entry of `all` on `target` as an enumerable getter.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as getters, skipping `except`
// and anything `to` already owns; enumerability mirrors the source descriptor.
var __copyProps = (to, from, except, desc) => {
  const copyable =
    from && (typeof from === "object" || typeof from === "function");
  if (copyable) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !desc || desc.enumerable
      });
    }
  }
  return to;
};
// Wrap an ESM namespace object as a CommonJS exports object with __esModule set.
var __toCommonJS = (mod) => {
  const cjs = __defProp({}, "__esModule", { value: true });
  return __copyProps(cjs, mod);
};
// Public surface of this module; __export installs each entry as a live
// getter so the bindings below resolve lazily (they are declared later).
var serializeAsync_exports = {};
__export(serializeAsync_exports, {
createAsyncTsonSerialize: () => createAsyncTsonSerialize,
createTsonSSEResponse: () => createTsonSSEResponse,
createTsonSerializeJsonStreamResponse: () => createTsonSerializeJsonStreamResponse,
createTsonStreamAsync: () => createTsonStreamAsync
});
// Expose as a CommonJS module flagged with __esModule for ESM interop.
module.exports = __toCommonJS(serializeAsync_exports);
var import_errors = require("../errors.js");
var import_assert = require("../internals/assert.js");
var import_getNonce = require("../internals/getNonce.js");
var import_mapOrReturn = require("../internals/mapOrReturn.js");
var import_asyncErrors = require("./asyncErrors.js");
var import_iterableUtils = require("./iterableUtils.js");
/**
 * Builds the serialization machinery for one serialize call:
 * a synchronous `walk(value)` that produces the JSON-safe "head", plus a
 * single merged async iterator that multiplexes every async source
 * (e.g. promises/iterables) discovered during the walk.
 *
 * @param nonce marker embedded in serialized tuples to distinguish them from user data
 * @param types handler list; each handler has `key` and optionally
 *              `serializeIterator`, `serialize`, `test`, `primitive`
 * @returns `[walk, iterator]` — `iterator` yields `[asyncIndex, walkedChunk]`
 *          tuples in completion order across all registered async sources
 */
function walkerFactory(nonce, types) {
// Monotonically increasing id assigned to each async source found while walking.
let asyncIndex = 0;
// Objects already visited (circular-reference detection).
const seen = /* @__PURE__ */ new WeakSet();
// Finished serializations, reused when the same object reappears.
const cache = /* @__PURE__ */ new WeakMap();
// Live async iterators keyed by their asyncIndex.
const iterators = /* @__PURE__ */ new Map();
const iterator = {
// Races all registered iterators and yields whichever produces next, so
// chunks interleave in completion order. Note: `walk(result.value)` below
// may register NEW iterators; the outer for-loop re-scans `iterators`
// every pass, so late additions are picked up.
async *[Symbol.asyncIterator]() {
// idx -> pending `next()` promise, each tagged with its idx for Promise.race.
const nextAsyncIteratorValue = /* @__PURE__ */ new Map();
while (iterators.size > 0) {
// Arm a pending next() for any iterator that doesn't have one yet.
for (const [idx2, iterator2] of iterators) {
if (!nextAsyncIteratorValue.has(idx2)) {
nextAsyncIteratorValue.set(
idx2,
iterator2.next().then((result2) => [idx2, result2])
);
}
}
const nextValues = Array.from(nextAsyncIteratorValue.values());
// First iterator to settle wins this round.
const [idx, result] = await Promise.race(nextValues);
if (result.done) {
// Source exhausted: drop both its pending slot and the iterator itself.
nextAsyncIteratorValue.delete(idx);
iterators.delete(idx);
continue;
} else {
// Immediately re-arm the winner so it stays in the race.
const iterator2 = iterators.get(idx);
(0, import_assert.assert)(iterator2, `iterator ${idx} not found`);
nextAsyncIteratorValue.set(
idx,
iterator2.next().then((result2) => [idx, result2])
);
}
// Walk the chunk synchronously (may register further async sources).
const valueTuple = [idx, walk(result.value)];
yield valueTuple;
}
}
};
// Precompute a `$serialize` strategy per handler, then split handlers into
// primitive-keyed lookup vs. ordered test() list.
const handlers = (() => {
const all = types.map((handler) => {
// Priority: async-iterator handlers emit a [key, idx, nonce] reference
// and register the iterator; plain serializers walk their own output;
// handlers with neither just recurse into the value.
const $serialize = handler.serializeIterator ? (value) => {
const idx = asyncIndex++;
const iterator2 = handler.serializeIterator({
value
});
iterators.set(idx, iterator2[Symbol.asyncIterator]());
return [handler.key, idx, nonce];
} : handler.serialize ? (value, nonce2, walk2) => [
handler.key,
walk2(handler.serialize(value)),
nonce2
] : (value, _nonce, walk2) => walk2(value);
return {
...handler,
$serialize
};
});
const byPrimitive2 = {};
const nonPrimitive2 = [];
for (const handler of all) {
if (handler.primitive) {
// At most one handler may claim each typeof-primitive.
if (byPrimitive2[handler.primitive]) {
throw new Error(
`Multiple handlers for primitive ${handler.primitive} found`
);
}
byPrimitive2[handler.primitive] = handler;
} else {
nonPrimitive2.push(handler);
}
}
return [nonPrimitive2, byPrimitive2];
})();
const [nonPrimitive, byPrimitive] = handlers;
// Recursively serialize `value` to its JSON-safe head representation.
const walk = (value) => {
const type = typeof value;
const isComplex = !!value && type === "object";
if (isComplex) {
if (seen.has(value)) {
const cached = cache.get(value);
if (!cached) {
// Seen but not yet finished serializing -> genuine cycle.
throw new import_errors.TsonCircularReferenceError(value);
}
return cached;
}
seen.add(value);
}
// Cache is populated only AFTER the recursive serialization completes,
// which is what makes the cycle check above work.
const cacheAndReturn = (result) => {
if (isComplex) {
cache.set(value, result);
}
return result;
};
// Fast path: primitive-typed handler (its `test` is optional).
const primitiveHandler = byPrimitive[type];
if (primitiveHandler && (!primitiveHandler.test || primitiveHandler.test(value))) {
return cacheAndReturn(primitiveHandler.$serialize(value, nonce, walk));
}
// Slow path: first non-primitive handler whose test() matches wins.
for (const handler of nonPrimitive) {
if (handler.test(value)) {
return cacheAndReturn(handler.$serialize(value, nonce, walk));
}
}
// No handler matched: map over objects/arrays (or return scalars as-is).
return cacheAndReturn((0, import_mapOrReturn.mapOrReturn)(value, walk));
};
return [walk, iterator];
}
/**
 * Creates an async serializer from `opts` (handler `types`, optional `nonce`
 * factory). The returned function serializes a value into a head object
 * `{ json, nonce }` plus the merged async iterator of streamed chunks.
 */
function createAsyncTsonSerialize(opts) {
const makeNonce = opts.nonce ?? import_getNonce.getDefaultNonce;
return (value) => {
const nonce = makeNonce();
// Fresh walker per call: seen/cache/iterator state must not leak between values.
const [walk, iterator] = walkerFactory(nonce, opts.types);
const head = {
json: walk(value),
nonce
};
return [head, iterator];
};
}
/**
 * Creates an async string-chunk generator producing the streaming TSON wire
 * format: `[ <head> , [ <chunk>, <chunk>, ... ]]`, one line per yield.
 *
 * @param opts serializer options (handler `types`, optional `nonce`)
 * @returns async generator `stringify(value, space = 0)`; `space` scales indentation
 */
function createTsonStreamAsync(opts) {
const serialize = createAsyncTsonSerialize(opts);
const pad = (width) => " ".repeat(width);
return async function* stringify(value, space = 0) {
const [head, iterator] = serialize(value);
yield "[\n";
yield pad(space) + JSON.stringify(head) + "\n";
yield pad(space) + ",\n";
yield pad(space) + "[\n";
let first = true;
for await (const chunk of iterator) {
// JSON array separator: comma before every element except the first.
const prefix = pad(space * 2) + (first ? "" : ",");
yield prefix + JSON.stringify(chunk) + "\n";
first = false;
}
yield "]]\n";
};
}
/**
 * Creates a factory of Server-Sent Events `Response`s: the head is sent as
 * the first event, each streamed chunk as a subsequent event, then a
 * `close` event before the stream is shut down.
 */
function createTsonSSEResponse(opts) {
const serialize = createAsyncTsonSerialize(opts);
return (value) => {
const [readable, controller] = (0, import_iterableUtils.createReadableStream)();
const pump = async () => {
const [head, iterable] = serialize(value);
// First event: the head (json shape + nonce).
controller.enqueue(
(0, import_iterableUtils.createServerEvent)({
data: head
})
);
for await (const chunk of iterable) {
controller.enqueue(
(0, import_iterableUtils.createServerEvent)({
data: chunk
})
);
}
// Tell the client we're done before shutting the stream down.
controller.enqueue(
(0, import_iterableUtils.createServerEvent)({
data: null,
event: "close"
})
);
controller.close();
// NOTE(review): erroring after close() is a no-op on a standard
// ReadableStream controller — presumably dead code, but kept in case
// createReadableStream's controller behaves differently; confirm.
controller.error(
new import_asyncErrors.TsonStreamInterruptedError(new Error("SSE stream ended"))
);
};
pump().catch((err) => {
controller.error(err);
});
return new Response(readable, {
headers: {
"Cache-Control": "no-cache",
Connection: "keep-alive",
"Content-Type": "text/event-stream",
// prevent buffering by nginx
"X-Accel-Buffering": "no"
},
status: 200
});
};
}
/**
 * Creates a factory of streaming JSON `Response`s: pipes the chunks from
 * `createTsonStreamAsync` straight into a readable stream body.
 */
function createTsonSerializeJsonStreamResponse(opts) {
const stringify = createTsonStreamAsync(opts);
return (value) => {
const [readable, controller] = (0, import_iterableUtils.createReadableStream)();
const pump = async () => {
for await (const part of stringify(value)) {
controller.enqueue(part);
}
controller.close();
};
// Surface any serialization failure as a stream error.
pump().catch((err) => {
controller.error(err);
});
return new Response(readable, {
headers: {
"Cache-Control": "no-cache",
Connection: "keep-alive",
"Content-Type": "application/json"
},
status: 200
});
};
}
// Annotate the CommonJS export names for ESM import in node:
// (dead code at runtime — `0 &&` never executes — but Node's cjs-module-lexer
// statically detects these names so named ESM imports work; keep byte-exact)
0 && (module.exports = {
createAsyncTsonSerialize,
createTsonSSEResponse,
createTsonSerializeJsonStreamResponse,
createTsonStreamAsync
});
//# sourceMappingURL=serializeAsync.js.map
;