
bidc

Bidirectional Channel for JavaScript

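Usage sketch (not taken from the package's own docs; the worker file name, bundler wiring, and ESM import are illustrative assumptions, since the file below is the CommonJS build). createChannel returns send, receive, and cleanup; whatever the remote receive callback returns resolves the local send() promise:

// main.js: talk to a module worker
import { createChannel } from 'bidc';

const worker = new Worker(new URL('./worker.js', import.meta.url), { type: 'module' });
const { send, receive, cleanup } = createChannel(worker);

// Whatever this callback returns resolves the worker's send() promise.
receive((message) => {
  console.log('from worker:', message);
  return 'ack';
});

console.log(await send({ hello: 'worker' })); // -> { echoed: { hello: 'worker' } }
// cleanup() detaches the port listeners when the channel is no longer needed.

// worker.js: inside a worker (or an iframe) no target is needed.
import { createChannel } from 'bidc';

const { receive } = createChannel();
receive((message) => ({ echoed: message }));
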
Object.defineProperty(exports, '__esModule', { value: true });

class DevalueError extends Error {
  /**
   * @param {string} message
   * @param {string[]} keys
   */
  constructor(message, keys){
    super(message);
    this.name = 'DevalueError';
    this.path = keys.join('');
  }
}

/** @param {any} thing */
function is_primitive(thing) {
  return Object(thing) !== thing;
}

const object_proto_names = /* @__PURE__ */ Object.getOwnPropertyNames(Object.prototype).sort().join('\0');

/** @param {any} thing */
function is_plain_object(thing) {
  const proto = Object.getPrototypeOf(thing);
  return proto === Object.prototype || proto === null || Object.getPrototypeOf(proto) === null || Object.getOwnPropertyNames(proto).sort().join('\0') === object_proto_names;
}

/** @param {any} thing */
function get_type(thing) {
  return Object.prototype.toString.call(thing).slice(8, -1);
}

/** @param {string} char */
function get_escaped_char(char) {
  switch(char){
    case '"': return '\\"';
    case '<': return '\\u003C';
    case '\\': return '\\\\';
    case '\n': return '\\n';
    case '\r': return '\\r';
    case '\t': return '\\t';
    case '\b': return '\\b';
    case '\f': return '\\f';
    case '\u2028': return '\\u2028';
    case '\u2029': return '\\u2029';
    default: return char < ' ' ? `\\u${char.charCodeAt(0).toString(16).padStart(4, '0')}` : '';
  }
}

/** @param {string} str */
function stringify_string(str) {
  let result = '';
  let last_pos = 0;
  const len = str.length;
  for(let i = 0; i < len; i += 1){
    const char = str[i];
    const replacement = get_escaped_char(char);
    if (replacement) {
      result += str.slice(last_pos, i) + replacement;
      last_pos = i + 1;
    }
  }
  return `"${last_pos === 0 ? str : result + str.slice(last_pos)}"`;
}

/** @param {Record<string | symbol, any>} object */
function enumerable_symbols(object) {
  return Object.getOwnPropertySymbols(object).filter((symbol)=>Object.getOwnPropertyDescriptor(object, symbol).enumerable);
}

const is_identifier = /^[a-zA-Z_$][a-zA-Z_$0-9]*$/;

/** @param {string} key */
function stringify_key(key) {
  return is_identifier.test(key) ? '.' + key : '[' + JSON.stringify(key) + ']';
}

/**
 * Base64 Encodes an arraybuffer
 * @param {ArrayBuffer} arraybuffer
 * @returns {string}
 */
function encode64(arraybuffer) {
  const dv = new DataView(arraybuffer);
  let binaryString = "";
  for(let i = 0; i < arraybuffer.byteLength; i++){
    binaryString += String.fromCharCode(dv.getUint8(i));
  }
  return binaryToAscii(binaryString);
}

/**
 * Decodes a base64 string into an arraybuffer
 * @param {string} string
 * @returns {ArrayBuffer}
 */
function decode64(string) {
  const binaryString = asciiToBinary(string);
  const arraybuffer = new ArrayBuffer(binaryString.length);
  const dv = new DataView(arraybuffer);
  for(let i = 0; i < arraybuffer.byteLength; i++){
    dv.setUint8(i, binaryString.charCodeAt(i));
  }
  return arraybuffer;
}

const KEY_STRING = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

/**
 * Substitute for atob since it's deprecated in node.
 * Does not do any input validation.
 *
 * @see https://github.com/jsdom/abab/blob/master/lib/atob.js
 *
 * @param {string} data
 * @returns {string}
 */
function asciiToBinary(data) {
  if (data.length % 4 === 0) {
    data = data.replace(/==?$/, "");
  }
  let output = "";
  let buffer = 0;
  let accumulatedBits = 0;
  for(let i = 0; i < data.length; i++){
    buffer <<= 6;
    buffer |= KEY_STRING.indexOf(data[i]);
    accumulatedBits += 6;
    if (accumulatedBits === 24) {
      output += String.fromCharCode((buffer & 0xff0000) >> 16);
      output += String.fromCharCode((buffer & 0xff00) >> 8);
      output += String.fromCharCode(buffer & 0xff);
      buffer = accumulatedBits = 0;
    }
  }
  if (accumulatedBits === 12) {
    buffer >>= 4;
    output += String.fromCharCode(buffer);
  } else if (accumulatedBits === 18) {
    buffer >>= 2;
    output += String.fromCharCode((buffer & 0xff00) >> 8);
    output += String.fromCharCode(buffer & 0xff);
  }
  return output;
}

/**
 * Substitute for btoa since it's deprecated in node.
 * Does not do any input validation.
 *
 * @see https://github.com/jsdom/abab/blob/master/lib/btoa.js
 *
 * @param {string} str
 * @returns {string}
 */
function binaryToAscii(str) {
  let out = "";
  for(let i = 0; i < str.length; i += 3){
    /** @type {[number, number, number, number]} */
    const groupsOfSix = [ undefined, undefined, undefined, undefined ];
    groupsOfSix[0] = str.charCodeAt(i) >> 2;
    groupsOfSix[1] = (str.charCodeAt(i) & 0x03) << 4;
    if (str.length > i + 1) {
      groupsOfSix[1] |= str.charCodeAt(i + 1) >> 4;
      groupsOfSix[2] = (str.charCodeAt(i + 1) & 0x0f) << 2;
    }
    if (str.length > i + 2) {
      groupsOfSix[2] |= str.charCodeAt(i + 2) >> 6;
      groupsOfSix[3] = str.charCodeAt(i + 2) & 0x3f;
    }
    for(let j = 0; j < groupsOfSix.length; j++){
      if (typeof groupsOfSix[j] === "undefined") {
        out += "=";
      } else {
        out += KEY_STRING[groupsOfSix[j]];
      }
    }
  }
  return out;
}

const UNDEFINED = -1;
const HOLE = -2;
const NAN = -3;
const POSITIVE_INFINITY = -4;
const NEGATIVE_INFINITY = -5;
const NEGATIVE_ZERO = -6;

/**
 * Revive a value serialized with `devalue.stringify`
 * @param {string} serialized
 * @param {Record<string, (value: any) => any>} [revivers]
 */
function parse$1(serialized, revivers) {
  return unflatten(JSON.parse(serialized), revivers);
}

/**
 * Revive a value flattened with `devalue.stringify`
 * @param {number | any[]} parsed
 * @param {Record<string, (value: any) => any>} [revivers]
 */
function unflatten(parsed, revivers) {
  if (typeof parsed === 'number') return hydrate(parsed, true);
  if (!Array.isArray(parsed) || parsed.length === 0) {
    throw new Error('Invalid input');
  }
  const values = /** @type {any[]} */ parsed;
  const hydrated = Array(values.length);
  /**
   * @param {number} index
   * @returns {any}
   */
  function hydrate(index, standalone = false) {
    if (index === UNDEFINED) return undefined;
    if (index === NAN) return NaN;
    if (index === POSITIVE_INFINITY) return Infinity;
    if (index === NEGATIVE_INFINITY) return -Infinity;
    if (index === NEGATIVE_ZERO) return -0;
    if (standalone || typeof index !== 'number') {
      throw new Error(`Invalid input`);
    }
    if (index in hydrated) return hydrated[index];
    const value = values[index];
    if (!value || typeof value !== 'object') {
      hydrated[index] = value;
    } else if (Array.isArray(value)) {
      if (typeof value[0] === 'string') {
        const type = value[0];
        const reviver = revivers?.[type];
        if (reviver) {
          return hydrated[index] = reviver(hydrate(value[1]));
        }
        switch(type){
          case 'Date':
            hydrated[index] = new Date(value[1]);
            break;
          case 'Set':
            const set = new Set();
            hydrated[index] = set;
            for(let i = 1; i < value.length; i += 1){
              set.add(hydrate(value[i]));
            }
            break;
          case 'Map':
            const map = new Map();
            hydrated[index] = map;
            for(let i = 1; i < value.length; i += 2){
              map.set(hydrate(value[i]), hydrate(value[i + 1]));
            }
            break;
          case 'RegExp':
            hydrated[index] = new RegExp(value[1], value[2]);
            break;
          case 'Object':
            hydrated[index] = Object(value[1]);
            break;
          case 'BigInt':
            hydrated[index] = BigInt(value[1]);
            break;
          case 'null':
            const obj = Object.create(null);
            hydrated[index] = obj;
            for(let i = 1; i < value.length; i += 2){
              obj[value[i]] = hydrate(value[i + 1]);
            }
            break;
          case 'Int8Array':
          case 'Uint8Array':
          case 'Uint8ClampedArray':
          case 'Int16Array':
          case 'Uint16Array':
          case 'Int32Array':
          case 'Uint32Array':
          case 'Float32Array':
          case 'Float64Array':
          case 'BigInt64Array':
          case 'BigUint64Array': {
            const TypedArrayConstructor = globalThis[type];
            const typedArray = new TypedArrayConstructor(hydrate(value[1]));
            hydrated[index] = value[2] !== undefined ? typedArray.subarray(value[2], value[3]) : typedArray;
            break;
          }
          case 'ArrayBuffer': {
            const base64 = value[1];
            const arraybuffer = decode64(base64);
            hydrated[index] = arraybuffer;
            break;
          }
          case 'Temporal.Duration':
          case 'Temporal.Instant':
          case 'Temporal.PlainDate':
          case 'Temporal.PlainTime':
          case 'Temporal.PlainDateTime':
          case 'Temporal.PlainMonthDay':
          case 'Temporal.PlainYearMonth':
          case 'Temporal.ZonedDateTime': {
            const temporalName = type.slice(9);
            // @ts-expect-error TS doesn't know about Temporal yet
            hydrated[index] = Temporal[temporalName].from(value[1]);
            break;
          }
          case 'URL': {
            const url = new URL(value[1]);
            hydrated[index] = url;
            break;
          }
          case 'URLSearchParams': {
            const url = new URLSearchParams(value[1]);
            hydrated[index] = url;
            break;
          }
          default:
            throw new Error(`Unknown type ${type}`);
        }
      } else {
        const array = new Array(value.length);
        hydrated[index] = array;
        for(let i = 0; i < value.length; i += 1){
          const n = value[i];
          if (n === HOLE) continue;
          array[i] = hydrate(n);
        }
      }
    } else {
      /** @type {Record<string, any>} */
      const object = {};
      hydrated[index] = object;
      for(const key in value){
        if (key === '__proto__') {
          throw new Error('Cannot parse an object with a `__proto__` property');
        }
        const n = value[key];
        object[key] = hydrate(n);
      }
    }
    return hydrated[index];
  }
  return hydrate(0);
}

/**
 * Turn a value into a JSON string that can be parsed with `devalue.parse`
 * @param {any} value
 * @param {Record<string, (value: any) => any>} [reducers]
 */
function stringify$1(value, reducers) {
  /** @type {any[]} */
  const stringified = [];
  /** @type {Map<any, number>} */
  const indexes = new Map();
  /** @type {Array<{ key: string, fn: (value: any) => any }>} */
  const custom = [];
  if (reducers) {
    for (const key of Object.getOwnPropertyNames(reducers)){
      custom.push({ key, fn: reducers[key] });
    }
  }
  /** @type {string[]} */
  const keys = [];
  let p = 0;
  /** @param {any} thing */
  function flatten(thing) {
    if (thing === undefined) return UNDEFINED;
    if (Number.isNaN(thing)) return NAN;
    if (thing === Infinity) return POSITIVE_INFINITY;
    if (thing === -Infinity) return NEGATIVE_INFINITY;
    if (thing === 0 && 1 / thing < 0) return NEGATIVE_ZERO;
    if (indexes.has(thing)) return indexes.get(thing);
    const index = p++;
    indexes.set(thing, index);
    for (const { key, fn } of custom){
      const value = fn(thing);
      if (value) {
        stringified[index] = `["${key}",${flatten(value)}]`;
        return index;
      }
    }
    let str = '';
    if (is_primitive(thing)) {
      str = stringify_primitive(thing);
    } else {
      const type = get_type(thing);
      switch(type){
        case 'Number':
        case 'String':
        case 'Boolean':
          str = `["Object",${stringify_primitive(thing)}]`;
          break;
        case 'BigInt':
          str = `["BigInt",${thing}]`;
          break;
        case 'Date':
          const valid = !isNaN(thing.getDate());
          str = `["Date","${valid ? thing.toISOString() : ''}"]`;
          break;
        case 'URL':
          str = `["URL",${stringify_string(thing.toString())}]`;
          break;
        case 'URLSearchParams':
          str = `["URLSearchParams",${stringify_string(thing.toString())}]`;
          break;
        case 'RegExp':
          const { source, flags } = thing;
          str = flags ? `["RegExp",${stringify_string(source)},"${flags}"]` : `["RegExp",${stringify_string(source)}]`;
          break;
        case 'Array':
          str = '[';
          for(let i = 0; i < thing.length; i += 1){
            if (i > 0) str += ',';
            if (i in thing) {
              keys.push(`[${i}]`);
              str += flatten(thing[i]);
              keys.pop();
            } else {
              str += HOLE;
            }
          }
          str += ']';
          break;
        case 'Set':
          str = '["Set"';
          for (const value of thing){
            str += `,${flatten(value)}`;
          }
          str += ']';
          break;
        case 'Map':
          str = '["Map"';
          for (const [key, value] of thing){
            keys.push(`.get(${is_primitive(key) ? stringify_primitive(key) : '...'})`);
            str += `,${flatten(key)},${flatten(value)}`;
            keys.pop();
          }
          str += ']';
          break;
        case 'Int8Array':
        case 'Uint8Array':
        case 'Uint8ClampedArray':
        case 'Int16Array':
        case 'Uint16Array':
        case 'Int32Array':
        case 'Uint32Array':
        case 'Float32Array':
        case 'Float64Array':
        case 'BigInt64Array':
        case 'BigUint64Array': {
          /** @type {import("./types.js").TypedArray} */
          const typedArray = thing;
          str = '["' + type + '",' + flatten(typedArray.buffer);
          const a = thing.byteOffset;
          const b = a + thing.byteLength;
          // handle subarrays
          if (a > 0 || b !== typedArray.buffer.byteLength) {
            const m = +/(\d+)/.exec(type)[1] / 8;
            str += `,${a / m},${b / m}`;
          }
          str += ']';
          break;
        }
        case 'ArrayBuffer': {
          /** @type {ArrayBuffer} */
          const arraybuffer = thing;
          const base64 = encode64(arraybuffer);
          str = `["ArrayBuffer","${base64}"]`;
          break;
        }
        case 'Temporal.Duration':
        case 'Temporal.Instant':
        case 'Temporal.PlainDate':
        case 'Temporal.PlainTime':
        case 'Temporal.PlainDateTime':
        case 'Temporal.PlainMonthDay':
        case 'Temporal.PlainYearMonth':
        case 'Temporal.ZonedDateTime':
          str = `["${type}",${stringify_string(thing.toString())}]`;
          break;
        default:
          if (!is_plain_object(thing)) {
            throw new DevalueError(`Cannot stringify arbitrary non-POJOs`, keys);
          }
          if (enumerable_symbols(thing).length > 0) {
            throw new DevalueError(`Cannot stringify POJOs with symbolic keys`, keys);
          }
          if (Object.getPrototypeOf(thing) === null) {
            str = '["null"';
            for(const key in thing){
              keys.push(stringify_key(key));
              str += `,${stringify_string(key)},${flatten(thing[key])}`;
              keys.pop();
            }
            str += ']';
          } else {
            str = '{';
            let started = false;
            for(const key in thing){
              if (started) str += ',';
              started = true;
              keys.push(stringify_key(key));
              str += `${stringify_string(key)}:${flatten(thing[key])}`;
              keys.pop();
            }
            str += '}';
          }
      }
    }
    stringified[index] = str;
    return index;
  }
  const index = flatten(value);
  // special case — value is represented as a negative index
  if (index < 0) return `${index}`;
  return `[${stringified.join(',')}]`;
}

/**
 * @param {any} thing
 * @returns {string}
 */
function stringify_primitive(thing) {
  const type = typeof thing;
  if (type === 'string') return stringify_string(thing);
  if (thing instanceof String) return stringify_string(thing.toString());
  if (thing === void 0) return UNDEFINED.toString();
  if (thing === 0 && 1 / thing < 0) return NEGATIVE_ZERO.toString();
  if (type === 'bigint') return `["BigInt","${thing}"]`;
  return String(thing);
}
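/*
 * Illustration (informal, not part of the library): the inlined devalue helpers
 * above flatten a value graph into a JSON array whose entries reference each
 * other by index; parse$1/unflatten rebuilds the graph. The `$1` suffix comes
 * from bundling; neither function is exported.
 *
 *   stringify$1({ when: new Date(0), tags: new Set(['a']) })
 *   // -> '[{"when":1,"tags":2},["Date","1970-01-01T00:00:00.000Z"],["Set",3],"a"]'
 *
 *   // Custom reducers/revivers extend the format; bidc uses this below with the
 *   // "F" (function) and "P" (promise) keys. A reducer must return something
 *   // truthy for values it claims; the matching reviver rebuilds them.
 *   class Temp { constructor(celsius) { this.celsius = celsius; } }
 *   const wire = stringify$1(new Temp(21), { Temp: (v) => v instanceof Temp && v.celsius });
 *   const back = parse$1(wire, { Temp: (celsius) => new Temp(celsius) });
 */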
// Simplified Promise Stream Library with compact protocol

// A global store to track function references and their IDs
// TODO: Solve this with WeakRef maybe? Track refs on the receiver side and
// send back removed functions to the sender side along with the return value.
const functionRefIds = new Map();
const functionRefsById = new Map();

// Custom stringify and parse functions using devalue
function stringify(value, promiseMap) {
  return stringify$1(value, {
    F: (fn)=>{
      if (typeof fn === 'function') {
        // Use devalue's Function serializer to handle function detection and replacement
        if (!functionRefIds.has(fn)) {
          const id = functionRefIds.size.toString();
          functionRefIds.set(fn, id);
          functionRefsById.set(id, fn);
          if (functionRefIds.size > 50000) {
            console.warn('Function reference store is getting large, it is not recommended to send anonymous and inline functions through the channel as they cannot be cached.');
          }
        }
        return functionRefIds.get(fn);
      }
      return undefined;
    },
    P: (promise)=>{
      // Use devalue's Promise serializer to handle promise detection and replacement
      if (promise && typeof promise === 'object' && typeof promise.then === 'function') {
        if (!promiseMap.has(promise)) promiseMap.set(promise, promiseMap.size.toString());
        return promiseMap.get(promise);
      }
      return undefined;
    }
  });
}

function parse(text, promiseResolvers, send) {
  return parse$1(text, {
    F: (id)=>{
      if (!send) return null;
      // Resolve function references by ID
      return async function(...args) {
        return send({ $$type: `bidc-fn:${id}`, args });
      };
    },
    P: (promiseId)=>{
      // Check if promise already exists to avoid creating duplicates
      if (promiseResolvers.has(promiseId)) {
        return promiseResolvers.get(promiseId).promise;
      }
      let resolvePromise;
      let rejectPromise;
      const promise = new Promise((resolve, reject)=>{
        resolvePromise = resolve;
        rejectPromise = reject;
      });
      promiseResolvers.set(promiseId, { resolve: resolvePromise, reject: rejectPromise, promise: promise });
      return promise;
    }
  });
}

/**
 * Encode a value with promises to an async iterator of string chunks
 * Uses r[id]: for return data and p0:, p1: etc for promise data
 */
async function* encode(value) {
  const pendingPromises = new Map();
  const promiseMap = new Map();
  // Track resolved promise IDs to prevent re-adding them
  const resolvedPromiseIds = new Set();
  // First, yield the main structure with promise placeholders using devalue's serializer
  const serializedValue = stringify(value, promiseMap);
  yield `r:${serializedValue}\n`;
  // Build pendingPromises map from the promiseMap created during stringify
  for (const [promise, promiseId] of promiseMap.entries()){
    pendingPromises.set(promiseId, promise);
  }
  // Then process and yield promise resolutions
  while(pendingPromises.size > 0){
    const promiseEntries = Array.from(pendingPromises.entries());
    // Replace Promise.allSettled with Promise.race for immediate chunk sending when any promise resolves
    // Create promises that resolve with their ID and result
    const racingPromises = promiseEntries.map(async ([promiseId, promise])=>{
      try {
        const resolvedValue = await promise;
        return { promiseId, status: 'fulfilled', value: resolvedValue };
      } catch (error) {
        return { promiseId, status: 'rejected', reason: error };
      }
    });
    // Race all promises and handle the first one that completes
    const result = await Promise.race(racingPromises);
    // Remove the completed promise from pending
    pendingPromises.delete(result.promiseId);
    // Mark this promise ID as resolved to prevent re-adding
    resolvedPromiseIds.add(result.promiseId);
    if (result.status === 'fulfilled') {
      // Use stringify with promiseMap for resolved values that might contain more promises
      const processedValue = stringify(result.value, promiseMap);
      yield `p${result.promiseId}:${processedValue}\n`;
      // Add any new promises found in resolved value to pending promises
      // Only add promises that haven't been resolved yet
      for (const [promise, newPromiseId] of promiseMap.entries()){
        if (!pendingPromises.has(newPromiseId) && !resolvedPromiseIds.has(newPromiseId)) {
          pendingPromises.set(newPromiseId, promise);
        }
      }
    } else {
      const errorMessage = result.reason instanceof Error ? result.reason.message : String(result.reason);
      yield `e${result.promiseId}:${stringify(errorMessage, promiseMap)}\n`;
    }
  }
}

/**
 * Decode an async iterator of string chunks back to the original value with promises
 * Returns immediately with unresolved promises that will resolve as chunks arrive
 */
async function decode(chunks, options) {
  let result = undefined;
  // Updated type to include promise instance
  const promiseResolvers = new Map();
  // Process chunks asynchronously after receiving the first "r:" chunk
  const chunkIterator = chunks[Symbol.asyncIterator]();
  // Get the first chunk and validate it starts with "r"
  const firstIteratorResult = await chunkIterator.next();
  if (firstIteratorResult.done) {
    throw new Error('Stream ended without any chunks');
  }
  const firstChunk = firstIteratorResult.value;
  // Validate that the first chunk starts with "r" (could be "r:" or "r123:")
  if (!firstChunk.startsWith('r')) {
    throw new Error("First chunk must start with 'r' (return data)");
  }
  // Extract data from first chunk (everything after the first colon)
  const colonIndex = firstChunk.indexOf(':');
  if (colonIndex === -1) {
    throw new Error('Invalid first chunk format - missing colon');
  }
  // Parse using devalue with custom Promise deserializer
  const serializedData = firstChunk.slice(colonIndex + 1);
  result = parse(serializedData, promiseResolvers, options?.send);
  // Continue with remaining chunks asynchronously
  const processRemainingChunks = async ()=>{
    try {
      // Continue processing remaining chunks
      let iteratorResult = await chunkIterator.next();
      while(!iteratorResult.done){
        const line = iteratorResult.value;
        if (line.startsWith('p')) {
          // Promise resolution data
          const colonIndex = line.indexOf(':');
          const promiseId = line.slice(1, colonIndex);
          const data = parse(line.slice(colonIndex + 1), promiseResolvers, options?.send);
          const resolver = promiseResolvers.get(promiseId);
          if (resolver) {
            resolver.resolve(data);
          }
        } else if (line.startsWith('e')) {
          // Promise error data
          const colonIndex = line.indexOf(':');
          const promiseId = line.slice(1, colonIndex);
          const errorMessage = parse(line.slice(colonIndex + 1), promiseResolvers, options?.send);
          const resolver = promiseResolvers.get(promiseId);
          if (resolver) {
            resolver.reject(new Error(errorMessage));
          }
        }
        iteratorResult = await chunkIterator.next();
      }
    } catch (error) {
      // Reject any remaining promises if there's an error
      for (const resolver of promiseResolvers.values()){
        resolver.reject(error);
      }
    }
  };
  // Start processing remaining chunks asynchronously (don't await)
  processRemainingChunks();
  return result;
}
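/*
 * Illustration (informal, not part of the library): encode and decode are
 * exported, so the chunk protocol can be exercised directly. The value is
 * emitted first as an "r:" chunk with placeholders, then each promise settles
 * as a "p<id>:" chunk (or an "e<id>:" chunk on rejection):
 *
 *   const chunks = [];
 *   for await (const chunk of encode({ answer: Promise.resolve(42) })) {
 *     chunks.push(chunk);
 *   }
 *   // chunks is roughly ['r:[{"answer":1},["P",2],"0"]\n', 'p0:[42]\n']
 *
 *   const decoded = await decode((async function* () { yield* chunks; })());
 *   console.log(await decoded.answer); // 42
 */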
// We only make one message channel per target. This maximizes performance
// and avoids issues with multiple connections to the same target, as well as
// supporting initialization inside useEffect hooks.
const connectionCache = new WeakMap();

function createChannel(targetOrChannelId, channelId) {
  let maybeTarget = undefined;
  if (typeof channelId === 'undefined' && typeof targetOrChannelId === 'string') {
    // The first argument is channelId
    channelId = targetOrChannelId;
    maybeTarget = undefined;
  } else if (typeof targetOrChannelId === 'object') {
    // The first argument is a target
    maybeTarget = targetOrChannelId;
  }
  // Namespaced channelId to avoid conflicts with other libraries / multiple
  // connections.
  channelId = 'bidc_' + (channelId ?? 'default');
  // Cache key for the message port of the connection
  const cacheKey = maybeTarget || self;
  const onResetPortCallbacks = [];
  function onResetPort(callback) {
    onResetPortCallbacks.push(callback);
  }
  function initPort() {
    let connected = false;
    let connectionResolver = null;
    const connectionPromise = new Promise((resolve)=>{
      connectionResolver = resolve;
    });
    function sendMessageWithTransfer(message, transfer) {
      if (maybeTarget) {
        if ('self' in maybeTarget && maybeTarget.self === maybeTarget) {
          // It's an iframe contentWindow
          maybeTarget.postMessage(message, '*', [ transfer ]);
        } else {
          maybeTarget.postMessage(message, [ transfer ]);
        }
      } else {
        // If no target is provided, this might be an iframe or worker context
        if (typeof window === 'undefined' && typeof self !== 'undefined') {
          self.postMessage(message, [ transfer ]);
        } else if (typeof window !== 'undefined' && window.parent && window.parent !== window) {
          // Inside an iframe, we can use window.parent
          window.parent.postMessage(message, '*', [ transfer ]);
        } else {
          throw new Error('No target provided and no global context available');
        }
      }
    }
    const messageChannel = new MessageChannel();
    const connectMessage = { type: 'bidc-connect', channelId, timestamp: Date.now() };
    const confirmMessage = { type: 'bidc-confirm', channelId };
    // Handle handshake requests
    function handleConnect(event) {
      const port = event.ports[0];
      if (!port) return;
      const data = event.data;
      if (data?.channelId !== channelId) return;
      if (data.type !== connectMessage.type) return;
      // If the received connect message is older, ignore it. This is because
      // our connect message should be received already.
      // Let's wait for the confirmation message instead.
      if (connectMessage.timestamp <= data.timestamp) {
        // Send confirmation back to the other side via the port
        port.postMessage(confirmMessage);
        if (connected) {
          // The other side refreshed, we need to reinitialize the connection
          connectionCache.set(cacheKey, Promise.resolve(port));
          onResetPortCallbacks.forEach((callback)=>callback(port));
        } else {
          // Connection sent from the other side
          connectionResolver(port);
          connected = true;
        }
      }
    }
    function handleConfirm(event) {
      if (event.data?.type === confirmMessage.type && event.data.channelId === channelId) {
        // Confirm connection established
        connectionResolver(messageChannel.port1);
        connected = true;
        // Remove confirmation listener
        messageChannel.port1.removeEventListener('message', handleConfirm);
      }
    }
    // Listen for connect messages
    // These are registered once per target, but we can't unregister them
    // because we don't know when the target will be restarted or refreshed.
    if (maybeTarget && typeof Worker !== 'undefined' && maybeTarget instanceof Worker) {
      maybeTarget.addEventListener('message', handleConnect);
    } else if (typeof window !== 'undefined') {
      window.addEventListener('message', handleConnect);
    } else if (typeof self !== 'undefined') {
      self.addEventListener('message', handleConnect);
    }
    // Listen for confirmation responses
    messageChannel.port1.addEventListener('message', handleConfirm);
    messageChannel.port1.start();
    // Try to connect to the other side
    sendMessageWithTransfer(connectMessage, messageChannel.port2);
    return connectionPromise;
  }
  function getPort() {
    if (!connectionCache.has(cacheKey)) {
      connectionCache.set(cacheKey, initPort());
    }
    return connectionCache.get(cacheKey);
  }
  const responses = new Map();
  // Send function
  const send = async function(data) {
    // Wait for connection to be established
    const port = await getPort();
    // Generate a unique ID for this message
    // Ensure that fast concurrent messages don't collide
    const id = Date.now().toString(36) + Math.random().toString(36).substring(2, 5);
    // Send chunks with ID prefix for concurrent message support
    for await (const chunk of encode(data)){
      // Prefix each chunk with <id>@ to support concurrent messages
      const prefixedChunk = `${id}@${chunk}`;
      port.postMessage(prefixedChunk);
    }
    // Wait for a response from receive
    let resolve;
    const response = new Promise((r)=>{
      // Store the response resolver
      resolve = r;
    });
    responses.set(id, [ resolve, response ]);
    return response;
  };
  // Track multiple concurrent decodings by message ID
  const activeDecodings = new Map();
  // Things to clean up when the channel is closed
  let canceled = false;
  const disposables = [];
  let globalReceiveCallback = null;
  // Receive function
  const receive = async function(callback) {
    // Wait for connection to be established
    await getPort();
    if (canceled) return;
    globalReceiveCallback = callback;
  };
  // Automatically set up the message handler for the port
  getPort().then((activePort)=>{
    if (canceled) return;
    const messageHandler = async (event)=>{
      const rawChunk = event.data;
      // Skip handshake messages
      if (typeof rawChunk !== 'string') {
        return;
      }
      // Parse ID from chunk prefix: <id>@<chunk>
      const atIndex = rawChunk.indexOf('@');
      if (atIndex === -1) {
        console.error('Invalid chunk format - missing @ delimiter:', rawChunk);
        return;
      }
      const messageId = rawChunk.slice(0, atIndex);
      const chunk = rawChunk.slice(atIndex + 1);
      // Check if this is the first chunk of a new message (starts with "r:")
      if (chunk.startsWith('r:')) {
        // Initialize tracking for this message ID
        activeDecodings?.set(messageId, { pendingChunks: [], chunkResolver: null });
        // Start decoding immediately with async generator
        const processDecoding = async ()=>{
          try {
            const decoded = await decode(async function*() {
              // Yield the first chunk immediately
              yield chunk;
              // Wait for and yield subsequent chunks for this message ID
              while(true){
                const newChunk = await waitForNewChunkFromMessageEvent(messageId);
                if (newChunk === null) {
                  break; // End of stream
                }
                yield newChunk;
              }
              // Clean up tracking for this message ID
              activeDecodings?.delete(messageId);
            }(), { send });
            // If the decoded data is a function call, we need to handle it
            // differently.
            if (typeof decoded === 'object' && decoded !== null && typeof decoded.$$type === 'string' && decoded.$$type.startsWith('bidc-fn:')) {
              // This is a function call, we need to resolve it
              const fnId = decoded.$$type.slice(8);
              const fn = functionRefsById.get(fnId);
              if (fn) {
                // Call the function with the provided arguments
                const response = fn(...decoded.args);
                void send({ $$type: `bidc-res:${messageId}`, response });
              } else {
                console.error(`Function reference not found for ID: ${fnId}`);
              }
            } else if (typeof decoded === 'object' && decoded !== null && typeof decoded.$$type === 'string' && decoded.$$type.startsWith('bidc-res:')) {
              const responseMessageId = decoded.$$type.slice(9);
              const response = decoded.response;
              const responseResolver = responses.get(responseMessageId);
              if (responseResolver) {
                // Resolve the response promise with the decoded data
                responseResolver[0](response);
                responses.delete(responseMessageId);
              }
            } else {
              // Call the callback with the ID and decoded data
              if (!globalReceiveCallback) {
                throw new Error('Global receive callback is not set. This is a bug in BIDC.');
              }
              try {
                const response = globalReceiveCallback(decoded);
                void send({ $$type: `bidc-res:${messageId}`, response });
              } catch (error) {
                console.error(error);
              }
            }
          } catch (error) {
            console.error(`Error decoding stream for ID ${messageId}:`, error);
            // Clean up tracking for this message ID
            activeDecodings?.delete(messageId);
          }
        };
        processDecoding();
      } else {
        // This is a continuation chunk for an existing message
        const decoding = activeDecodings?.get(messageId);
        if (decoding) {
          // Add chunk to pending queue and notify any waiting generators
          decoding.pendingChunks.push(chunk);
          decoding.chunkResolver?.();
        } else {
          console.warn(`No active decoding found for ID: ${messageId}`);
        }
      }
    };
    // Helper function to wait for new chunks from MessageEvents for a specific message ID
    async function waitForNewChunkFromMessageEvent(messageId) {
      const decoding = activeDecodings?.get(messageId);
      if (!decoding) {
        return null;
      }
      // If we have pending chunks, return the next one
      if (decoding.pendingChunks.length > 0) {
        const nextChunk = decoding.pendingChunks.shift();
        return nextChunk;
      }
      // Otherwise, wait for a new chunk to arrive
      await new Promise((resolve)=>{
        decoding.chunkResolver = resolve;
      });
      if (decoding.pendingChunks.length === 0) {
        // If no chunks are pending, return null to indicate end of stream
        return null;
      }
      return decoding.pendingChunks.shift();
    }
    activePort.addEventListener('message', messageHandler);
    disposables.push(()=>{
      activePort.removeEventListener('message', messageHandler);
    });
    // Start the MessagePort to begin receiving messages
    activePort.start();
    onResetPort((newPort)=>{
      // If the port is reset, we need to reinitialize the connection
      if (canceled) return;
      activePort.removeEventListener('message', messageHandler);
      activePort = newPort;
      activePort.addEventListener('message', messageHandler);
      // Start the new port to begin receiving messages
      activePort.start();
    });
  });
  // Cleanup function to remove listeners and close ports
  const cleanup = ()=>{
    canceled = true;
    disposables.forEach((dispose)=>dispose());
    disposables.length = 0;
  };
  return { send, receive, cleanup };
}

exports.createChannel = createChannel;
exports.decode = decode;
exports.encode = encode;
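/*
 * Usage sketch (informal, not part of the library): payloads may carry promises
 * and callback functions. A promise streams its result as a later chunk, and a
 * function becomes an async proxy on the other side that calls back over the
 * channel via the `bidc-fn:` messages handled above. The iframe variable below
 * is assumed to reference an iframe element on the page.
 *
 *   // parent page
 *   const { send } = createChannel(iframe.contentWindow, 'stats');
 *   await send({
 *     slow: new Promise((resolve) => setTimeout(() => resolve('done'), 1000)),
 *     onProgress: (pct) => console.log('progress', pct)
 *   });
 *
 *   // inside the iframe
 *   const { receive } = createChannel('stats');
 *   receive(async ({ slow, onProgress }) => {
 *     await onProgress(50);    // invokes the parent's callback over the channel
 *     console.log(await slow); // 'done', resolved when its p-chunk arrives
 *   });
 */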