@ayonli/jsext

A JavaScript extension package for building strong and modern applications.

'use strict';

var chan = require('../chan.js');
var env = require('../env.js');
var parallel_channel = require('./channel.js');
var module_util = require('../module/util.js');
var object = require('../object.js');
var number = require('../number.js');
var error = require('../error.js');
var path = require('../path.js');
var runtime = require('../runtime.js');
var error_Exception = require('../error/Exception.js');
var path_util = require('../path/util.js');

var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
const workerIdCounter = number.serial(true);
let workerPool = [];
let gcTimer;
const remoteTasks = new Map();

const getMaxParallelism = (async () => {
    if (typeof navigator === "object" && navigator.hardwareConcurrency) {
        return navigator.hardwareConcurrency;
    }
    else if (env.isNode) {
        const os = await import('os');
        if (typeof os.availableParallelism === "function") {
            return os.availableParallelism();
        }
        else {
            return os.cpus().length;
        }
    }
    else {
        return 8;
    }
})();

function isCallResponse(msg) {
    return msg && typeof msg === "object" && ["return", "yield", "error", "gen"].includes(msg.type);
}

function getModuleDir(importMetaPath) {
    if (path.extname(importMetaPath) === ".ts") {
        return path.resolve(importMetaPath, "../..");
    }
    let _dirname = path.dirname(importMetaPath);
    if (path_util.endsWith(_dirname, "jsext/bundle")) {
        // The application imports the bundled version of this module
        return path.dirname(_dirname);
    }
    else {
        // The application imports the compiled version of this module
        return path.resolve(_dirname, "../..");
    }
}

async function getWorkerEntry(parallel = {}) {
    if (env.isDeno) {
        if (parallel.workerEntry) {
            return parallel.workerEntry;
        }
        else if (undefined) {
            // The code is bundled, try the remote worker entry.
            if ((typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.src || new URL('parallel/threads.js', document.baseURI).href)).includes("jsr.io")) {
                return "jsr:@ayonli/jsext/worker.ts";
            }
            else {
                return "https://ayonli.github.io/jsext/bundle/worker.mjs";
            }
        }
        else {
            if ((typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.src || new URL('parallel/threads.js', document.baseURI).href)).includes("jsr.io")) {
                return "jsr:@ayonli/jsext/worker.ts";
            }
            else {
                const _dirname = getModuleDir((typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.src || new URL('parallel/threads.js', document.baseURI).href)));
                return path.join(_dirname, "worker.ts");
            }
        }
    }
    else if (env.isNodeLike) {
        if (parallel.workerEntry) {
            return parallel.workerEntry;
        }
        const _filename = path.toFsPath((typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.src || new URL('parallel/threads.js', document.baseURI).href)));
        if (_filename === process.argv[1]) {
            // The code is bundled, try the worker entry in node_modules
            // (hope it exists).
            const _dirname = path.join(path.cwd(), "node_modules/@ayonli/jsext");
            if (env.isBun) {
                if (path.extname(_filename) === ".ts") {
                    return path.join(_dirname, "worker.ts");
                }
                else {
                    return path.join(_dirname, "bundle/worker.mjs");
                }
            }
            else {
                return path.join(_dirname, "bundle/worker-node.mjs");
            }
        }
        else {
            const _dirname = getModuleDir(_filename);
            if (env.isBun) {
                if (path.extname(_filename) === ".ts") {
                    return path.join(_dirname, "worker.ts");
                }
                else {
                    return path.join(_dirname, "bundle/worker.mjs");
                }
            }
            else {
                return path.join(_dirname, "bundle/worker-node.mjs");
            }
        }
    }
    else {
        if (parallel.workerEntry) {
            if (path_util.isUrl(parallel.workerEntry)) {
                return await module_util.getObjectURL(parallel.workerEntry);
            }
            else {
                return parallel.workerEntry;
            }
        }
        else {
            const url = "https://ayonli.github.io/jsext/bundle/worker.mjs";
            return await module_util.getObjectURL(url);
        }
    }
}

async function createWorker(options) {
    let { adapter = "worker_threads", parallel } = options;
    const entry = await getWorkerEntry(parallel);
    if (env.isNode || env.isBun) {
        if (adapter === "child_process") {
            const { fork } = await import('child_process');
            const serialization = env.isNodeBelow14 ? "json" : "advanced";
            const worker = fork(entry, ["--worker-thread"], {
                stdio: "inherit",
                serialization,
            });
            const workerId = worker.pid;
            await new Promise((resolve, reject) => {
                worker.once("error", reject);
                worker.once("message", () => {
                    worker.off("error", reject);
                    resolve();
                });
            });
            return {
                worker,
                workerId,
                kind: "node_process",
            };
        }
        else if (env.isNode) {
            const { Worker } = await import('worker_threads');
            const worker = new Worker(entry, { argv: ["--worker-thread"] });
            const workerId = worker.threadId;
            await new Promise((resolve, reject) => {
                worker.once("error", reject);
                worker.once("online", () => {
                    worker.off("error", reject);
                    resolve();
                });
            });
            return {
                worker,
                workerId,
                kind: "node_worker",
            };
        }
        else { // isBun
            const worker = new Worker(entry, { type: "module" });
            const workerId = workerIdCounter.next().value;
            await new Promise((resolve, reject) => {
                worker.onerror = (ev) => {
                    reject(new Error(ev.message || "unable to start the worker"));
                };
                worker.addEventListener("open", () => {
                    // @ts-ignore
                    worker.onerror = null;
                    resolve();
                });
            });
            return {
                worker,
                workerId,
                kind: "bun_worker",
            };
        }
    }
    else { // Deno and browsers
        const worker = new Worker(entry, { type: "module" });
        const workerId = workerIdCounter.next().value;
        return {
            worker,
            workerId,
            kind: "web_worker",
        };
    }
}

function handleWorkerMessage(poolRecord, worker, msg) {
    var _a, _b, _c, _d;
    if (parallel_channel.isChannelMessage(msg)) {
        parallel_channel.handleChannelMessage(msg);
    }
    else if (isCallResponse(msg) && msg.taskId) {
        const task = remoteTasks.get(msg.taskId);
        if (!task)
            return;
        if (msg.type === "return" || msg.type === "error") {
            if (msg.type === "error") {
                const err = object.isPlainObject(msg.error)
                    ? ((_a = error.fromObject(msg.error)) !== null && _a !== void 0 ? _a : msg.error)
                    : msg.error;
                if (err instanceof Error &&
                    (err.message.includes("not be cloned") ||
                        ((_b = err.stack) === null || _b === void 0 ? void 0 : _b.includes("not be cloned")) // Node.js v16-
                    )) {
                    Object.defineProperty(err, "stack", {
                        configurable: true,
                        enumerable: false,
                        writable: true,
                        value: (err.stack ? err.stack + "\n " : "") + `at ${task.fn} (${task.module})`,
                    });
                }
                if (task.promise) {
                    task.promise.reject(err);
                    if (task.channel) {
                        task.channel.close();
                    }
                }
                else if (task.channel) {
                    task.channel.close(err);
                }
                else {
                    task.error = err;
                }
            }
            else {
                const value = unwrapReturnValue(msg.value);
                if (task.promise) {
                    task.promise.resolve(value);
                }
                else {
                    task.result = { value };
                }
                if (task.channel) {
                    task.channel.close();
                }
            }
            poolRecord.tasks.delete(msg.taskId);
            if (!poolRecord.tasks.size && typeof worker.unref === "function") {
                // Allow the main thread to exit if the event
                // loop is empty.
                worker.unref();
            }
        }
        else if (msg.type === "yield") {
            const value = unwrapReturnValue(msg.value);
            (_c = task.channel) === null || _c === void 0 ? void 0 : _c.send({ value, done: msg.done });
            if (msg.done) {
                // The final message of yield event is the
                // return value.
                handleWorkerMessage(poolRecord, worker, {
                    type: "return",
                    value,
                    taskId: msg.taskId,
                });
            }
        }
        else if (msg.type === "gen") {
            (_d = task.generate) === null || _d === void 0 ? void 0 : _d.call(task);
        }
    }
}

function handleWorkerClose(poolRecord, err) {
    for (const taskId of poolRecord.tasks) {
        poolRecord.tasks.delete(taskId);
        const task = remoteTasks.get(taskId);
        if (task) {
            if (task.promise) {
                task.promise.reject(err);
                if (task.channel) {
                    task.channel.close();
                }
            }
            else if (task.channel) {
                task.channel.close(err);
            }
            else {
                task.error = err;
            }
        }
    }
    workerPool = workerPool.filter(item => item !== poolRecord);
}

async function acquireWorker(taskId, parallel) {
    const maxWorkers = parallel.maxWorkers || await getMaxParallelism;
    let poolRecord = workerPool.find(item => !item.tasks.size);
    if (poolRecord) {
        poolRecord.lastAccess = Date.now();
    }
    else if (workerPool.length < maxWorkers) {
        workerPool.push(poolRecord = {
            getWorker: (async () => {
                const worker = (await createWorker({ parallel })).worker;
                const handleMessage = handleWorkerMessage.bind(void 0, poolRecord, worker);
                const handleClose = handleWorkerClose.bind(void 0, poolRecord);
                if (env.isNode) {
                    worker.on("message", handleMessage)
                        .on("error", handleClose); // In Node.js, worker will exit once erred.
                }
                else if (env.isBun) {
                    const _worker = worker;
                    _worker.onmessage = (ev) => handleMessage(ev.data);
                    _worker.onerror = () => _worker.terminate(); // terminate once erred
                    _worker.addEventListener("close", ((ev) => {
                        handleClose(new Error(ev.reason + " (" + ev.code + ")"));
                    }));
                }
                else {
                    const _worker = worker;
                    _worker.onmessage = (ev) => handleMessage(ev.data);
                    _worker.onerror = (ev) => {
                        var _a;
                        _worker.terminate(); // ensure termination
                        handleClose((_a = error.fromErrorEvent(ev)) !== null && _a !== void 0 ? _a : new Error("worker exited"));
                    };
                }
                return worker;
            })(),
            tasks: new Set(),
            lastAccess: Date.now(),
        });
        if (!gcTimer) {
            gcTimer = setInterval(() => {
                // GC: clean long-time unused workers
                const now = Date.now();
                const idealItems = [];
                workerPool = workerPool.filter(item => {
                    const ideal = !item.tasks.size && (now - item.lastAccess) >= 10000;
                    if (ideal) {
                        idealItems.push(item);
                    }
                    return !ideal;
                });
                idealItems.forEach(async (item) => {
                    const worker = await item.getWorker;
                    await worker.terminate();
                });
            }, 1000);
            runtime.unrefTimer(gcTimer);
        }
    }
    else {
        poolRecord = workerPool[taskId % workerPool.length];
        poolRecord.lastAccess = Date.now();
    }
    poolRecord.tasks.add(taskId);
    const worker = await poolRecord.getWorker;
    if ("ref" in worker && typeof worker.ref === "function") {
        // Prevent premature exit in the main thread.
        worker.ref();
    }
    return worker;
}

function wrapArgs(args, getWorker) {
    const transferable = [];
    args = args.map(arg => {
        if (arg instanceof chan.Channel) {
            return parallel_channel.wrapChannel(arg, (type, msg, channelId) => {
                getWorker.then(worker => {
                    if (typeof worker["postMessage"] === "function") {
                        try {
                            worker.postMessage({
                                type,
                                value: msg,
                                channelId,
                            });
                        }
                        catch (err) {
                            // Suppress error when sending `close` command to
                            // the channel in the worker thread when the thread
                            // is terminated. This situation often occurs when
                            // using `run()` to call function and the `result()`
                            // is called before `channel.close()`.
                            if (!(type === "close" && String(err).includes("Worker has been terminated"))) {
                                throw err;
                            }
                        }
                    }
                    else {
                        worker.send({
                            type,
                            value: msg,
                            channelId,
                        });
                    }
                });
            });
        }
        else if ((arg instanceof error_Exception.default) || error.isDOMException(arg) || error.isAggregateError(arg)) {
            return error.toObject(arg);
        }
        if (arg instanceof ArrayBuffer) {
            transferable.push(arg);
        }
        else if (object.isPlainObject(arg)) {
            for (const key of Object.getOwnPropertyNames(arg)) {
                const value = arg[key];
                if (value instanceof ArrayBuffer) {
                    transferable.push(value);
                }
                else if ((value instanceof error_Exception.default) || error.isDOMException(value) || error.isAggregateError(value)) {
                    arg[key] = error.toObject(value);
                }
            }
        }
        else if (Array.isArray(arg)) {
            arg = arg.map(item => {
                if (item instanceof ArrayBuffer) {
                    transferable.push(item);
                    return item;
                }
                else if ((item instanceof error_Exception.default) || error.isDOMException(item) || error.isAggregateError(item)) {
                    return error.toObject(item);
                }
                else {
                    return item;
                }
            });
        }
        return arg;
    });
    return { args, transferable };
}

function unwrapReturnValue(value) {
    if (object.isPlainObject(value) &&
        (value["@@type"] === "Exception" || value["@@type"] === "DOMException" || value["@@type"] === "AggregateError")) {
        return error.fromObject(value);
    }
    return value;
}

exports.acquireWorker = acquireWorker;
exports.createWorker = createWorker;
exports.getMaxParallelism = getMaxParallelism;
exports.isCallResponse = isCallResponse;
exports.remoteTasks = remoteTasks;
exports.unwrapReturnValue = unwrapReturnValue;
exports.wrapArgs = wrapArgs;
//# sourceMappingURL=threads.js.map
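
Note: the exports above are the low-level plumbing behind jsext's parallel facilities. acquireWorker() hands out a pooled worker keyed by task ID, wrapArgs() converts Channel and error arguments into structured-clone-safe values while collecting ArrayBuffer transferables, remoteTasks holds the per-task bookkeeping that handleWorkerMessage() settles, and unwrapReturnValue() restores marshalled errors on the way back. The sketch below shows how a caller might wire these pieces together; it is illustrative only. The "call" request shape, the callRemote name, and the require path are assumptions (the request side of the protocol lives in the worker entry, not in this file), while the "return"/"error" response handling it relies on is exactly what handleWorkerMessage() implements above.

// Sketch only: the "call" message shape and the task-id choice are assumptions;
// this file only defines the response side ("return" | "error" | "yield" | "gen").
// The require path is illustrative.
const threads = require('./threads.js');

async function callRemote(module, fn, args) {
    const taskId = Date.now(); // hypothetical id; jsext uses a serial counter internally
    const worker = await threads.acquireWorker(taskId, {});
    const { args: wrapped, transferable } = threads.wrapArgs(args, Promise.resolve(worker));
    return new Promise((resolve, reject) => {
        // Register the task so handleWorkerMessage() can settle this promise
        // when a "return" or "error" message arrives with the same taskId.
        threads.remoteTasks.set(taskId, { module, fn, promise: { resolve, reject } });
        worker.postMessage({ type: "call", module, fn, args: wrapped, taskId }, transferable);
    });
}

A real caller would also remove the remoteTasks entry once the task settles; that cleanup is omitted here for brevity.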
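
createWorker() can also be used on its own to start a one-off worker outside the pool: the adapter option switches between worker_threads and child_process on Node.js, and the returned kind tag ("node_worker", "node_process", "bun_worker", or "web_worker") tells the caller which handle it got back (postMessage/terminate vs. send/kill). A minimal sketch, assuming the default worker_threads adapter and an illustrative require path:

// Minimal sketch: start one worker with the default adapter, then shut it down.
// The require path is illustrative; parallel.workerEntry can override the entry script.
const { createWorker } = require('./threads.js');

(async () => {
    const { worker, workerId, kind } = await createWorker({
        adapter: "worker_threads", // or "child_process" on Node.js
        parallel: {},              // e.g. { workerEntry: "/path/to/worker.mjs" }
    });
    console.log(`started ${kind} worker #${workerId}`);
    await worker.terminate();      // a "node_process" handle would use kill() instead
})();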