
wrapture

Version: 0.1.28

Wrapture lets you go from a Python-trained model to deployable JavaScript with a single command. It generates TypeScript bindings and a Web/Node-compatible wrapper, using WebGPU/WASM-ready ONNX runtimes.
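A typical invocation looks something like the following sketch (the file paths are placeholders; the flags correspond to the options registered in src/wrapture.ts below, and --backend wasm makes the generated wrapper load model_quant.onnx instead of model.onnx):

npx wrapture --input ./model.pt --output ./dist --quantize --backend wasm

The CLI expects python3 on the PATH with the torch, onnx, onnxsim and onnxruntime packages installed; it checks for these before running the conversion script.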

#!/usr/bin/env node

// src/wrapture.ts
import chalk2 from "chalk";
import { Command } from "commander";
import ora from "ora";
import { existsSync } from "fs";
import path3 from "path";

// package.json
var package_default = {
  name: "wrapture",
  version: "0.1.28",
  description: "Wrapture lets you go from a Python-trained model to deployable JavaScript with a single command. It generates TypeScript bindings and a Web/Node-compatible wrapper, using WebGPU/WASM-ready ONNX runtimes.",
  keywords: [
    "onnx",
    "pytorch",
    "torch",
    "model exporter",
    "onnx export",
    "onnx runtime",
    "onnx to javascript",
    "machine learning",
    "ml model conversion",
    "typescript",
    "javascript",
    "cli tool",
    "webgpu",
    "wasm",
    "onnx simplifier",
    "onnx quantization",
    "deep learning",
    "model conversion",
    "wrapture",
    "loadModel",
    "predict"
  ],
  homepage: "https://github.com/phun-ky/wrapture#readme",
  bugs: {
    url: "https://github.com/phun-ky/wrapture/issues"
  },
  repository: {
    type: "git",
    url: "git+https://github.com/phun-ky/wrapture.git"
  },
  funding: "https://github.com/phun-ky/angle?sponsor=1",
  license: "MIT",
  author: "Alexander Vassbotn R\xF8yne-Helgesen <alexander@phun-ky.net>",
  type: "module",
  bin: {
    wrapture: "./bin/wrapture.js"
  },
  scripts: {
    build: "tsup",
    commit: "npx git-cz",
    "docs:gen": "node ./node_modules/.bin/typedoc",
    release: "release-it",
    start: "node ./bin/wrapture.js",
    "style:code": "npx putout src",
    "style:format": "./node_modules/.bin/eslint -c ./eslint.config.mjs src --fix && ./node_modules/.bin/prettier --write ./eslint.config.mjs src",
    "style:lint": "./node_modules/.bin/eslint -c ./eslint.config.mjs src && ./node_modules/.bin/prettier --check src",
    test: 'NODE_ENV=test glob -c "node --import tsx --test --no-warnings" "./src/**/__tests__/**/*.[jt]s"',
    "pretest:ci": "rm -rf coverage && mkdir -p coverage",
    "test:ci": 'NODE_ENV=test glob -c "node --import tsx --test --no-warnings --experimental-test-coverage --test-reporter=cobertura --test-reporter-destination=coverage/cobertura-coverage.xml --test-reporter=spec --test-reporter-destination=stdout" "./src/**/__tests__/**/*.[jt]s"'
  },
  dependencies: {
    chalk: "^5.4.1",
    commander: "^14.0.0",
    ora: "^8.2.0"
  },
  devDependencies: {
    "@release-it/conventional-changelog": "^10.0.0",
    "@rollup/plugin-node-resolve": "^16.0.1",
    "@rollup/plugin-terser": "^0.4.4",
    "@stylistic/eslint-plugin": "^5.0.0",
    "@types/node": "^24.0.2",
    cobertura: "^1.0.1",
    eslint: "^9.20.0",
    "eslint-config-phun-ky": "^1.0.0",
    "git-cz": "^4.9.0",
    "onnxruntime-web": "^1.22.0",
    prettier: "^3.2.5",
    putout: "^40.1.9",
    "release-it": "^19.0.1",
    "remark-github": "^12.0.0",
    "remark-toc": "^9.0.0",
    tslib: "^2.3.1",
    tsup: "^8.4.0",
    tsx: "^4.7.1",
    typedoc: "^0.28.3",
    "typedoc-plugin-frontmatter": "^1.0.0",
    "typedoc-plugin-markdown": "^4.2.3",
    "typedoc-plugin-mdn-links": "^5.0.1",
    "typedoc-plugin-no-inherit": "^1.4.0",
    "typedoc-plugin-remark": "^2.0.0",
    "typedoc-plugin-rename-defaults": "^0.7.1",
    typescript: "^5.0.0",
    "unified-prettier": "^2.0.1"
  },
  engines: {
    node: ">=22.0.0",
    npm: ">=10.8.2"
  },
  publishConfig: {
    access: "public"
  }
};

// src/utils/check-deps.ts
import chalk from "chalk";
import { spawnSync } from "child_process";

// Print a consistent, red-highlighted error block.
var printError = (title, body) => {
  console.error(`
${chalk.red.bold("\u2718")} ${chalk.red.bold(title)}`);
  console.error(chalk.white(body));
};

// Verify that a python3 interpreter is available on the PATH.
var checkPythonAvailable = () => {
  const result = spawnSync("python3", ["--version"], { encoding: "utf-8" });
  if (result.error || result.status !== 0) {
    printError(
      "Python 3 is not available.",
      "Please install it from https://www.python.org/downloads/ and ensure it is added to your PATH."
    );
    process.exit(1);
  }
};

// Run an inline Python snippet that reports any missing required modules.
var checkPythonDeps = () => {
  const check = spawnSync(
    "python3",
    [
      "-c",
      `
import sys
missing = []
for module in ['torch', 'onnx', 'onnxsim', 'onnxruntime']:
    try:
        __import__(module)
    except ImportError:
        missing.append(module)
if missing:
    print(', '.join(missing))
    sys.exit(1)
`
    ],
    { encoding: "utf-8" }
  );
  if (check.status !== 0) {
    const missing = check.stdout.trim().split(",").filter(Boolean);
    printError(
      "Missing Python dependencies.",
      `Please install the following packages: ${chalk.yellow(
        `python3 -m pip install ${missing.join(" ")}`
      )}`
    );
    process.exit(1);
  }
};

// src/utils/convert.ts
import { spawn } from "child_process";
import fs from "fs";
import path from "path";

// src/utils/log-level.ts
var currentLevel = "info";
var levels = {
  silent: 0,
  error: 1,
  warn: 2,
  info: 3,
  debug: 4
};
var setLogLevel = (level) => {
  currentLevel = level;
};
var shouldLog = (level) => {
  return levels[level] <= levels[currentLevel];
};
var log = {
  debug: (...args) => shouldLog("debug") && console.debug("[debug]", ...args),
  info: (...args) => shouldLog("info") && console.info("[info]", ...args),
  warn: (...args) => shouldLog("warn") && console.warn("[warn]", ...args),
  error: (...args) => shouldLog("error") && console.error("[error]", ...args)
};

// src/utils/convert.ts
// Spawn python/convert.py to export the PyTorch model to ONNX.
var convert = async (inputPath, outputDir, opts) => {
  if (!fs.existsSync(inputPath)) {
    throw new Error(`Input model file not found: ${inputPath}`);
  }
  log.info("Converting model to ONNX...");
  return new Promise((resolve, reject) => {
    const scriptPath = path.resolve(process.cwd(), "python/convert.py");
    log.debug("Script path for `convert.py`:", scriptPath);
    const args = [
      scriptPath,
      "--input",
      inputPath,
      "--output",
      outputDir,
      "--format",
      opts.format || "onnx"
    ];
    if (opts.quantize) args.push("--quantize");
    const python = spawn("python3", args);
    python.stdout.on("data", (data) => {
      log.debug("[python stdout]", data.toString());
    });
    python.stderr.on("data", (data) => {
      log.error("[python stderr]", data.toString());
    });
    python.on("close", (code) => {
      if (code === 0) {
        log.info("Model converted successfully");
        resolve();
      } else {
        log.error("Model conversion failed");
        reject(new Error(`convert.py exited with code ${code}`));
      }
    });
  });
};

// src/utils/generate-wrapper.ts
import fs2 from "fs";
import path2 from "path";

// Write wrapped.ts / wrapped.d.ts into the output directory, next to the exported model.
var generateWrapper = async (outputDir, opts) => {
  log.info("Generating wrapper files...");
  const wrapper = `import { InferenceSession, Tensor } from 'onnxruntime-web';

const softmax = (logits) => {
  const exps = logits.map(Math.exp);
  const sum = exps.reduce((a, b) => a + b, 0);
  return exps.map(e => e / sum);
}

const argmax = (arr) => {
  return arr.reduce((maxIdx, val, idx, src) => val > src[maxIdx] ? idx : maxIdx, 0);
}

export const loadModel = async () => {
  const session = await InferenceSession.create(
    new URL('./${opts.backend === "wasm" ? "model_quant.onnx" : "model.onnx"}', import.meta.url).href
  );

  return {
    predict: async (input) => {
      const feeds = { input: new Tensor('float32', input.data, input.dims) };
      const results = await session.run(feeds);
      const raw = results.output.data;

      if (!(raw instanceof Float32Array)) {
        throw new Error('Expected Float32Array logits but got something else');
      }

      const logits = raw;
      const probabilities = softmax(Array.from(logits));
      const predictedClass = argmax(probabilities);

      return { logits, probabilities, predictedClass };
    }
  };
};
`;
  const typings = `export interface ModelInput {
  data: Float32Array;
  dims: number[];
}

export interface ModelOutput {
  logits: Float32Array;
  probabilities: number[];
  predictedClass: number;
}

export interface LoadedModel {
  predict(input: ModelInput): Promise<ModelOutput>;
}

/**
 * Load the ONNX model and return a wrapper with \`predict()\` function.
 */
export function loadModel(): Promise<LoadedModel>;`;
  try {
    fs2.writeFileSync(path2.join(outputDir, "wrapped.ts"), wrapper);
    fs2.writeFileSync(path2.join(outputDir, "wrapped.d.ts"), typings);
    log.info("Wrapper files generated");
  } catch (error) {
    log.error("Failed to generate wrapper files");
    throw error;
  }
};

// src/wrapture.ts
// CLI definition and entry point.
var program = new Command();
program
  .name("wrapture")
  .description(
    `\u{1F300} ${chalk2.blue("One-click model exporter: ")}from PyTorch to Web-ready JS/TS.

Wrapture lets you go from a Python-trained model to deployable JavaScript with a single command. It generates TypeScript bindings and a Web/Node-compatible wrapper, using WebGPU/WASM-ready ONNX runtimes.

Report issues here: https://github.com/phun-ky/wrapture`
  )
  .version(package_default.version)
  .requiredOption("-i, --input <file>", "path to the PyTorch model (.pt)")
  .requiredOption("-o, --output <dir>", "output directory for the wrapped model")
  .option("--quantize", "apply quantization to reduce model size")
  .option("--format <type>", "export format: onnx (default)", "onnx")
  .option("--backend <backend>", "inference backend: webgpu | wasm | cpu", "webgpu")
  .option(
    "--logLevel <level>",
    "set log level: silent | error | warn | info | debug",
    "error"
  )
  .action(async (opts) => {
    checkPythonAvailable();
    checkPythonDeps();
    const input = path3.resolve(opts.input);
    const output = path3.resolve(opts.output);
    setLogLevel(process.env.LOGLEVEL || opts.logLevel || "error");
    if (!existsSync(input)) {
      console.error(
        `${chalk2.red.bold("\u2718 Input file not found:")} ${chalk2.white(input)}`
      );
      process.exit(1);
    }
    const spinner = ora("Wrapture: Exporting model...").start();
    try {
      await convert(input, output, opts);
      await generateWrapper(output, opts);
      spinner.succeed("Done! Your model is wrapped and ready.");
    } catch (err) {
      spinner.fail("Failed to export model:");
      console.error(err);
      process.exit(1);
    }
  });
program.parse(process.argv);