// @needle-tools/engine
// Needle Engine is a web-based runtime for 3D apps. It runs on your machine for development with great integrations into editors like Unity or Blender - and can be deployed onto any device! It is flexible, extensible and networking and XR are built-in.
import { ChildProcess, exec } from 'child_process';
import { NEEDLE_CLOUD_CLI_NAME } from '../common/cloud.js';
import { getOutputDirectory, loadConfig } from './config.js';
import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync, writeFileSync } from 'fs';
import { relative } from 'path';
import { copyFilesSync, formatBytes } from '../common/files.js';
import { delay } from '../common/timers.js';
import { needleBlue, needleDim, needleLog, needleSupportsColor, setTransientLogLineCleaner } from './logging.js';
// Braille spinner frames for the transient single-line progress indicator shown while the pipeline runs.
const PIPELINE_SPINNER_FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
// Line prefix used by the build pipeline child process to mark machine-readable (JSON) log payloads.
const PIPELINE_STRUCTURED_LOG_PREFIX = "__needle_pipeline_log__:";
/**
 * Validates that the configuration required for a cloud build is present.
 * When a build pipeline is configured and we run in a CI environment, a Needle Cloud
 * access token must be provided via `process.env.NEEDLE_CLOUD_TOKEN`.
 * @param {import('../types').userSettings} config
 * @returns {boolean} true when the configuration is valid
 * @throws {Error} when running in CI with a configured build pipeline but no access token
 */
function validateCloudBuildConfiguration(config) {
    // Nothing to validate without a build pipeline configuration or outside of CI.
    if (config.buildPipeline == undefined) return true;
    if (!process.env.CI) return true;
    const token = process.env.NEEDLE_CLOUD_TOKEN;
    if (token != undefined && token.length > 0) return true;
    let msg = `Missing Needle Cloud access token. Please set your Needle Cloud token via \`process.env.NEEDLE_CLOUD_TOKEN\`.`;
    // Github Actions need the token forwarded explicitly as a secret — give targeted guidance.
    if (process.env.GITHUB_ACTION) {
        msg += `
Make sure to pass the token as a secret to the Github action.
For this you may have to modify your .github workflow yaml file to include the following code:
env:
- NEEDLE_CLOUD_TOKEN: \${{ secrets.NEEDLE_CLOUD_TOKEN }}
`;
    }
    // Fix: previously the error was only thrown when GITHUB_ACTION was set, so other CI
    // systems with a missing token built the message but silently continued the build.
    throw new Error(msg);
}
// Module-level state shared between the vite hooks below and the exported wait helpers.
// see https://linear.app/needle/issue/NE-3798
/** Pending pipeline run; assigned in `buildEnd`, chained/awaited in `closeBundle`.
 * @type {Promise<void>|null} */
let buildPipelineTask;
/** Set by `invokeBuildPipeline`'s exit handler on success; consumed in `closeBundle` to copy results.
 * @type {null | {tempDirectory:string, outputDirectory:string}} */
let buildPipelineTaskResults = null;
/** One-line summary of transform/compress step counts, printed once in `closeBundle`.
 * @type {null | string} */
let buildPipelineStepSummary = null;
/**
 * This function can be used by other plugins to wait for the build pipeline to finish before doing some work in the closeBundle hook.
 * This event is triggered *after* the build pipeline has finished all its work AND the results have been copied to their final output directory.
 */
export function waitForBuildPipelineToFinish() {
    return buildPipelineTask;
}
/** NOTE(review): never reassigned anywhere in this file, so the accessor below
 * always returns null — confirm whether assignment happens in code outside this view.
 * @type {Promise<void>|null} */
let buildPipelineCopyTask = null;
export function waitForBuildPipelineCopyToFinish() {
    return buildPipelineCopyTask;
}
/** This time is set when a build plugin is triggered to run
 * We increase the time by 10-20 seconds each time because we might have a multi step process
 * where the build pipeline is triggered in the SSR build (which we can not always detect)
 * and the final client build is triggered later (but the build pipeline is still running and waiting)
 */
// Absolute epoch-ms deadline for waiting on the output directory; 0 means "no deadline set yet".
let maxOutputDirectoryCreatedWaitTime = 0;
// Base wait duration; may be overridden by `buildPipeline.maxWaitDuration` in needleBuildPipeline.
let defaultWaitTime = 60_000; // 60 seconds
/**
 * Pushes the deadline for waiting on the output directory further into the future.
 * Invoked from the vite build hooks, because multi-step builds (e.g. an SSR pass
 * followed by the client build) can keep the pipeline waiting across invocations.
 * @param {boolean} debugLog whether to log the newly computed deadline
 */
function increaseMaxWaitTime(debugLog) {
    // CI machines tend to be slower, so grant an extra minute there.
    const extraWaitTime = process.env.CI ? defaultWaitTime + 60_000 : defaultWaitTime;
    maxOutputDirectoryCreatedWaitTime = Date.now() + extraWaitTime;
    if (debugLog) {
        log(`Increased max wait time by ${extraWaitTime / 1000}sec until ${new Date(maxOutputDirectoryCreatedWaitTime).toISOString()}`);
    }
}
/** Runs the needle build pipeline as part of the vite build process.
 * @param {"build" | "serve"} command
 * @param {import('../types/needleConfig').needleMeta | null | undefined} config - NOTE(review): currently unused in this function — confirm whether it can be dropped from callers.
 * @param {import('../types').userSettings} userSettings
 * @returns {Promise<import('vite').Plugin | null>} the plugin, or null when the pipeline is disabled or this is not a production build
 */
export async function needleBuildPipeline(command, config, userSettings) {
    // we only want to run compression here if this is a distribution build
    // this is handled however in the `apply` hook
    if (userSettings.noBuildPipeline) return null;
    if (userSettings.buildPipeline?.enabled === false) {
        log("Skipping build pipeline because it is disabled in the user settings via `buildPipeline.enabled: false`");
        return null;
    }
    // Migrate legacy "build:production" scripts in the project's package.json (see fixPackageJson).
    const packageJsonPath = process.cwd() + "/package.json";
    await fixPackageJson(packageJsonPath);
    // Allow the user to override the default output-directory wait timeout.
    if (userSettings.buildPipeline?.maxWaitDuration !== undefined && typeof userSettings.buildPipeline?.maxWaitDuration === "number" && userSettings.buildPipeline?.maxWaitDuration >= 0) {
        log(`Set timeout to ${userSettings.buildPipeline?.maxWaitDuration}ms as defined in your config`);
        defaultWaitTime = userSettings.buildPipeline?.maxWaitDuration;
    }
    let shouldRun = false;
    // Check env var (set by Unity/Blender integrations) or CLI arg (for manual invocation)
    if (process.env.NEEDLE_BUILD_PRODUCTION === "true" || process.argv.indexOf("--production") >= 0) {
        shouldRun = true;
    }
    if (!shouldRun) {
        if (command === "build") {
            log("Skipping build pipeline because this is a development build.\n- Invoke with `--production` to run the build pipeline.\n- For example \"vite build -- --production\" or set NEEDLE_BUILD_PRODUCTION=true.");
        }
        // NOTE(review): presumably gives the log line above time to flush before vite continues — confirm intent.
        await new Promise((resolve, _) => setTimeout(resolve, 1000));
        return null;
    }
    if (process.env.CI) {
        log("Running in CI environment");
    }
    // Throws in CI when a build pipeline is configured but no cloud token is available.
    validateCloudBuildConfiguration(userSettings);
    const verboseOutput = userSettings?.buildPipeline?.verbose || false;
    // Closed over by the hooks below; NOTE(review): taskHasCompleted is written but never read in this file.
    let taskHasCompleted = false;
    let taskSucceeded = false;
    return {
        name: 'needle:buildpipeline',
        enforce: "post",
        apply: (_conf, env) => {
            if (verboseOutput) {
                log("Apply:", env);
            }
            // Don't run for SSR builds (e.g. sveltekit).
            // Unfortunately this is always false in vite 4.3 so we can not rely on it solely
            // Vite 8+ renamed ssrBuild to isSsrBuild
            if (env.isSsrBuild ?? env.ssrBuild) return false;
            // Dont run if there's already a build pipeline task running
            if (env.command === "build") {
                increaseMaxWaitTime(verboseOutput);
                if (buildPipelineTask) {
                    return false;
                }
                return true;
            }
            return false;
        },
        buildEnd(error) {
            increaseMaxWaitTime(verboseOutput);
            if (verboseOutput) {
                log("Build end:", error ?? "No error");
            }
            if (error) {
                // if there was an error during the build we should not run the build pipeline
            }
            else {
                if (buildPipelineTask) {
                    log("Build pipeline already running...");
                    return;
                }
                // start the compression process once vite is done copying the files
                buildPipelineTask = invokeBuildPipeline(userSettings, { verbose: verboseOutput })
                    .then((res) => {
                        if (verboseOutput) log("Build pipeline task result:", res);
                        taskSucceeded = res;
                    })
                    .catch((/** @type {{ message?: string }} */ err) => {
                        needleLog("needle-buildpipeline", "- Error during build pipeline: " + err.message, "error");
                        if (verboseOutput) log("Error details:", err);
                    })
                    // Throwing here rejects buildPipelineTask, which closeBundle returns — failing the vite build.
                    .finally(() => {
                        taskHasCompleted = true;
                        if (!taskSucceeded) {
                            needleLog("needle-buildpipeline", "- Build pipeline task did not succeed.", "error");
                            throw new Error("[needle-buildpipeline] - Build pipeline failed. Please check the logs above for more information.");
                        }
                    });
            }
        },
        closeBundle() {
            if (!buildPipelineTask) {
                return;
            }
            // // this is the last hook that is called, so we can wait for the task to finish here
            return buildPipelineTask = buildPipelineTask?.then(() => {
                const lines = /** @type {string[]} */ ([]);
                if (buildPipelineStepSummary) {
                    lines.push(buildPipelineStepSummary);
                }
                if (taskSucceeded) {
                    lines.push(needleDim(`✓ Finished successfully`));
                }
                // Copy the results to their final output directory.
                if (buildPipelineTaskResults != null) {
                    const supportsColor = needleSupportsColor();
                    const key = (/** @type {string} */ text) => supportsColor ? needleBlue(text) : text;
                    const outputPath = relative(process.cwd(), buildPipelineTaskResults.outputDirectory).replaceAll("\\", "/") || ".";
                    // Stats are taken from the temp dir BEFORE copying so the log reflects what was moved.
                    const moved = getDirectoryStats(buildPipelineTaskResults.tempDirectory);
                    const ctx = { count: 0, bytes: 0 }
                    copyFilesSync(buildPipelineTaskResults.tempDirectory, buildPipelineTaskResults.outputDirectory, true, ctx);
                    // Clean up source files that were replaced by the build pipeline
                    // (e.g. .exr files that now have .pmrem.ktx2 replacements)
                    const outputDir = buildPipelineTaskResults.outputDirectory;
                    try {
                        for (const file of readdirSync(outputDir)) {
                            if (file.endsWith('.exr')) {
                                const replacement = file.replace(/\.exr$/i, '.pmrem.ktx2');
                                if (existsSync(outputDir + '/' + replacement)) {
                                    rmSync(outputDir + '/' + file);
                                }
                            }
                        }
                    } catch { /* silent */ }
                    lines.push(`${key("Copying compressed results")}: "${outputPath}" — ${moved.fileCount} file${moved.fileCount !== 1 ? 's' : ''}, ${formatBytes(moved.totalBytes)}`);
                }
                else {
                    lines.push("No files to copy - build pipeline did not run or did not finish successfully");
                }
                if (lines.length > 0) {
                    needleLog("needle-buildpipeline", lines.join("\n"), "log", { dimBody: false });
                }
                buildPipelineStepSummary = null;
            });
        },
    }
}
/**
 * Previously we did always install the build pipeline and run an extra command to invoke the build pipeline.
 * This is now done automatically by the needle build pipeline plugin - so we update all legacy projects to use the new method.
 * Rewrites the legacy "build:production" script in the given package.json in place;
 * does nothing when the file is missing or already up to date.
 * @param {string} packageJsonPath absolute path to the project's package.json
 */
async function fixPackageJson(packageJsonPath) {
    if (!existsSync(packageJsonPath)) {
        return;
    }
    const oldScript = `"build:production": "npm run build:dev && npm run gltf:transform"`;
    const newScript = `"build:production": "vite build -- --production"`;
    const originalText = readFileSync(packageJsonPath, "utf8");
    // Only the first (and in practice only) occurrence of the legacy script is replaced.
    const updatedText = originalText.replace(oldScript, newScript);
    if (updatedText === originalText) {
        // Nothing to migrate.
        return;
    }
    log("Automatically updated package.json production build script");
    log("- FROM " + oldScript);
    log("- TO " + newScript);
    writeFileSync(packageJsonPath, updatedText);
}
/** Logs an info-level message under the plugin's log header.
 * @param {...unknown} args joined with spaces into a single message */
function log(...args) {
    const message = args.join(" ");
    needleLog("needle-buildpipeline", message);
}
/** Logs a warning-level message under the plugin's log header.
 * @param {...unknown} args joined with spaces into a single message */
function warn(...args) {
    const message = args.join(" ");
    needleLog("needle-buildpipeline", message, "warn");
}
/**
* @typedef {{ event?: string, phase?: string, target?: string, message?: string, level?: string }} BuildPipelinePayload
*/
/**
 * Runs the glTF build pipeline over the produced assets: waits for the output
 * directory to appear, then spawns either the Needle Cloud CLI (when a token is
 * configured), a locally installed pipeline, or an npx-fetched one, streaming and
 * filtering its log output into the plugin's logger.
 * @param {import('../types').userSettings} opts
 * @param {{verbose?:boolean}} [options]
 * @returns {Promise<boolean>} true when the pipeline process exited with code 0
 */
async function invokeBuildPipeline(opts, options = {}) {
    // Project-relative display path; "." for the cwd itself.
    const rel = (/** @type {string} */ pathValue) => {
        const value = relative(process.cwd(), pathValue).replaceAll("\\", "/");
        return value?.length ? value : ".";
    };
    const supportsColor = needleSupportsColor();
    const key = (/** @type {string} */ text) => supportsColor ? needleBlue(text) : text;
    const installPath = "node_modules/@needle-tools/gltf-build-pipeline";
    const fullInstallPath = process.cwd() + "/" + installPath;
    const existsLocally = existsSync(fullInstallPath);
    if (existsLocally) {
        log("Found local installation at " + fullInstallPath);
    }
    await delay(500);
    const outputDirectory = getOutputDirectory() + "/assets";
    const startWaitTime = Date.now();
    // Hard upper bound of 5 minutes regardless of the rolling deadline below.
    const maxEndTime = startWaitTime + 300_000;
    /** wait until the output directory exists
     * @param {number} iteration
     * @returns {Promise<boolean>}
     */
    function waitForOutputDirectory(iteration) {
        // we wait for the output directory
        if (!existsSync(outputDirectory)) {
            if (maxOutputDirectoryCreatedWaitTime != 0 && Date.now() > maxOutputDirectoryCreatedWaitTime) {
                const waitDuration = (Date.now() - startWaitTime) / 1000;
                // NOTE(review): this message refers to `buildPipeline.timeoutDuration`, but the setting
                // actually read by needleBuildPipeline is `buildPipeline.maxWaitDuration` — confirm which name is correct.
                log(`Aborting after ${waitDuration} seconds... Your website bundling process did take longer than expected. Try increasing the timeout via the \`buildPipeline.timeoutDuration\` setting in your vite.config.js (default is 60 seconds).`);
                return Promise.resolve(false);
            }
            else if (Date.now() > maxEndTime) {
                log("Max wait time exceeded - aborting...");
                return Promise.resolve(false);
            }
            // Only announce the wait once (on the first iteration).
            if (iteration <= 0) needleLog("needle-buildpipeline", `Waiting for output directory to be created... (${outputDirectory})`, "log", { leadingNewline: true });
            // Poll once per second.
            return delay(1000).then(() => waitForOutputDirectory(iteration + 1));
        }
        if (options?.verbose) log(`Output directory found after ${iteration} iteration(s) at "${outputDirectory}" - continuing...`);
        return Promise.resolve(true);
    }
    if (!await waitForOutputDirectory(0)) {
        warn(`Output directory not found/created at \"${outputDirectory}\" - aborting...`);
        return false;
    }
    // Only 3D asset formats are handled by the pipeline.
    const files = readdirSync(outputDirectory).filter(f => f.endsWith(".glb") || f.endsWith(".gltf") || f.endsWith(".vrm") || f.endsWith(".fbx"));
    const filesBytes = files.reduce((total, file) => {
        try {
            return total + statSync(outputDirectory + "/" + file).size;
        }
        catch {
            // Ignore files that can not be stat'ed.
            return total;
        }
    }, 0);
    needleLog("needle-buildpipeline", [
        `${key("Files to process")}: ${files.length} in ${rel(outputDirectory)}, ${formatBytes(filesBytes)}`,
        existsSync(process.cwd() + "/node_modules/.needle/build-pipeline/output") ? needleDim("Removing temporary output directory") : undefined,
    ].filter(Boolean), "log", { dimBody: false });
    /** @type {null | ChildProcess} */
    let proc = null;
    // Token precedence: explicit build pipeline setting > license setting > environment variable.
    let cloudAccessToken = opts.buildPipeline?.accessToken || opts.license?.accessToken;
    if (!cloudAccessToken) {
        cloudAccessToken = process.env.NEEDLE_CLOUD_TOKEN;
    }
    const runInCloud = typeof cloudAccessToken === "string" && cloudAccessToken.length > 0;
    // if a user has defined the build pipeline settings object but not passed in a token we should print out some information
    // or perhaps log an error / prevent the build from running completely
    if (opts.buildPipeline && !runInCloud && process.env.CI) {
        warn(`No cloud access token found. Please set it via process.env.NEEDLE_CLOUD_TOKEN`);
        return false;
    }
    // put the processed files first in a temporary directory. They will be moved to the output directory at the end of the buildstep
    // this is so that processes like sveltekit-static-adapter can run first and does not override already compressed files
    const tempOutputPath = process.cwd() + "/node_modules/.needle/build-pipeline/output";
    if (existsSync(tempOutputPath)) {
        rmSync(tempOutputPath, { recursive: true, force: true });
    }
    mkdirSync(tempOutputPath, { recursive: true });
    /** Records the result directories for closeBundle — only on a successful exit.
     * @param {number} code */
    function onExit(code) {
        if (code === 0) {
            buildPipelineTaskResults = {
                tempDirectory: tempOutputPath,
                outputDirectory: outputDirectory
            }
        }
    }
    // allow running the build pipeline in the cloud. It requires an access token to be set in the vite.config.js
    // this can be set via e.g. process.env.NEEDLE_CLOUD_TOKEN
    const commandEnv = { ...process.env, NEEDLE_PIPELINE_STRUCTURED_LOGS: "1" };
    if (runInCloud) {
        if (!cloudAccessToken || !(typeof cloudAccessToken === "string") || cloudAccessToken.length <= 0) {
            throw new Error("No cloud access token configured. Please set it via process.env.NEEDLE_CLOUD_TOKEN or in the vite.config.js");
        }
        // NOTE(review): the token is interpolated into the command line (`--token ...`) and can
        // leak via process listings — consider passing it through the environment instead.
        let cmd = `npx --yes ${NEEDLE_CLOUD_CLI_NAME} optimize "${outputDirectory}" --token ${cloudAccessToken}`;
        let projectName = opts.buildPipeline?.projectName;
        // Default project name for compression
        // TODO: maybe this should be taken from the package.json name field or needle.config?
        if (!projectName) {
            projectName = "compression";
        }
        if (projectName) {
            cmd += ` --name "${projectName}"`;
        }
        if (opts.buildPipeline?.verbose === true) {
            cmd += " --verbose";
        }
        cmd += " --outdir \"" + tempOutputPath + "\"";
        console.log("\n");
        // Only the first and last two characters of the token are shown in logs.
        const obfuscatedToken = `${cloudAccessToken.slice(0, 2)}*****${cloudAccessToken.slice(-2)}`;
        log(`Running compression in cloud ⛅ using access token: ${obfuscatedToken}`);
        proc = exec(cmd, { env: commandEnv });
    }
    else if (existsLocally) {
        // Use the locally installed pipeline binary (run from its install directory).
        const cmd = `needle-gltf transform "${outputDirectory}" \"${tempOutputPath}\"`;
        log("Running command \"" + cmd + "\" at " + process.cwd() + "...");
        proc = exec(cmd, { cwd: installPath, env: commandEnv });
    }
    else {
        // First check if the user passed in a specific version to use via the vite config
        let version = opts.buildPipeline?.version;
        let versionSource = version ? "vite" : "";
        // If not, check env var (set by Unity/Blender integrations)
        if (!version && process.env.NEEDLE_BUILD_PIPELINE_VERSION) {
            version = process.env.NEEDLE_BUILD_PIPELINE_VERSION;
            versionSource = "env";
        }
        // Fallback: check CLI arg for backwards compatibility
        if (!version) {
            for (let i = 0; i < process.argv.length; i++) {
                if (process.argv[i] === "--build-pipeline-version" && i < process.argv.length - 1) {
                    const value = process.argv[i + 1]?.replace(/['"]+/g, '').trim();
                    if (value) {
                        version = value;
                        versionSource = "arg";
                    }
                    break;
                }
            }
        }
        // Otherwise we default to the stable version on npm
        if (!version) version = "stable";
        const versionInfo = versionSource ? `'${version}' (${versionSource})` : `'${version}'`;
        const cmd = `npx --yes @needle-tools/gltf-build-pipeline@${version} transform "${outputDirectory}" \"${tempOutputPath}\"`;
        log(`Running compression locally using version ${versionInfo}`);
        proc = exec(cmd, { env: commandEnv });
    }
    // --- Spinner + log filtering state for the child process output ---
    let pipelineSpinnerIndex = 0;
    let pipelineSpinnerActive = false;
    let transformStepCount = 0;
    let compressStepCount = 0;
    // Erases the transient spinner line (TTY only).
    function clearPipelineProgress() {
        if (!process.stdout.isTTY || !pipelineSpinnerActive) return;
        process.stdout.write("\r\x1b[2K");
        pipelineSpinnerActive = false;
    }
    /** Redraws the single-line spinner with the given text, truncated to the terminal width.
     * @param {string} text */
    function updatePipelineProgress(text) {
        if (!process.stdout.isTTY) return;
        const frame = PIPELINE_SPINNER_FRAMES[pipelineSpinnerIndex++ % PIPELINE_SPINNER_FRAMES.length];
        const maxLength = Math.max(24, (process.stdout.columns || 120) - 4);
        const value = text.length > maxLength ? `${text.slice(0, Math.max(0, maxLength - 1))}…` : text;
        process.stdout.write(`\r\x1b[2K${frame} ${value}\x1b[0K`);
        pipelineSpinnerActive = true;
    }
    // Let the shared logger clear our spinner line before it prints.
    setTransientLogLineCleaner(() => clearPipelineProgress());
    /** Parses child process output: structured JSON payloads, progress lines, noise filters.
     * @param {Buffer|string} data */
    function onLog(data) {
        if (data.length <= 0) return;
        const str = String(data).replace(/\r/g, "");
        const lines = str.split("\n");
        for (let line of lines) {
            if (!line?.trim().length) continue;
            // Structured log lines carry a JSON payload after a known prefix.
            if (line.startsWith(PIPELINE_STRUCTURED_LOG_PREFIX)) {
                let payload = /** @type {BuildPipelinePayload | null} */ (null);
                try {
                    payload = /** @type {BuildPipelinePayload} */ (JSON.parse(line.slice(PIPELINE_STRUCTURED_LOG_PREFIX.length)));
                }
                catch {
                    // Malformed payload: fall through and treat the line as plain text.
                    payload = null;
                }
                if (payload) {
                    if (payload.event === "progress") {
                        if (payload.phase === "transform") transformStepCount++;
                        if (payload.phase === "compress") compressStepCount++;
                        updatePipelineProgress(`Build pipeline ${payload.phase === "compress" ? "Compressing" : "Transform"} ${payload.target || payload.message || ""}`.trim());
                        continue;
                    }
                    clearPipelineProgress();
                    if (payload.event === "summary") {
                        needleLog("needle-buildpipeline", payload.message || "Build pipeline summary", "log", { showHeader: false, leadingNewline: true });
                        continue;
                    }
                    const level = String(payload.level || "info").toLowerCase();
                    const message = payload.message || line;
                    if (level === "error") {
                        needleLog("needle-buildpipeline", message, "error", { dimBody: false, showHeader: false, leadingNewline: true });
                    }
                    else if (level === "warn") {
                        needleLog("needle-buildpipeline", message, "warn", { dimBody: false, showHeader: false, leadingNewline: true });
                    }
                    else {
                        needleLog("needle-buildpipeline", message, "log", { showHeader: false, leadingNewline: true });
                    }
                    continue;
                }
            }
            // Known-noise lines from the pipeline and its dependencies are suppressed below.
            if (line.startsWith("info: [Needle Build Pipeline]") || line.startsWith("info: No \"gltf\" config found") || line.startsWith("info: No config found") || line.startsWith("Limit cache size to ") || line.startsWith("Current cache size is ")) {
                continue;
            }
            if (line.startsWith("[NEEDLE_progressive] Skipping")) {
                continue;
            }
            if (line.startsWith("objc[") || line.includes("Class GNotificationCenterDelegate is implemented in both") || line.includes("This may cause spurious casting failures and mysterious crashes")) {
                continue;
            }
            if (line.startsWith("INFO: Environment variable 'NEEDLE_TOKTX' not set")) {
                continue;
            }
            if (line.startsWith("metalRough: KHR_materials_pbrSpecularGlossiness not found")) {
                continue;
            }
            if (line.startsWith("WARN: Could not validate image type")) {
                continue;
            }
            // Fallback progress detection for non-structured pipeline output.
            const progressMatch = line.match(/^info:\s*→\s*(Transform|Compressing)\s+(.+)$/i);
            if (progressMatch) {
                if (progressMatch[1].toLowerCase() === "transform") transformStepCount++;
                if (progressMatch[1].toLowerCase() === "compressing") compressStepCount++;
                updatePipelineProgress(`Build pipeline ${progressMatch[1]} ${progressMatch[2]}`);
                continue;
            }
            if (line.startsWith("info: ← Writing to ") || line.startsWith("info: ← Compressing done in ")) {
                continue;
            }
            clearPipelineProgress();
            if (line.startsWith("ERR:")) {
                needleLog("needle-buildpipeline", line, "error", { dimBody: false, showHeader: false, leadingNewline: true });
                continue;
            }
            else if (line.startsWith("WARN:")) {
                needleLog("needle-buildpipeline", line, "warn", { dimBody: false, showHeader: false, leadingNewline: true });
                continue;
            }
            const shouldDim = line.includes("Loaded compressed file from cache");
            needleLog("needle-buildpipeline", line, "log", { showHeader: false, leadingNewline: true, dimBody: shouldDim });
        }
    }
    proc.stdout?.on('data', onLog);
    proc.stderr?.on('data', onLog);
    return new Promise((resolve, reject) => {
        proc.on('exit', (code) => {
            clearPipelineProgress();
            setTransientLogLineCleaner(null);
            if (transformStepCount > 0 || compressStepCount > 0) {
                buildPipelineStepSummary = `✓ Pipeline steps: transformed ${transformStepCount} file(s), compressed ${compressStepCount} file(s)`;
            }
            else buildPipelineStepSummary = null;
            // A missing exit code (process killed by signal or detached) is treated as success.
            if (code === null || code === undefined) {
                if (options?.verbose) log("Process exited with no code - assuming success");
                code = 0;
            }
            const success = code === 0;
            if (!success) log(`Process failed with exit code ${code}`);
            onExit(code || 0);
            resolve(success);
        });
    });
}
/**
 * Recursively counts the files and sums their sizes under a directory.
 * Missing or null directories yield zero counts; unreadable entries are skipped.
 * @param {string | null | undefined} directory
 * @returns {{fileCount: number, totalBytes: number}}
 */
function getDirectoryStats(directory) {
    if (!directory || !existsSync(directory)) {
        return { fileCount: 0, totalBytes: 0 };
    }
    let fileCount = 0;
    let totalBytes = 0;
    for (const entry of readdirSync(directory, { withFileTypes: true })) {
        const entryPath = `${directory}/${entry.name}`;
        if (entry.isDirectory()) {
            // Recurse into subdirectories and accumulate their totals.
            const nested = getDirectoryStats(entryPath);
            fileCount += nested.fileCount;
            totalBytes += nested.totalBytes;
            continue;
        }
        try {
            const size = statSync(entryPath).size;
            fileCount += 1;
            totalBytes += size;
        }
        catch {
            // Entries that can not be stat'ed (e.g. broken symlinks) are ignored.
        }
    }
    return { fileCount, totalBytes };
}