@rushstack/heft

Build all your JavaScript projects the same way: A way that works.

"use strict"; // Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license. // See LICENSE in the project root for license information. var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.HeftActionRunner = void 0; exports.initializeHeft = initializeHeft; exports.ensureCliAbortSignal = ensureCliAbortSignal; exports.runWithLoggingAsync = runWithLoggingAsync; const node_perf_hooks_1 = require("node:perf_hooks"); const node_readline_1 = require("node:readline"); const node_os_1 = __importDefault(require("node:os")); const node_core_library_1 = require("@rushstack/node-core-library"); const terminal_1 = require("@rushstack/terminal"); const operation_graph_1 = require("@rushstack/operation-graph"); const HeftParameterManager_1 = require("../pluginFramework/HeftParameterManager"); const TaskOperationRunner_1 = require("../operations/runners/TaskOperationRunner"); const PhaseOperationRunner_1 = require("../operations/runners/PhaseOperationRunner"); const DeleteFilesPlugin_1 = require("../plugins/DeleteFilesPlugin"); const Constants_1 = require("../utilities/Constants"); function initializeHeft(heftConfiguration, terminal, isVerbose) { // Ensure that verbose is enabled on the terminal if requested. terminalProvider.verboseEnabled // should already be `true` if the `--debug` flag was provided. This is set in HeftCommandLineParser if (heftConfiguration.terminalProvider instanceof terminal_1.ConsoleTerminalProvider) { heftConfiguration.terminalProvider.verboseEnabled = heftConfiguration.terminalProvider.verboseEnabled || isVerbose; } // Log some information about the execution const projectPackageJson = heftConfiguration.projectPackageJson; terminal.writeVerboseLine(`Project: ${projectPackageJson.name}@${projectPackageJson.version}`); terminal.writeVerboseLine(`Project build folder: ${heftConfiguration.buildFolderPath}`); if (heftConfiguration.rigConfig.rigFound) { terminal.writeVerboseLine(`Rig package: ${heftConfiguration.rigConfig.rigPackageName}`); terminal.writeVerboseLine(`Rig profile: ${heftConfiguration.rigConfig.rigProfile}`); } terminal.writeVerboseLine(`Heft version: ${heftConfiguration.heftPackageJson.version}`); terminal.writeVerboseLine(`Node version: ${process.version}`); terminal.writeVerboseLine(''); } let _cliAbortSignal; function ensureCliAbortSignal(terminal) { if (!_cliAbortSignal) { // Set up the ability to terminate the build via Ctrl+C and have it exit gracefully if pressed once, // less gracefully if pressed a second time. const cliAbortController = new AbortController(); _cliAbortSignal = cliAbortController.signal; const cli = (0, node_readline_1.createInterface)(process.stdin, undefined, undefined, true); let forceTerminate = false; cli.on('SIGINT', () => { cli.close(); if (forceTerminate) { terminal.writeErrorLine(`Forcibly terminating.`); process.exit(1); } else { terminal.writeLine(terminal_1.Colorize.yellow(terminal_1.Colorize.bold(`Canceling... 
Press Ctrl+C again to forcibly terminate.`))); } forceTerminate = true; cliAbortController.abort(); }); } return _cliAbortSignal; } async function runWithLoggingAsync(fn, action, loggingManager, terminal, metricsCollector, abortSignal, throwOnFailure) { const startTime = node_perf_hooks_1.performance.now(); loggingManager.resetScopedLoggerErrorsAndWarnings(); let result = operation_graph_1.OperationStatus.Failure; // Execute the action operations let encounteredError = false; try { result = await fn(); if (result === operation_graph_1.OperationStatus.Failure) { encounteredError = true; } } catch (e) { encounteredError = true; throw e; } finally { const warningStrings = loggingManager.getWarningStrings(); const errorStrings = loggingManager.getErrorStrings(); const wasAborted = abortSignal.aborted; const encounteredWarnings = warningStrings.length > 0 || wasAborted; encounteredError = encounteredError || errorStrings.length > 0; await metricsCollector.recordAsync(action.actionName, { encounteredError }, action.getParameterStringMap()); const finishedLoggingWord = encounteredError ? 'Failed' : wasAborted ? 'Aborted' : 'Finished'; const duration = node_perf_hooks_1.performance.now() - startTime; const durationSeconds = Math.round(duration) / 1000; const finishedLoggingLine = `-------------------- ${finishedLoggingWord} (${durationSeconds}s) --------------------`; terminal.writeLine(terminal_1.Colorize.bold((encounteredError ? terminal_1.Colorize.red : encounteredWarnings ? terminal_1.Colorize.yellow : terminal_1.Colorize.green)(finishedLoggingLine))); if (warningStrings.length > 0) { terminal.writeWarningLine(`Encountered ${warningStrings.length} warning${warningStrings.length === 1 ? '' : 's'}`); for (const warningString of warningStrings) { terminal.writeWarningLine(` ${warningString}`); } } if (errorStrings.length > 0) { terminal.writeErrorLine(`Encountered ${errorStrings.length} error${errorStrings.length === 1 ? '' : 's'}`); for (const errorString of errorStrings) { terminal.writeErrorLine(` ${errorString}`); } } } if (encounteredError && throwOnFailure) { throw new node_core_library_1.AlreadyReportedError(); } return result; } class HeftActionRunner { constructor(options) { const { action, internalHeftSession, heftConfiguration, loggingManager, terminal, metricsCollector } = options; this._action = action; this._internalHeftSession = internalHeftSession; this._heftConfiguration = heftConfiguration; this._loggingManager = loggingManager; this._terminal = terminal; this._metricsCollector = metricsCollector; const numberOfCores = heftConfiguration.numberOfCores; // If an explicit parallelism number wasn't provided, then choose a sensible // default. if (node_os_1.default.platform() === 'win32') { // On desktop Windows, some people have complained that their system becomes // sluggish if Node is using all the CPU cores. Leave one thread for // other operations. For CI environments, you can use the "max" argument to use all available cores. this._parallelism = Math.max(numberOfCores - 1, 1); } else { // Unix-like operating systems have more balanced scheduling, so default // to the number of CPU cores this._parallelism = numberOfCores; } } get parameterManager() { if (!this._parameterManager) { throw new node_core_library_1.InternalError(`HeftActionRunner.defineParameters() has not been called.`); } return this._parameterManager; } defineParameters(parameterProvider) { if (!this._parameterManager) { // Use the provided parameter provider if one was provided. 
This is used by the RunAction // to allow for the Heft plugin parameters to be applied as scoped parameters. parameterProvider = parameterProvider || this._action; } else { throw new node_core_library_1.InternalError(`HeftActionParameters.defineParameters() has already been called.`); } const verboseFlag = parameterProvider.defineFlagParameter({ parameterLongName: Constants_1.Constants.verboseParameterLongName, parameterShortName: Constants_1.Constants.verboseParameterShortName, description: 'If specified, log information useful for debugging.' }); const productionFlag = parameterProvider.defineFlagParameter({ parameterLongName: Constants_1.Constants.productionParameterLongName, description: 'If specified, run Heft in production mode.' }); const localesParameter = parameterProvider.defineStringListParameter({ parameterLongName: Constants_1.Constants.localesParameterLongName, argumentName: 'LOCALE', description: 'Use the specified locale for this run, if applicable.' }); let cleanFlagDescription = 'If specified, clean the outputs at the beginning of the lifecycle and before running each phase.'; if (this._action.watch) { cleanFlagDescription = `${cleanFlagDescription} Cleaning will only be performed once for the lifecycle and each phase, ` + `and further incremental runs will not be cleaned for the duration of execution.`; } const cleanFlag = parameterProvider.defineFlagParameter({ parameterLongName: Constants_1.Constants.cleanParameterLongName, description: cleanFlagDescription }); const parameterManager = new HeftParameterManager_1.HeftParameterManager({ getIsDebug: () => this._internalHeftSession.debug, getIsVerbose: () => verboseFlag.value, getIsProduction: () => productionFlag.value, getIsWatch: () => this._action.watch, getLocales: () => localesParameter.values, getIsClean: () => !!(cleanFlag === null || cleanFlag === void 0 ? void 0 : cleanFlag.value) }); // Add all the lifecycle parameters for the action for (const lifecyclePluginDefinition of this._internalHeftSession.lifecycle.pluginDefinitions) { parameterManager.addPluginParameters(lifecyclePluginDefinition); } // Add all the task parameters for the action for (const phase of this._action.selectedPhases) { for (const task of phase.tasks) { parameterManager.addPluginParameters(task.pluginDefinition); } } // Finalize and apply to the CommandLineParameterProvider parameterManager.finalizeParameters(parameterProvider); this._parameterManager = parameterManager; } async executeAsync() { const terminal = this._terminal; // Set the parameter manager on the internal session, which is used to provide the selected // parameters to plugins. Set this in onExecute() since we now know that this action is being // executed, and the session should be populated with the executing parameters. this._internalHeftSession.parameterManager = this.parameterManager; initializeHeft(this._heftConfiguration, terminal, this.parameterManager.defaultParameters.verbose); const operations = this._generateOperations(); const executionManager = new operation_graph_1.OperationExecutionManager(operations); const cliAbortSignal = ensureCliAbortSignal(this._terminal); try { await _startLifecycleAsync(this._internalHeftSession); if (this._action.watch) { const watchLoop = this._createWatchLoop(executionManager); if (process.send) { await watchLoop.runIPCAsync(); } else { await watchLoop.runUntilAbortedAsync(cliAbortSignal, () => { terminal.writeLine(terminal_1.Colorize.bold('Waiting for changes. 
Press CTRL + C to exit...')); terminal.writeLine(''); }); } } else { await this._executeOnceAsync(executionManager, cliAbortSignal); } } finally { // Invoke this here both to ensure it always runs and that it does so after recordMetrics // This is treated as a finalizer for any assets created in lifecycle plugins. // It is the responsibility of the lifecycle plugin to ensure that finish gracefully handles // aborted runs. await _finishLifecycleAsync(this._internalHeftSession); } } _createWatchLoop(executionManager) { const { _terminal: terminal } = this; const watchLoop = new operation_graph_1.WatchLoop({ onBeforeExecute: () => { // Write an empty line to the terminal for separation between iterations. We've already iterated // at this point, so log out that we're about to start a new run. terminal.writeLine(''); terminal.writeLine(terminal_1.Colorize.bold('Starting incremental build...')); }, executeAsync: (state) => { return this._executeOnceAsync(executionManager, state.abortSignal, state.requestRun); }, onRequestRun: (requestor) => { terminal.writeLine(terminal_1.Colorize.bold(`New run requested by ${requestor || 'unknown task'}`)); }, onAbort: () => { terminal.writeLine(terminal_1.Colorize.bold(`Cancelling incremental build...`)); } }); return watchLoop; } async _executeOnceAsync(executionManager, abortSignal, requestRun) { const { taskStart, taskFinish, phaseStart, phaseFinish } = this._internalHeftSession.lifecycle.hooks; // Record this as the start of task execution. this._metricsCollector.setStartTime(); // Execute the action operations return await runWithLoggingAsync(() => { const operationExecutionManagerOptions = { terminal: this._terminal, parallelism: this._parallelism, abortSignal, requestRun, beforeExecuteOperation(operation) { if (taskStart.isUsed()) { taskStart.call({ operation }); } }, afterExecuteOperation(operation) { if (taskFinish.isUsed()) { taskFinish.call({ operation }); } }, beforeExecuteOperationGroup(operationGroup) { if (operationGroup.metadata.phase && phaseStart.isUsed()) { phaseStart.call({ operation: operationGroup }); } }, afterExecuteOperationGroup(operationGroup) { if (operationGroup.metadata.phase && phaseFinish.isUsed()) { phaseFinish.call({ operation: operationGroup }); } } }; return executionManager.executeAsync(operationExecutionManagerOptions); }, this._action, this._loggingManager, this._terminal, this._metricsCollector, abortSignal, !requestRun); } _generateOperations() { const { selectedPhases } = this._action; const operations = new Map(); const operationGroups = new Map(); const internalHeftSession = this._internalHeftSession; let hasWarnedAboutSkippedPhases = false; for (const phase of selectedPhases) { // Warn if any dependencies are excluded from the list of selected phases if (!hasWarnedAboutSkippedPhases) { for (const dependencyPhase of phase.dependencyPhases) { if (!selectedPhases.has(dependencyPhase)) { // Only write once, and write with yellow to make it stand out without writing a warning to stderr hasWarnedAboutSkippedPhases = true; this._terminal.writeLine(terminal_1.Colorize.bold('The provided list of phases does not contain all phase dependencies. 
You may need to run the ' + 'excluded phases manually.')); break; } } } // Create operation for the phase start node const phaseOperation = _getOrCreatePhaseOperation(internalHeftSession, phase, operations, operationGroups); // Create operations for each task for (const task of phase.tasks) { const taskOperation = _getOrCreateTaskOperation(internalHeftSession, task, operations, operationGroups); // Set the phase operation as a dependency of the task operation to ensure the phase operation runs first taskOperation.addDependency(phaseOperation); // Set all dependency tasks as dependencies of the task operation for (const dependencyTask of task.dependencyTasks) { taskOperation.addDependency(_getOrCreateTaskOperation(internalHeftSession, dependencyTask, operations, operationGroups)); } // Set all tasks in a in a phase as dependencies of the consuming phase for (const consumingPhase of phase.consumingPhases) { if (this._action.selectedPhases.has(consumingPhase)) { // Set all tasks in a dependency phase as dependencies of the consuming phase to ensure the dependency // tasks run first const consumingPhaseOperation = _getOrCreatePhaseOperation(internalHeftSession, consumingPhase, operations, operationGroups); consumingPhaseOperation.addDependency(taskOperation); // This is purely to simplify the reported graph for phase circularities consumingPhaseOperation.addDependency(phaseOperation); } } } } return new Set(operations.values()); } } exports.HeftActionRunner = HeftActionRunner; function _getOrCreatePhaseOperation(internalHeftSession, phase, operations, operationGroups) { const key = phase.phaseName; let operation = operations.get(key); if (!operation) { let group = operationGroups.get(phase.phaseName); if (!group) { group = new operation_graph_1.OperationGroupRecord(phase.phaseName, { phase }); operationGroups.set(phase.phaseName, group); } // Only create the operation. Dependencies are hooked up separately operation = new operation_graph_1.Operation({ group, name: phase.phaseName, runner: new PhaseOperationRunner_1.PhaseOperationRunner({ phase, internalHeftSession }) }); operations.set(key, operation); } return operation; } function _getOrCreateTaskOperation(internalHeftSession, task, operations, operationGroups) { const key = `${task.parentPhase.phaseName}.${task.taskName}`; let operation = operations.get(key); if (!operation) { const group = operationGroups.get(task.parentPhase.phaseName); if (!group) { throw new node_core_library_1.InternalError(`Task ${task.taskName} in phase ${task.parentPhase.phaseName} has no group. 
This should not happen.`); } operation = new operation_graph_1.Operation({ group, runner: new TaskOperationRunner_1.TaskOperationRunner({ internalHeftSession, task }), name: task.taskName, metadata: { task, phase: task.parentPhase } }); operations.set(key, operation); } return operation; } async function _startLifecycleAsync(internalHeftSession) { const { clean } = internalHeftSession.parameterManager.defaultParameters; // Load and apply the lifecycle plugins const lifecycle = internalHeftSession.lifecycle; const { lifecycleLogger } = lifecycle; await lifecycle.applyPluginsAsync(lifecycleLogger.terminal); if (lifecycleLogger.hasErrors) { throw new node_core_library_1.AlreadyReportedError(); } if (clean) { const startTime = node_perf_hooks_1.performance.now(); lifecycleLogger.terminal.writeVerboseLine('Starting clean'); // Grab the additional clean operations from the phase const deleteOperations = []; // Delete all temp folders for tasks by default for (const pluginDefinition of lifecycle.pluginDefinitions) { const lifecycleSession = await lifecycle.getSessionForPluginDefinitionAsync(pluginDefinition); deleteOperations.push({ sourcePath: lifecycleSession.tempFolderPath }); } // Create the options and provide a utility method to obtain paths to delete const cleanHookOptions = { addDeleteOperations: (...deleteOperationsToAdd) => deleteOperations.push(...deleteOperationsToAdd) }; // Run the plugin clean hook if (lifecycle.hooks.clean.isUsed()) { try { await lifecycle.hooks.clean.promise(cleanHookOptions); } catch (e) { // Log out using the clean logger, and return an error status if (!(e instanceof node_core_library_1.AlreadyReportedError)) { lifecycleLogger.emitError(e); } throw new node_core_library_1.AlreadyReportedError(); } } // Delete the files if any were specified if (deleteOperations.length) { const rootFolderPath = internalHeftSession.heftConfiguration.buildFolderPath; await (0, DeleteFilesPlugin_1.deleteFilesAsync)(rootFolderPath, deleteOperations, lifecycleLogger.terminal); } lifecycleLogger.terminal.writeVerboseLine(`Finished clean (${node_perf_hooks_1.performance.now() - startTime}ms)`); if (lifecycleLogger.hasErrors) { throw new node_core_library_1.AlreadyReportedError(); } } // Run the start hook if (lifecycle.hooks.toolStart.isUsed()) { const lifecycleToolStartHookOptions = {}; await lifecycle.hooks.toolStart.promise(lifecycleToolStartHookOptions); if (lifecycleLogger.hasErrors) { throw new node_core_library_1.AlreadyReportedError(); } } } async function _finishLifecycleAsync(internalHeftSession) { const lifecycleToolFinishHookOptions = {}; await internalHeftSession.lifecycle.hooks.toolFinish.promise(lifecycleToolFinishHookOptions); } //# sourceMappingURL=HeftActionRunner.js.map