nx
Version:
1,046 lines (1,045 loc) • 49.1 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TaskOrchestrator = void 0;
exports.getThreadPoolSize = getThreadPoolSize;
const events_1 = require("events");
const fs_1 = require("fs");
const pc = require("picocolors");
const path_1 = require("path");
const perf_hooks_1 = require("perf_hooks");
const run_commands_impl_1 = require("../executors/run-commands/run-commands.impl");
const hash_task_1 = require("../hasher/hash-task");
const task_graph_utils_1 = require("./task-graph-utils");
const task_hasher_1 = require("../hasher/task-hasher");
const native_1 = require("../native");
const db_connection_1 = require("../utils/db-connection");
const output_1 = require("../utils/output");
const params_1 = require("../utils/params");
const workspace_root_1 = require("../utils/workspace-root");
const exit_codes_1 = require("../utils/exit-codes");
const cache_1 = require("./cache");
const forked_process_task_runner_1 = require("./forked-process-task-runner");
const is_tui_enabled_1 = require("./is-tui-enabled");
const pseudo_terminal_1 = require("./pseudo-terminal");
const output_prefix_1 = require("./running-tasks/output-prefix");
const noop_child_process_1 = require("./running-tasks/noop-child-process");
const task_env_1 = require("./task-env");
const tasks_schedule_1 = require("./tasks-schedule");
const utils_1 = require("./utils");
const shared_running_task_1 = require("./running-tasks/shared-running-task");
class TaskOrchestrator {
// endregion internal state
constructor(hasher, initiatingProject, initiatingTasks, projectGraph, taskGraph, nxJson, options, bail, daemon, outputStyle, taskGraphForHashing = taskGraph) {
    this.hasher = hasher;
    this.initiatingProject = initiatingProject;
    this.initiatingTasks = initiatingTasks;
    this.projectGraph = projectGraph;
    this.taskGraph = taskGraph;
    this.nxJson = nxJson;
    this.options = options;
    this.bail = bail;
    this.daemon = daemon;
    this.outputStyle = outputStyle;
    // Hashing may use a different graph than execution (defaults to the same).
    this.taskGraphForHashing = taskGraphForHashing;
    this.taskDetails = (0, hash_task_1.getTaskDetails)();
    this.cache = (0, cache_1.getCache)(this.options);
    this.tuiEnabled = (0, is_tui_enabled_1.isTuiEnabled)();
    this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options, this.tuiEnabled);
    // Cross-process registry of running continuous tasks; unavailable in WASM.
    this.runningTasksService = !native_1.IS_WASM
        ? new native_1.RunningTasksService((0, db_connection_1.getDbConnection)())
        : null;
    this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
    // region internal state
    this.batchEnv = (0, task_env_1.getEnvVariablesForBatchProcess)(this.options.skipNxCache, this.options.captureStderr);
    // task id -> ids of tasks that depend on it (used to skip dependents on failure)
    this.reverseTaskDeps = (0, utils_1.calculateReverseDeps)(this.taskGraph);
    this.initializingTaskIds = new Set(this.initiatingTasks.map((t) => t.id));
    // task id -> promise resolving to the task-specific env (see processTask)
    this.processedTasks = new Map();
    // task id -> terminal status, filled in as tasks finish; returned from run()
    this.completedTasks = {};
    // resolvers for worker loops that are blocked waiting for schedulable work
    this.waitingForTasks = [];
    // output group slots; true = slot in use (see closeGroup/openGroup)
    this.groups = [];
    this.continuousTasksStarted = 0;
    this.bailed = false;
    this.resolveStopPromise = null;
    this.stopRequested = false;
    this.runningContinuousTasks = new Map();
    this.runningRunCommandsTasks = new Map();
    this.runningDiscreteTasks = new Map();
    // task id -> promise that resolves once the task's exit has been fully handled
    this.discreteTaskExitHandled = new Map();
    this.continuousTaskExitHandled = new Map();
    // set once cleanup starts so repeated cleanup() calls share a single run
    this.cleanupPromise = null;
}
async init() {
this.setupSignalHandlers();
// Init the ForkedProcessTaskRunner, TasksSchedule, and Cache
await Promise.all([
this.forkedProcessTaskRunner.init(),
this.tasksSchedule.init().then(() => {
return this.tasksSchedule.scheduleNextTasks();
}),
'init' in this.cache ? this.cache.init() : null,
]);
// Pass estimated timings to TUI after TasksSchedule is initialized
if (this.tuiEnabled) {
const estimatedTimings = this.tasksSchedule.getEstimatedTaskTimings();
this.options.lifeCycle.setEstimatedTaskTimings(estimatedTimings);
}
}
async run() {
    await this.init();
    perf_hooks_1.performance.mark('task-execution:start');
    // Size the worker pools: `discrete` loops for one-shot tasks and
    // `continuous` loops for long-running ones.
    const { discrete, continuous, total } = getThreadPoolSize(this.options, this.taskGraph);
    const threads = [];
    // Each worker loop may attach stdio listeners; raise the limits so Node
    // does not emit max-listeners warnings.
    process.stdout.setMaxListeners(total + events_1.defaultMaxListeners);
    process.stderr.setMaxListeners(total + events_1.defaultMaxListeners);
    process.setMaxListeners(total + events_1.defaultMaxListeners);
    // initial seeding of the queue
    for (let i = 0; i < discrete; ++i) {
        threads.push(this.executeDiscreteTaskLoop());
    }
    for (let i = 0; i < continuous; ++i) {
        threads.push(this.executeContinuousTaskLoop(continuous));
    }
    // Run until every worker loop drains, or a stop is requested — via the
    // TUI's forced-shutdown callback, or (non-TUI) via resolveStopPromise.
    await Promise.race([
        Promise.all(threads),
        ...(this.tuiEnabled
            ? [
                new Promise((resolve) => {
                    this.options.lifeCycle.registerForcedShutdownCallback(() => {
                        // The user force quit the TUI with ctrl+c, so proceed onto cleanup
                        this.stopRequested = true;
                        resolve(undefined);
                    });
                }),
            ]
            : [
                new Promise((resolve) => {
                    this.resolveStopPromise = resolve;
                }),
            ]),
    ]);
    perf_hooks_1.performance.mark('task-execution:end');
    perf_hooks_1.performance.measure('task-execution', 'task-execution:start', 'task-execution:end');
    // Skip cache maintenance when shutting down early so we exit quickly.
    if (!this.stopRequested) {
        this.cache.removeOldCacheRecords();
    }
    await this.cleanup();
    // Map of task id -> terminal status, consumed by the task runner.
    return this.completedTasks;
}
nextBatch() {
return this.tasksSchedule.nextBatch();
}
async executeDiscreteTaskLoop() {
const doNotSkipCache = this.options.skipNxCache === false ||
this.options.skipNxCache === undefined;
while (true) {
// completed all the tasks
if (!this.tasksSchedule.hasTasks() || this.bailed || this.stopRequested) {
return null;
}
this.processAllScheduledTasks();
const batch = this.nextBatch();
if (batch) {
const groupId = this.closeGroup();
await this.applyFromCacheOrRunBatch(doNotSkipCache, batch, groupId);
this.openGroup(groupId);
continue;
}
const task = this.tasksSchedule.nextTask((t) => !t.continuous);
if (task) {
const groupId = this.closeGroup();
await this.applyFromCacheOrRunTask(doNotSkipCache, task, groupId);
this.openGroup(groupId);
continue;
}
// block until some other task completes, then try again
await new Promise((res) => this.waitingForTasks.push(res));
}
}
async executeContinuousTaskLoop(continuousTaskCount) {
    while (true) {
        // completed all the tasks
        if (!this.tasksSchedule.hasTasks() || this.bailed || this.stopRequested) {
            return null;
        }
        this.processAllScheduledTasks();
        const task = this.tasksSchedule.nextTask((t) => t.continuous);
        if (task) {
            // Use a separate groupId space (parallel..parallel+N) so continuous tasks
            // don't consume discrete group slots
            const groupId = this.options.parallel + this.continuousTasksStarted++;
            const runningTask = await this.startContinuousTask(task, groupId);
            // If the user requested this task directly, wait until its exit has
            // been fully handled before proceeding.
            if (this.initializingTaskIds.has(task.id)) {
                await this.continuousTaskExitHandled.get(task.id);
            }
            // all continuous tasks have been started, thread can exit
            if (this.continuousTasksStarted >= continuousTaskCount) {
                return null;
            }
            continue;
        }
        // all continuous tasks have been started, thread can exit
        if (this.continuousTasksStarted >= continuousTaskCount) {
            return null;
        }
        // block until some other task completes, then try again
        await new Promise((res) => this.waitingForTasks.push(res));
    }
}
processTasks(taskIds) {
for (const taskId of taskIds) {
// Task is already handled or being handled
if (!this.processedTasks.has(taskId)) {
this.processedTasks.set(taskId, this.processTask(taskId));
}
}
}
// region Processing Scheduled Tasks
async processTask(taskId) {
const task = this.taskGraph.tasks[taskId];
const taskSpecificEnv = (0, task_env_1.getTaskSpecificEnv)(task, this.projectGraph);
if (!task.hash) {
await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraphForHashing, task, taskSpecificEnv, this.taskDetails);
}
await this.options.lifeCycle.scheduleTask(task);
return taskSpecificEnv;
}
processAllScheduledTasks() {
const { scheduledTasks } = this.tasksSchedule.getAllScheduledTasks();
this.processTasks(scheduledTasks);
}
// endregion Processing Scheduled Tasks
// region Applying Cache
async applyCachedResults(tasks) {
const cacheableTasks = tasks.filter((t) => (0, utils_1.isCacheableTask)(t, this.options));
const res = await Promise.all(cacheableTasks.map((t) => this.applyCachedResult(t)));
return res.filter((r) => r !== null);
}
async applyCachedResult(task) {
    // Only a successful (code 0) cached run counts as a hit; anything else
    // falls through to actual execution.
    const cachedResult = await this.cache.get(task);
    if (!cachedResult || cachedResult.code !== 0)
        return null;
    const outputs = task.outputs;
    const shouldCopyOutputsFromCache = 
    // The task must declare output files to restore
    !!outputs.length &&
        // Remote caches are restored to output dirs when applied and using db cache
        (!cachedResult.remote || !(0, cache_1.dbCacheEnabled)()) &&
        // The on-disk outputs no longer match the recorded hash (or there is
        // no daemon to check, in which case we always copy)
        (await this.shouldCopyOutputsFromCache(outputs, task.hash));
    if (shouldCopyOutputsFromCache) {
        await this.cache.copyFilesFromCache(task.hash, cachedResult, outputs);
    }
    // 'local-cache-kept-existing' means the on-disk outputs were already up to
    // date, so nothing was copied.
    const status = cachedResult.remote
        ? 'remote-cache'
        : shouldCopyOutputsFromCache
            ? 'local-cache'
            : 'local-cache-kept-existing';
    this.options.lifeCycle.printTaskTerminalOutput(task, status, cachedResult.terminalOutput);
    return {
        code: cachedResult.code,
        task,
        status,
    };
}
// endregion Applying Cache
// region Batch
/**
* Hash all batch tasks and resolve cache hits topologically.
*
* Walks the task graph level by level. Every task gets a preliminary hash
* (so startTasks always has a valid hash for Cloud). Tasks with depsOutputs
* whose deps weren't cached are ineligible for cache lookup but still
* receive a preliminary hash — they'll be re-hashed after execution.
*/
async applyBatchCachedResults(batch, doNotSkipCache, groupId) {
    const cachedResults = [];
    const needsRehashAfterExecution = new Set();
    const tasks = Object.values(batch.taskGraph.tasks);
    if (!doNotSkipCache) {
        // Cache skipped — just hash so startTasks has valid hashes
        await this.hashBatchTasks(tasks);
        return { cachedResults, needsRehashAfterExecution };
    }
    const nonCachedTaskIds = new Set();
    // Walk the batch's graph level by level (dependencies before dependents).
    await (0, task_graph_utils_1.walkTaskGraph)(batch.taskGraph, async (rootTaskIds) => {
        const rootTasks = rootTaskIds.map((id) => batch.taskGraph.tasks[id]);
        await this.hashBatchTasks(rootTasks);
        const eligible = [];
        for (const task of rootTasks) {
            const depIds = batch.taskGraph.dependencies[task.id];
            const hasNonCachedDep = depIds.some((id) => nonCachedTaskIds.has(id));
            // A task whose hash includes its deps' outputs can't do a valid
            // cache lookup until those deps have actually run — defer it and
            // mark it for re-hashing after execution.
            if (hasNonCachedDep &&
                (0, task_hasher_1.getInputs)(task, this.projectGraph, this.nxJson).depsOutputs.length > 0) {
                nonCachedTaskIds.add(task.id);
                needsRehashAfterExecution.add(task.id);
            }
            else {
                eligible.push(task);
            }
        }
        if (eligible.length > 0) {
            const cacheResults = await this.applyCachedResults(eligible);
            const cachedIds = new Set(cacheResults.map((r) => r.task.id));
            cachedResults.push(...cacheResults);
            if (cacheResults.length > 0) {
                // Run hits through the full lifecycle (schedule -> start ->
                // post) so later levels see them as completed.
                const cachedTasks = cacheResults.map((r) => r.task);
                await Promise.all(cachedTasks.map((task) => this.options.lifeCycle.scheduleTask(task)));
                await this.preRunSteps(cachedTasks, { groupId });
                await this.postRunSteps(cacheResults, doNotSkipCache, { groupId });
            }
            // Misses make their dependents ineligible for early lookup.
            for (const task of eligible) {
                if (!cachedIds.has(task.id)) {
                    nonCachedTaskIds.add(task.id);
                }
            }
        }
    });
    return { cachedResults, needsRehashAfterExecution };
}
async hashBatchTasks(tasks) {
await (0, hash_task_1.hashTasks)(this.hasher, this.projectGraph, this.taskGraphForHashing, this.batchEnv, this.taskDetails, tasks);
}
async applyFromCacheOrRunBatch(doNotSkipCache, batch, groupId) {
    const applyFromCacheOrRunBatchStart = perf_hooks_1.performance.mark('TaskOrchestrator-apply-from-cache-or-run-batch:start');
    const taskEntries = Object.entries(batch.taskGraph.tasks);
    const tasks = taskEntries.map(([, task]) => task);
    // Optional lifecycle hook: announce the batch and its member tasks.
    this.options.lifeCycle.registerRunningBatch?.(batch.id, {
        executorName: batch.executorName,
        taskIds: Object.keys(batch.taskGraph.tasks),
    });
    // Phase 1: hash everything and resolve cache hits topologically.
    const { cachedResults, needsRehashAfterExecution } = await this.applyBatchCachedResults(batch, doNotSkipCache, groupId);
    // Schedule and start non-cached tasks (cached tasks were already
    // started and completed inside applyBatchCachedResults)
    const cachedTaskIds = new Set(cachedResults.map((r) => r.task.id));
    const nonCachedTasks = tasks.filter((t) => !cachedTaskIds.has(t.id));
    if (nonCachedTasks.length > 0) {
        await Promise.all(nonCachedTasks.map((task) => this.options.lifeCycle.scheduleTask(task)));
        await this.preRunSteps(nonCachedTasks, { groupId });
    }
    // Phase 2: Run non-cached tasks, then re-hash depsOutputs tasks
    const taskIdsToSkip = cachedResults.map((r) => r.task.id);
    let batchResults = [];
    if (taskIdsToSkip.length < tasks.length) {
        const runGraph = (0, utils_1.removeTasksFromTaskGraph)(batch.taskGraph, taskIdsToSkip);
        batchResults = await this.runBatch({
            id: batch.id,
            executorName: batch.executorName,
            taskGraph: runGraph,
        }, this.batchEnv, groupId);
        // Re-hash depsOutputs tasks — their dep outputs are now on disk
        const tasksToRehash = batchResults
            .filter((r) => needsRehashAfterExecution.has(r.task.id) &&
            (r.status === 'success' || r.status === 'failure'))
            .map((r) => r.task);
        if (tasksToRehash.length > 0) {
            await this.hashBatchTasks(tasksToRehash);
        }
    }
    if (batchResults.length > 0) {
        await this.postRunSteps(batchResults, doNotSkipCache, { groupId });
    }
    // Update batch status based on all task results
    const hasFailures = taskEntries.some(([taskId]) => {
        const status = this.completedTasks[taskId];
        return status === 'failure' || status === 'skipped';
    });
    this.options.lifeCycle.setBatchStatus?.(batch.id, hasFailures ? "Failure" /* BatchStatus.Failure */ : "Success" /* BatchStatus.Success */);
    this.forkedProcessTaskRunner.cleanUpBatchProcesses();
    const tasksCompleted = taskEntries.filter(([taskId]) => this.completedTasks[taskId]);
    // Batch is still not done, run it again
    // (recurse with the remaining tasks until everything has a status).
    if (tasksCompleted.length !== taskEntries.length) {
        await this.applyFromCacheOrRunBatch(doNotSkipCache, {
            id: batch.id,
            executorName: batch.executorName,
            taskGraph: (0, utils_1.removeTasksFromTaskGraph)(batch.taskGraph, tasksCompleted.map(([taskId]) => taskId)),
        }, groupId);
    }
    // Batch is done, mark it as completed
    const applyFromCacheOrRunBatchEnd = perf_hooks_1.performance.mark('TaskOrchestrator-apply-from-cache-or-run-batch:end');
    perf_hooks_1.performance.measure('TaskOrchestrator-apply-from-cache-or-run-batch', applyFromCacheOrRunBatchStart.name, applyFromCacheOrRunBatchEnd.name);
    return [...cachedResults, ...batchResults];
}
async runBatch(batch, env, groupId) {
    const runBatchStart = perf_hooks_1.performance.mark('TaskOrchestrator-run-batch:start');
    try {
        const batchProcess = await this.forkedProcessTaskRunner.forkProcessForBatch(batch, this.projectGraph, this.taskGraph, env);
        // Stream output from batch process to the batch
        batchProcess.onOutput((output) => {
            this.options.lifeCycle.appendBatchOutput?.(batch.id, output);
        });
        // Stream task results as they complete
        // Heavy operations (caching, scheduling, complete) happen at batch-end in postRunSteps
        batchProcess.onTaskResults((taskId, result) => {
            const task = this.taskGraph.tasks[taskId];
            const status = result.success ? 'success' : 'failure';
            this.options.lifeCycle.printTaskTerminalOutput(task, status, result.terminalOutput ?? '');
            if (result.terminalOutput) {
                this.options.lifeCycle.appendTaskOutput(taskId, result.terminalOutput, false);
            }
            // Timing comes from the batch process, not from the orchestrator.
            task.startTime = result.startTime;
            task.endTime = result.endTime;
            if (result.startTime && result.endTime) {
                this.options.lifeCycle.setTaskTiming?.(taskId, result.startTime, result.endTime);
            }
            this.options.lifeCycle.setTaskStatus(taskId, (0, native_1.parseTaskStatus)(status));
        });
        // Wait for the whole batch to finish, then map raw results into the
        // orchestrator's result shape.
        const results = await batchProcess.getResults();
        const batchResultEntries = Object.entries(results);
        return batchResultEntries.map(([taskId, result]) => {
            const task = this.taskGraph.tasks[taskId];
            task.startTime = result.startTime;
            task.endTime = result.endTime;
            return {
                code: result.success ? 0 : 1,
                task,
                status: (result.success ? 'success' : 'failure'),
                terminalOutput: result.terminalOutput,
            };
        });
    }
    catch (e) {
        // If the batch process failed because we're stopping, report the tasks
        // as 'stopped' (with no output) rather than 'failure'.
        const isBatchStopping = this.stopRequested;
        return Object.keys(batch.taskGraph.tasks).map((taskId) => {
            const task = this.taskGraph.tasks[taskId];
            if (isBatchStopping) {
                task.endTime = Date.now();
            }
            return {
                task,
                code: 1,
                status: (isBatchStopping ? 'stopped' : 'failure'),
                terminalOutput: isBatchStopping ? '' : (e.stack ?? e.message ?? ''),
            };
        });
    }
    finally {
        const runBatchEnd = perf_hooks_1.performance.mark('TaskOrchestrator-run-batch:end');
        perf_hooks_1.performance.measure('TaskOrchestrator-run-batch', runBatchStart.name, runBatchEnd.name);
    }
}
// endregion Batch
// region Single Task
async applyFromCacheOrRunTask(doNotSkipCache, task, groupId) {
    // Wait for task to be processed
    const taskSpecificEnv = await this.processedTasks.get(task.id);
    await this.preRunSteps([task], { groupId });
    const pipeOutput = await this.pipeOutputCapture(task);
    // obtain metadata
    const temporaryOutputPath = this.cache.temporaryOutputPath(task);
    const streamOutput = this.outputStyle === 'static'
        ? false
        : (0, utils_1.shouldStreamOutput)(task, this.initiatingProject);
    // Piped capture forces FORCE_COLOR (defaulting to 'true') so child output
    // keeps colors; non-piped capture writes to the temporary output file.
    let env = pipeOutput
        ? (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, process.env.FORCE_COLOR === undefined
            ? 'true'
            : process.env.FORCE_COLOR, this.options.skipNxCache, this.options.captureStderr, null, null)
        : (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, undefined, this.options.skipNxCache, this.options.captureStderr, temporaryOutputPath, streamOutput);
    let results = doNotSkipCache ? await this.applyCachedResults([task]) : [];
    // the task wasn't cached
    let resolveDiscreteExit;
    if (results.length === 0) {
        // Expose a promise that resolves once this task's exit has been fully
        // handled (consumed by cleanup before run() returns).
        const discreteExitHandled = new Promise((r) => (resolveDiscreteExit = r));
        this.discreteTaskExitHandled.set(task.id, discreteExitHandled);
        const childProcess = await this.runTask(task, streamOutput, env, temporaryOutputPath, pipeOutput);
        this.runningDiscreteTasks.set(task.id, {
            runningTask: childProcess,
            stopping: false,
        });
        const { code, terminalOutput } = await childProcess.getResults();
        // If cleanup flagged this task as stopping, report 'stopped' regardless
        // of the exit code.
        const isStopping = this.runningDiscreteTasks.get(task.id)?.stopping ?? false;
        this.runningDiscreteTasks.delete(task.id);
        results.push({
            task,
            code,
            status: isStopping ? 'stopped' : code === 0 ? 'success' : 'failure',
            terminalOutput,
        });
    }
    try {
        await this.postRunSteps(results, doNotSkipCache, { groupId });
    }
    finally {
        // Always resolve the exit-handled promise, even if postRunSteps threw.
        if (resolveDiscreteExit) {
            this.discreteTaskExitHandled.delete(task.id);
            resolveDiscreteExit();
        }
    }
    return results[0];
}
async runTask(task, streamOutput, env, temporaryOutputPath, pipeOutput) {
    // Prefix streamed lines with the project name when NX_PREFIX_OUTPUT is on
    // (the TUI renders per-task panes, so prefixing is unnecessary there).
    const shouldPrefix = streamOutput &&
        process.env.NX_PREFIX_OUTPUT === 'true' &&
        !this.tuiEnabled;
    const targetConfiguration = (0, utils_1.getTargetConfigurationForTask)(task, this.projectGraph);
    // Fast path: run `nx:run-commands` in-process instead of forking, unless
    // explicitly disabled via NX_RUN_COMMANDS_DIRECTLY=false.
    if (process.env.NX_RUN_COMMANDS_DIRECTLY !== 'false' &&
        targetConfiguration.executor === 'nx:run-commands') {
        try {
            const { schema } = (0, utils_1.getExecutorForTask)(task, this.projectGraph);
            const combinedOptions = (0, params_1.combineOptionsForExecutor)(task.overrides, task.target.configuration ?? targetConfiguration.defaultConfiguration, targetConfiguration, schema, task.target.project, (0, path_1.relative)(task.projectRoot ?? workspace_root_1.workspaceRoot, process.cwd()), process.env.NX_VERBOSE_LOGGING === 'true');
            // Env entries declared in the executor options win over computed env.
            if (combinedOptions.env) {
                env = {
                    ...env,
                    ...combinedOptions.env,
                };
            }
            if (streamOutput) {
                const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
                output_1.output.logCommand(args.join(' '));
            }
            const runCommandsOptions = {
                ...combinedOptions,
                env,
                // A pty is used under the TUI, or when this is the only work left
                // (no scheduled tasks and no continuous tasks running).
                usePty: this.tuiEnabled ||
                    (!this.tasksSchedule.hasTasks() &&
                        this.runningContinuousTasks.size === 0),
                streamOutput: streamOutput && !shouldPrefix,
            };
            const runningTask = await (0, run_commands_impl_1.runCommands)(runCommandsOptions, {
                root: workspace_root_1.workspaceRoot, // only root is needed in runCommands
            }, task.id);
            this.runningRunCommandsTasks.set(task.id, runningTask);
            runningTask.onExit(() => {
                this.runningRunCommandsTasks.delete(task.id);
            });
            if (shouldPrefix) {
                const color = (0, output_prefix_1.getColor)(task.target.project);
                const formattedPrefix = pc.bold(color(`${task.target.project}:`));
                runningTask.onOutput((chunk) => {
                    (0, output_prefix_1.writePrefixedLines)(chunk, formattedPrefix);
                });
            }
            else if (this.tuiEnabled) {
                if (runningTask instanceof pseudo_terminal_1.PseudoTtyProcess) {
                    // Hand the pseudo terminal's parser/writer pair to the TUI so it
                    // can render (and interact with) the task's terminal
                    this.options.lifeCycle.registerRunningTask(task.id, runningTask.getParserAndWriter());
                    runningTask.onOutput((output) => {
                        this.options.lifeCycle.appendTaskOutput(task.id, output, true);
                    });
                }
                else {
                    this.options.lifeCycle.registerRunningTaskWithEmptyParser(task.id);
                    runningTask.onOutput((output) => {
                        this.options.lifeCycle.appendTaskOutput(task.id, output, false);
                    });
                }
            }
            if (!streamOutput && !shouldPrefix) {
                // TODO: shouldn't this be checking if the task is continuous before writing anything to disk or calling printTaskTerminalOutput?
                runningTask.onExit((code, terminalOutput) => {
                    this.options.lifeCycle.printTaskTerminalOutput(task, code === 0 ? 'success' : 'failure', terminalOutput);
                    (0, fs_1.writeFileSync)(temporaryOutputPath, terminalOutput);
                });
            }
            return runningTask;
        }
        catch (e) {
            if (process.env.NX_VERBOSE_LOGGING === 'true') {
                console.error(e);
            }
            else {
                console.error(e.message);
            }
            // Persist the failure output and surface it as a no-op child
            // process with a failing exit code.
            const terminalOutput = e.stack ?? e.message ?? '';
            (0, fs_1.writeFileSync)(temporaryOutputPath, terminalOutput);
            return new noop_child_process_1.NoopChildProcess({
                code: 1,
                terminalOutput,
            });
        }
    }
    else if (targetConfiguration.executor === 'nx:noop') {
        // `nx:noop` succeeds immediately with empty output.
        (0, fs_1.writeFileSync)(temporaryOutputPath, '');
        return new noop_child_process_1.NoopChildProcess({
            code: 0,
            terminalOutput: '',
        });
    }
    else {
        // cache prep
        const runningTask = await this.runTaskInForkedProcess(task, env, pipeOutput, temporaryOutputPath, streamOutput);
        if (this.tuiEnabled) {
            if (runningTask instanceof pseudo_terminal_1.PseudoTtyProcess) {
                // Hand the pseudo terminal's parser/writer pair to the TUI so it
                // can render (and interact with) the task's terminal
                this.options.lifeCycle.registerRunningTask(task.id, runningTask.getParserAndWriter());
                runningTask.onOutput((output) => {
                    this.options.lifeCycle.appendTaskOutput(task.id, output, true);
                });
            }
            else if ('onOutput' in runningTask &&
                typeof runningTask.onOutput === 'function') {
                // Register task that can provide progressive output but isn't interactive (e.g., NodeChildProcessWithNonDirectOutput)
                this.options.lifeCycle.registerRunningTaskWithEmptyParser(task.id);
                runningTask.onOutput((output) => {
                    this.options.lifeCycle.appendTaskOutput(task.id, output, false);
                });
            }
            else {
                // Fallback for tasks that don't support progressive output
                this.options.lifeCycle.registerRunningTaskWithEmptyParser(task.id);
            }
        }
        return runningTask;
    }
}
async runTaskInForkedProcess(task, env, pipeOutput, temporaryOutputPath, streamOutput) {
try {
const usePtyFork = process.env.NX_NATIVE_COMMAND_RUNNER !== 'false';
// Disable the pseudo terminal if this is a run-many or when running a continuous task as part of a run-one
const disablePseudoTerminal = !this.tuiEnabled && (!this.initiatingProject || task.continuous);
// execution
const childProcess = usePtyFork
? await this.forkedProcessTaskRunner.forkProcess(task, {
temporaryOutputPath,
streamOutput,
pipeOutput,
taskGraph: this.taskGraph,
env,
disablePseudoTerminal,
})
: await this.forkedProcessTaskRunner.forkProcessLegacy(task, {
temporaryOutputPath,
streamOutput,
pipeOutput,
taskGraph: this.taskGraph,
env,
});
return childProcess;
}
catch (e) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.error(e);
}
return new noop_child_process_1.NoopChildProcess({
code: 1,
terminalOutput: e.stack ?? e.message ?? '',
});
}
}
async startContinuousTask(task, groupId) {
    // If another Nx process is already running this continuous task, attach to
    // it as a shared task instead of starting a second copy.
    if (this.runningTasksService &&
        this.runningTasksService.getRunningTasks([task.id]).length) {
        await this.preRunSteps([task], { groupId });
        if (this.tuiEnabled) {
            this.options.lifeCycle.setTaskStatus(task.id, 8 /* NativeTaskStatus.Shared */);
        }
        const runningTask = new shared_running_task_1.SharedRunningTask(this.runningTasksService, task.id);
        this.runningContinuousTasks.set(task.id, {
            runningTask,
            groupId,
            // false: the other process owns the registry entry, not us
            ownsRunningTasksService: false,
        });
        this.continuousTaskExitHandled.set(task.id, new Promise((resolve) => {
            runningTask.onExit(async (code) => {
                await this.handleContinuousTaskExit(code, task, groupId, false);
                resolve();
            });
        }));
        // task is already running by another process, we schedule the next tasks
        // and release the threads
        await this.scheduleNextTasksAndReleaseThreads();
        return runningTask;
    }
    const taskSpecificEnv = await this.processedTasks.get(task.id);
    await this.preRunSteps([task], { groupId });
    const pipeOutput = await this.pipeOutputCapture(task);
    // obtain metadata
    const temporaryOutputPath = this.cache.temporaryOutputPath(task);
    const streamOutput = this.outputStyle === 'static'
        ? false
        : (0, utils_1.shouldStreamOutput)(task, this.initiatingProject);
    // Piped capture forces FORCE_COLOR (defaulting to 'true'); non-piped
    // capture writes to the temporary output file instead.
    let env = pipeOutput
        ? (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, process.env.FORCE_COLOR === undefined
            ? 'true'
            : process.env.FORCE_COLOR, this.options.skipNxCache, this.options.captureStderr, null, null)
        : (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, undefined, this.options.skipNxCache, this.options.captureStderr, temporaryOutputPath, streamOutput);
    const childProcess = await this.runTask(task, streamOutput, env, temporaryOutputPath, pipeOutput);
    // Register the task so other Nx processes can attach to it as shared.
    this.runningTasksService?.addRunningTask(task.id);
    this.runningContinuousTasks.set(task.id, {
        runningTask: childProcess,
        groupId,
        ownsRunningTasksService: true,
    });
    // Promise that resolves once the task's exit has been fully handled.
    this.continuousTaskExitHandled.set(task.id, new Promise((resolve) => {
        childProcess.onExit(async (code) => {
            await this.handleContinuousTaskExit(code, task, groupId, true);
            resolve();
        });
    }));
    await this.scheduleNextTasksAndReleaseThreads();
    return childProcess;
}
// endregion Single Task
// region Lifecycle
async preRunSteps(tasks, metadata) {
const now = Date.now();
for (const task of tasks) {
task.startTime = now;
}
await this.options.lifeCycle.startTasks(tasks, metadata);
}
async postRunSteps(results, doNotSkipCache, { groupId }) {
    const now = Date.now();
    for (const { task } of results) {
        // Only set endTime as fallback (batch provides timing via result.task)
        task.endTime ??= now;
        if (!this.stopRequested) {
            // Record the outputs hash with the daemon so a later run can tell
            // whether outputs were touched (see shouldCopyOutputsFromCache).
            await this.recordOutputsHash(task);
        }
    }
    if (doNotSkipCache && !this.stopRequested) {
        // cache the results
        perf_hooks_1.performance.mark('cache-results-start');
        await Promise.all(results
            // Results restored from a cache, or that never ran, aren't re-cached.
            .filter(({ status }) => status !== 'local-cache' &&
            status !== 'local-cache-kept-existing' &&
            status !== 'remote-cache' &&
            status !== 'skipped' &&
            status !== 'stopped')
            // Normalize the status into an exit code for cache storage.
            .map((result) => ({
            ...result,
            code: result.status === 'local-cache' ||
                result.status === 'local-cache-kept-existing' ||
                result.status === 'remote-cache' ||
                result.status === 'success'
                ? 0
                : 1,
            outputs: result.task.outputs,
        }))
            .filter(({ task, code }) => this.shouldCacheTaskResult(task, code))
            // Skip entries with neither terminal output nor declared outputs.
            .filter(({ terminalOutput, outputs }) => terminalOutput || outputs)
            .map(async ({ task, code, terminalOutput, outputs }) => this.cache.put(task, terminalOutput, outputs, code)));
        perf_hooks_1.performance.mark('cache-results-end');
        perf_hooks_1.performance.measure('cache-results', 'cache-results-start', 'cache-results-end');
    }
    // Mark tasks complete (lifecycle endTasks, scheduler, TUI) and then
    // unblock any worker loops waiting for new work.
    await this.complete(results, groupId);
    await this.scheduleNextTasksAndReleaseThreads();
}
async scheduleNextTasksAndReleaseThreads() {
if (this.stopRequested) {
this.waitingForTasks.forEach((f) => f(null));
this.waitingForTasks.length = 0;
return;
}
await this.tasksSchedule.scheduleNextTasks();
// release blocked threads
this.waitingForTasks.forEach((f) => f(null));
this.waitingForTasks.length = 0;
}
async complete(results, groupId) {
    // Record the results, then stop any continuous tasks nothing depends on.
    await this.completeTasks(results, groupId);
    this.cleanUpUnneededContinuousTasks();
}
/**
* Unified task completion handler for a set of tasks.
* - Calls endTasks() lifecycle hook (non-skipped only)
* - Marks complete in scheduler
* - Sets completedTasks
* - Updates TUI status
* - Skip dependent tasks
*/
async completeTasks(results, groupId) {
    // 1. endTasks FIRST (non-skipped only)
    const tasksToReport = [];
    const taskIds = [];
    for (const { task, status, terminalOutput } of results) {
        taskIds.push(task.id);
        // Only report tasks that haven't already been completed.
        if (this.completedTasks[task.id] === undefined && status !== 'skipped') {
            tasksToReport.push({
                task,
                status,
                terminalOutput,
                // Any cache-hit or success status maps to exit code 0.
                code: status === 'success' ||
                    status === 'local-cache' ||
                    status === 'local-cache-kept-existing' ||
                    status === 'remote-cache'
                    ? 0
                    : 1,
            });
        }
    }
    if (tasksToReport.length > 0) {
        await this.options.lifeCycle.endTasks(tasksToReport, { groupId });
    }
    // 2. Mark complete in scheduler
    this.tasksSchedule.complete(taskIds);
    // 3. Set completedTasks + update TUI + collect dependent tasks to skip
    const dependentTasksToSkip = [];
    for (const { task, status, displayStatus } of results) {
        // First status wins; never overwrite an already-recorded completion.
        if (this.completedTasks[task.id] !== undefined)
            continue;
        this.completedTasks[task.id] = status;
        if (this.tuiEnabled) {
            this.options.lifeCycle.setTaskStatus(task.id, displayStatus ?? (0, native_1.parseTaskStatus)(status));
        }
        if (status === 'failure' ||
            status === 'skipped' ||
            status === 'stopped') {
            if (this.bail) {
                // mark the execution as bailed which will stop all further execution
                // only the tasks that are currently running will finish
                this.bailed = true;
            }
            else {
                // Collect reverse deps to skip
                for (const depTaskId of this.reverseTaskDeps[task.id]) {
                    const depTask = this.taskGraph.tasks[depTaskId];
                    if (depTask) {
                        // Don't skip tasks that are still running/stopping — their own
                        // exit handler will set the correct terminal status
                        if (this.runningDiscreteTasks.has(depTaskId) ||
                            this.runningContinuousTasks.has(depTaskId)) {
                            continue;
                        }
                        dependentTasksToSkip.push({ task: depTask, status: 'skipped' });
                    }
                }
            }
        }
    }
    // 4. Skip dependent tasks (recursion propagates skips transitively)
    if (dependentTasksToSkip.length > 0) {
        await this.completeTasks(dependentTasksToSkip, groupId);
    }
}
//endregion Lifecycle
// region utils
async pipeOutputCapture(task) {
try {
if (process.env.NX_NATIVE_COMMAND_RUNNER !== 'false') {
return true;
}
// When TUI is enabled, we need to use pipe output capture to support
// progressive output streaming via the onOutput callback
if (this.tuiEnabled) {
return true;
}
const { schema } = (0, utils_1.getExecutorForTask)(task, this.projectGraph);
return (schema.outputCapture === 'pipe' ||
process.env.NX_STREAM_OUTPUT === 'true');
}
catch (e) {
return false;
}
}
shouldCacheTaskResult(task, code) {
return ((0, utils_1.isCacheableTask)(task, this.options) &&
(process.env.NX_CACHE_FAILURES == 'true' ? true : code === 0));
}
closeGroup() {
    // Claim the first free output-group slot and return its index. There are
    // at most `options.parallel` slots; if every slot is taken this falls
    // through and returns undefined, which callers pass back to openGroup.
    for (let i = 0; i < this.options.parallel; i++) {
        if (!this.groups[i]) {
            this.groups[i] = true;
            return i;
        }
    }
}
openGroup(id) {
    // Release a group slot previously claimed via closeGroup.
    this.groups[id] = false;
}
async shouldCopyOutputsFromCache(outputs, hash) {
if (this.daemon?.enabled()) {
return !(await this.daemon.outputsHashesMatch(outputs, hash));
}
else {
return true;
}
}
async recordOutputsHash(task) {
if (this.daemon?.enabled()) {
return this.daemon.recordOutputsHash(task.outputs, task.hash);
}
}
// endregion utils
async handleContinuousTaskExit(code, task, groupId, ownsRunningTasksService) {
    // If cleanup already completed this task, nothing left to do
    if (this.completedTasks[task.id] !== undefined) {
        return;
    }
    // A stoppingReason set by performCleanup, or an expected termination
    // signal, means the exit was intentional.
    const stoppingReason = this.runningContinuousTasks.get(task.id)?.stoppingReason;
    if (stoppingReason || exit_codes_1.EXPECTED_TERMINATION_SIGNALS.has(code)) {
        const reason = stoppingReason === 'fulfilled' ? 'fulfilled' : 'interrupted';
        await this.completeContinuousTask(task, groupId, ownsRunningTasksService, reason);
    }
    else {
        // An unexpected exit of a continuous task is treated as a crash.
        console.error(`Task "${task.id}" is continuous but exited with code ${code}`);
        await this.completeContinuousTask(task, groupId, ownsRunningTasksService, 'crashed');
    }
}
async completeContinuousTask(task, groupId, ownsRunningTasksService, reason) {
    // Idempotent: the exit handler and cleanup may both reach here.
    if (this.completedTasks[task.id] !== undefined)
        return;
    this.runningContinuousTasks.delete(task.id);
    // Only deregister from the shared registry if this process registered it.
    if (ownsRunningTasksService) {
        this.runningTasksService?.removeRunningTask(task.id);
    }
    task.endTime = Date.now();
    // Map the stop reason to a terminal status:
    //   fulfilled -> success (displayed as Stopped in the TUI)
    //   crashed   -> failure
    //   otherwise (interrupted) -> stopped
    if (reason === 'fulfilled') {
        await this.complete([
            {
                task,
                status: 'success',
                displayStatus: 9 /* NativeTaskStatus.Stopped */,
            },
        ], groupId);
    }
    else if (reason === 'crashed') {
        await this.complete([{ task, status: 'failure' }], groupId);
    }
    else {
        await this.complete([{ task, status: 'stopped' }], groupId);
    }
}
async cleanup() {
if (this.cleanupPromise) {
return this.cleanupPromise;
}
this.cleanupPromise = this.performCleanup();
return this.cleanupPromise;
}
    /**
     * Orderly shutdown of everything the orchestrator started:
     *   1) flag all running tasks so their exit handlers treat the
     *      termination as intentional,
     *   2) complete continuous tasks directly (not via onExit),
     *   3) kill every child process,
     *   4) wait for discrete-task exit handling so lifecycle hooks finish.
     *
     * Statement order here is deliberate — see inline comments.
     */
    async performCleanup() {
        // Mark all running tasks for intentional stop.
        // 'interrupted' when a stop was requested (signal); 'fulfilled'
        // when the run is ending naturally.
        const reason = this.stopRequested ? 'interrupted' : 'fulfilled';
        for (const entry of this.runningContinuousTasks.values()) {
            entry.stoppingReason = reason;
        }
        for (const entry of this.runningDiscreteTasks.values()) {
            entry.stopping = true;
        }
        // Snapshot continuous tasks before clearing the map.
        // We clear first because complete() -> cleanUpUnneededContinuousTasks()
        // iterates runningContinuousTasks and would re-kill already-stopping tasks.
        const continuousSnapshot = Array.from(this.runningContinuousTasks.entries());
        this.runningContinuousTasks.clear();
        // Complete continuous tasks directly — don't rely on onExit which may hang
        // when grandchild processes keep the pty slave fd open.
        for (const [taskId, { groupId, ownsRunningTasksService },] of continuousSnapshot) {
            const task = this.taskGraph.tasks[taskId];
            if (!task)
                continue; // task no longer in the graph; nothing to record
            await this.completeContinuousTask(task, groupId, ownsRunningTasksService, reason);
        }
        // Kill all processes (continuous, discrete, and run-commands tasks)
        // in parallel; each kill is best-effort — log failures and move on.
        this.forkedProcessTaskRunner.cleanup();
        await Promise.all([
            ...continuousSnapshot.map(async ([taskId, { runningTask }]) => {
                try {
                    await runningTask.kill();
                }
                catch (e) {
                    console.error(`Unable to terminate ${taskId}\nError:`, e);
                }
            }),
            ...Array.from(this.runningDiscreteTasks).map(async ([taskId, { runningTask }]) => {
                try {
                    await runningTask.kill();
                }
                catch (e) {
                    console.error(`Unable to terminate ${taskId}\nError:`, e);
                }
            }),
            ...Array.from(this.runningRunCommandsTasks).map(async ([taskId, t]) => {
                try {
                    await t.kill();
                }
                catch (e) {
                    console.error(`Unable to terminate ${taskId}\nError:`, e);
                }
            }),
        ]);
        // Discrete exit promises resolve promptly (process kill → getResults →
        // postRunSteps → resolve). Await them so lifecycle endTasks() completes
        // before run() returns and endCommand() is called.
        await Promise.all(this.discreteTaskExitHandled.values());
    }
setupSignalHandlers() {
process.once('SIGINT', () => {
this.stopRequested = true;
if (!this.tuiEnabled) {
// Synchronously remove DB entries before async cleanup to prevent
// new nx processes from seeing stale "Waiting for ..." messages.
// This replicates the cleanup that process.exit() + Rust Drop
// previously provided.
for (const [taskId, { ownsRunningTasksService }] of this
.runningContinuousTasks) {
if (ownsRunningTasksService) {
this.runningTasksService?.removeRunningTask(taskId);
}
}
// Silence output — pnpm (and similar wrappers) may exit before nx
// finishes cleanup, returning the shell prompt. Any output after
// that point would appear after the prompt.
const noop = (_chunk, _encoding, callback) => {
if (callback)
callback();
return true;
};
process.stdout.write = noop;
process.stderr.write = noop;
}
this.cleanup().finally(() => {
if (this.resolveStopPromise) {
this.resolveStopPromise();
}
else {
process.exit((0, exit_codes_1.signalToCode)('SIGINT'));
}
});
});
process.once('SIGTERM', () => {
this.stopRequested = true;
this.cleanup().finally(() => {
if (this.resolveStopPromise) {
this.resolveStopPromise();
}
});
});
process.once('SIGHUP', () => {
this.stopRequested = true;
this.cleanup().finally(() => {
if (this.resolveStopPromise) {
this.resolveStopPromise();
}
});
});
}
cleanUpUnneededContinuousTasks() {
const incompleteTasks = this.tasksSchedule.getIncompleteTasks();
const neededContinuousTasks = new Set();
for (const task of incompleteTasks) {
// Keep initiating tasks that are still incomplete
if (task.continuous && this.initializingTaskIds.has(task.id)) {
neededContinuousTasks.add(task.id);
}
const continuousDependencies = this.taskGraph.continuousDependencies[task.id];
for (const continuousDependency of continuousDependencies) {
neededContinuousTasks.add(continuousDependency);
}
}
for (const [taskId, entry] of this.runningContinuousTasks) {
if (!neededContinuousTasks.has(taskId)) {
// Mark as intentional kill before calling kill()
// onExit will see this and use success/Stopped
entry.stoppingReason = 'fulfilled';
entry.runningTask.kill();
}
}
}
}
exports.TaskOrchestrator = TaskOrchestrator;
/**
 * Computes the thread-pool sizing for a run.
 *
 * Normalizes `options.parallel` in place: 'false'/false becomes 1;
 * 'true'/true/undefined/'' becomes Number(options.maxParallel || 3);
 * an explicit number is kept as-is. The discrete pool size is the
 * normalized parallel value; the continuous pool size is the number of
 * continuous tasks in the graph.
 *
 * @param {object} options - run options; `parallel` is normalized in place
 * @param {object} taskGraph - graph whose tasks are inspected for `continuous`
 * @returns {{discrete: number, continuous: number, total: number}}
 */
function getThreadPoolSize(options, taskGraph) {
    const raw = options['parallel'];
    if (raw === 'false' || raw === false) {
        options['parallel'] = 1;
    }
    else if (raw === 'true' ||
        raw === true ||
        raw === undefined ||
        raw === '') {
        options['parallel'] = Number(options['maxParallel'] || 3);
    }
    let continuous = 0;
    for (const task of Object.values(taskGraph.tasks)) {
        if (task.continuous) {
            continuous += 1;
        }
    }
    const discrete = options['parallel'];
    return { discrete, continuous, total: discrete + continuous };
}