wireit
Version:
Upgrade your npm scripts to make them smarter and more efficient
1,144 lines • 68.8 kB
JavaScript
/**
* @license
* Copyright 2022 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as pathlib from 'path';
import { scriptReferenceToString } from './config.js';
import { findNodeAtLocation } from './util/ast.js';
import * as fs from './util/fs.js';
import { CachingPackageJsonReader, } from './util/package-json-reader.js';
import { IS_WINDOWS } from './util/windows.js';
/**
 * Globs that will be injected into both `files` and `output`, unless
 * `allowUsuallyExcludedPaths` is `true`.
 *
 * See https://docs.npmjs.com/cli/v9/configuring-npm/package-json#files for the
 * similar list of paths that npm ignores.
 */
const DEFAULT_EXCLUDE_PATHS = [
  '!.git/',
  '!.hg/',
  '!.svn/',
  '!.wireit/',
  '!.yarn/',
  '!CVS/',
  '!node_modules/',
];
/**
 * Lockfile name(s) to check for each package manager agent when a script does
 * not declare an explicit "packageLocks" array (see #processPackageLocks).
 */
const DEFAULT_LOCKFILES = {
  npm: ['package-lock.json'],
  nodeRun: ['package-lock.json'],
  yarnClassic: ['yarn.lock'],
  yarnBerry: ['yarn.lock'],
  pnpm: ['pnpm-lock.yaml'],
};
/**
 * Whether the given npm "scripts" command is an accepted way of invoking the
 * wireit binary.
 */
function isValidWireitScriptCommand(command) {
  if (command === 'wireit' || command === 'yarn run -TB wireit') {
    return true;
  }
  // This form is useful when using package managers like yarn or pnpm which
  // do not automatically add all parent directory `node_modules/.bin`
  // folders to PATH.
  if (/^(\.\.\/)+node_modules\/\.bin\/wireit$/.test(command)) {
    return true;
  }
  // The same relative-path form, spelled for the Windows shell.
  return (IS_WINDOWS && /^(\.\.\\)+node_modules\\\.bin\\wireit\.cmd$/.test(command));
}
/**
 * Analyzes and validates a script along with all of its transitive
 * dependencies, producing a build graph that is ready to be executed.
 */
export class Analyzer {
  // Cached reader for package.json files; also accumulates read failures.
  #packageJsonReader;
  // Placeholder configs keyed by stringified script reference (see
  // #getPlaceholder). Each entry pairs the placeholder object with the
  // promise for its upgrade to a full config.
  #placeholders = new Map();
  // Pending analysis work. New promises may be appended while others are
  // still pending; see #waitForAnalysisToComplete.
  #ongoingWorkPromises = [];
  // Path of every package.json consulted during analysis.
  #relevantConfigFilePaths = new Set();
  // Package manager identifier (e.g. "npm", "pnpm"); selects the default
  // lockfile names in #processPackageLocks.
  #agent;
  // Optional logger for analysis lifecycle events (may be undefined).
  #logger;
  /**
   * @param agent Package manager identifier used to pick default lockfiles.
   * @param logger Optional logger; when present, receives analysis events.
   * @param filesystem Filesystem implementation forwarded to the
   *   package.json reader.
   */
  constructor(agent, logger, filesystem) {
    this.#agent = agent;
    this.#logger = logger;
    this.#packageJsonReader = new CachingPackageJsonReader(filesystem);
  }
/**
* Analyze every script in each given file and return all diagnostics found.
*/
async analyzeFiles(files) {
await Promise.all(files.map(async (f) => {
const packageDir = pathlib.dirname(f);
const fileResult = await this.getPackageJson(packageDir);
if (!fileResult.ok) {
return; // will get this error below.
}
for (const script of fileResult.value.scripts) {
// This starts analysis of each of the scripts in our root files.
this.#getPlaceholder({ name: script.name, packageDir });
}
}));
await this.#waitForAnalysisToComplete();
// Check for cycles.
for (const info of this.#placeholders.values()) {
if (info.placeholder.state === 'unvalidated') {
continue;
}
// We don't care about the result, if there's a cycle error it'll
// be added to the scripts' diagnostics.
this.#checkForCyclesAndSortDependencies(info.placeholder, new Set(), true);
}
return this.#getDiagnostics();
}
/**
* Load the Wireit configuration from the `package.json` corresponding to the
* given script, repeat for all transitive dependencies, and return a build
* graph that is ready to be executed.
*
* Returns a Failure if the given script or any of its transitive
* dependencies don't exist, are configured in an invalid way, or if there is
* a cycle in the dependency graph.
*/
async analyze(root, extraArgs) {
this.#logger?.log({
type: 'info',
detail: 'analysis-started',
script: root,
});
const analyzeResult = await this.#actuallyAnalyze(root, extraArgs);
this.#logger?.log({
type: 'info',
detail: 'analysis-completed',
script: root,
rootScriptConfig: analyzeResult.config.ok
? analyzeResult.config.value
: undefined,
});
return analyzeResult;
}
async #actuallyAnalyze(root, extraArgs) {
// We do 2 walks through the dependency graph:
//
// 1. A non-deterministically ordered walk, where we traverse edges as soon
// as they are known, to maximize the parallelism of package.json file
// read operations.
//
// 2. A depth-first walk to detect cycles.
//
// We can't check for cycles in the 1st walk because its non-deterministic
// traversal order means that we could miss certain cycle configurations.
// Plus by doing a separate DFS walk, we'll always return the exact same
// trail in the error message for any given graph, instead of an arbitrary
// one.
//
// The way we avoid getting stuck in cycles during the 1st walk is by
// allocating an initial placeholder object for each script, and caching it
// by package + name. Then, instead of blocking each script on its
// dependencies (which would lead to a promise cycle if there was a cycle in
// the configuration), we wait for all placeholders to upgrade to full
// configs asynchronously.
const rootPlaceholder = this.#getPlaceholder(root);
// Note we can't use Promise.all here, because new promises can be added to
// the promises array as long as any promise is pending.
await this.#waitForAnalysisToComplete();
{
const errors = await this.#getDiagnostics();
if (errors.size > 0) {
return {
config: { ok: false, error: [...errors] },
relevantConfigFilePaths: this.#relevantConfigFilePaths,
};
}
}
// We can safely assume all placeholders have now been upgraded to full
// configs.
const rootConfig = rootPlaceholder.placeholder;
if (rootConfig.state === 'unvalidated') {
throw new Error(`Internal error: script ${root.name} in ${root.packageDir} is still unvalidated but had no failures`);
}
const cycleResult = this.#checkForCyclesAndSortDependencies(rootConfig, new Set(), true);
if (!cycleResult.ok) {
return {
config: { ok: false, error: [cycleResult.error.dependencyFailure] },
relevantConfigFilePaths: this.#relevantConfigFilePaths,
};
}
const validRootConfig = cycleResult.value;
validRootConfig.extraArgs = extraArgs;
return {
config: { ok: true, value: validRootConfig },
relevantConfigFilePaths: this.#relevantConfigFilePaths,
};
}
async analyzeIgnoringErrors(scriptReference) {
await this.analyze(scriptReference, []);
return this.#getPlaceholder(scriptReference).placeholder;
}
async #getDiagnostics() {
const failures = new Set();
for await (const failure of this.#packageJsonReader.getFailures()) {
failures.add(failure);
}
for (const info of this.#placeholders.values()) {
for (const failure of info.placeholder.failures) {
failures.add(failure);
}
}
for (const failure of failures) {
const supercedes = failure
.supercedes;
if (supercedes !== undefined) {
failures.delete(supercedes);
}
}
return failures;
}
/**
 * Resolve once the #ongoingWorkPromises array is empty, i.e. once all
 * pending analysis work — including work appended to the array while we are
 * waiting — has completed.
 */
async #waitForAnalysisToComplete() {
  while (this.#ongoingWorkPromises.length > 0) {
    // Always look at the current final element; new work is pushed there.
    const promise = this.#ongoingWorkPromises[this.#ongoingWorkPromises.length - 1];
    await promise;
    // Need to be careful here. The contract of this method is that it does
    // not return until all pending analysis work is completed.
    // If there are multiple concurrent callers to this method, we want to
    // make sure that none of them hide any of the pending work from each
    // other by removing a promise from the array before it has settled.
    // So we first await the promise, and then remove it from the array if
    // it's still the final element.
    // It might not be the final element because another caller removed it,
    // or because more work was added onto the end of the array. Either
    // case is fine.
    if (promise ===
      this.#ongoingWorkPromises[this.#ongoingWorkPromises.length - 1]) {
      void this.#ongoingWorkPromises.pop();
    }
  }
}
async getPackageJson(packageDir) {
this.#relevantConfigFilePaths.add(pathlib.join(packageDir, 'package.json'));
return this.#packageJsonReader.read(packageDir);
}
/**
* Adds the given package.json files to the known set, and analyzes all
* scripts reachable from any of them, recursively.
*
* Useful for whole program analysis, e.g. for "find all references" in the
* IDE.
*/
async analyzeAllScripts(packageJsonPaths) {
const done = new Set();
const todo = [];
for (const file of packageJsonPaths) {
const packageDir = pathlib.dirname(file);
const packageJsonResult = await this.getPackageJson(packageDir);
if (!packageJsonResult.ok) {
continue;
}
for (const script of packageJsonResult.value.scripts) {
todo.push({ name: script.name, packageDir });
}
}
while (true) {
await Promise.all(todo.map(async (ref) => {
await this.analyze(ref, undefined);
done.add(scriptReferenceToString(ref));
}));
todo.length = 0;
for (const info of this.#placeholders.values()) {
if (info.placeholder.state === 'unvalidated' &&
!done.has(scriptReferenceToString(info.placeholder))) {
todo.push(info.placeholder);
}
}
if (todo.length === 0) {
break;
}
}
return this.#placeholders.values();
}
/**
* Create or return a cached placeholder script configuration object for the
* given script reference.
*/
#getPlaceholder(reference) {
const scriptKey = scriptReferenceToString(reference);
let placeholderInfo = this.#placeholders.get(scriptKey);
if (placeholderInfo === undefined) {
const placeholder = {
...reference,
state: 'unvalidated',
failures: [],
};
placeholderInfo = {
placeholder: placeholder,
upgradeComplete: this.#upgradePlaceholder(placeholder),
};
this.#placeholders.set(scriptKey, placeholderInfo);
this.#ongoingWorkPromises.push(placeholderInfo.upgradeComplete);
}
return placeholderInfo;
}
/**
* In-place upgrade the given placeholder script configuration object to a
* full configuration, by reading its package.json file.
*
* Note this method does not block on the script's dependencies being
* upgraded; dependencies are upgraded asynchronously.
*/
async #upgradePlaceholder(placeholder) {
const packageJsonResult = await this.getPackageJson(placeholder.packageDir);
if (!packageJsonResult.ok) {
placeholder.failures.push(packageJsonResult.error);
return undefined;
}
const packageJson = packageJsonResult.value;
placeholder.failures.push(...packageJson.failures);
const syntaxInfo = packageJson.getScriptInfo(placeholder.name);
if (syntaxInfo?.wireitConfigNode !== undefined) {
await this.#handleWireitScript(placeholder, packageJson, syntaxInfo, syntaxInfo.wireitConfigNode);
}
else if (syntaxInfo?.scriptNode !== undefined) {
this.#handlePlainNpmScript(placeholder, packageJson, syntaxInfo.scriptNode);
}
else {
placeholder.failures.push({
type: 'failure',
reason: 'script-not-found',
script: placeholder,
diagnostic: {
severity: 'error',
message: `Script "${placeholder.name}" not found in the scripts section of this package.json.`,
location: {
file: packageJson.jsonFile,
range: { offset: 0, length: 0 },
},
},
});
}
return undefined;
}
#handlePlainNpmScript(placeholder, packageJson, scriptCommand) {
if (isValidWireitScriptCommand(scriptCommand.value)) {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: `This script is configured to run wireit but it has no config in the wireit section of this package.json file`,
location: {
file: packageJson.jsonFile,
range: {
length: scriptCommand.length,
offset: scriptCommand.offset,
},
},
},
});
}
// It's important to in-place update the placeholder object, instead of
// creating a new object, because other configs may be referencing this
// exact object in their dependencies.
const remainingConfig = {
...placeholder,
state: 'locally-valid',
failures: placeholder.failures,
command: scriptCommand,
extraArgs: undefined,
dependencies: [],
files: undefined,
output: undefined,
clean: false,
service: undefined,
scriptAstNode: scriptCommand,
configAstNode: undefined,
declaringFile: packageJson.jsonFile,
services: [],
env: {},
};
Object.assign(placeholder, remainingConfig);
}
/**
 * Upgrade a script that has a config in the "wireit" section: validate its
 * "scripts" command, process each wireit setting, and — only when no local
 * failures were recorded — in-place upgrade the placeholder to the
 * "locally-valid" state.
 */
async #handleWireitScript(placeholder, packageJson, syntaxInfo, wireitConfig) {
  const scriptCommand = syntaxInfo.scriptNode;
  // The "scripts" entry of a wireit-configured script should just invoke
  // wireit; anything else gets a warning (not an error).
  if (scriptCommand !== undefined &&
    !isValidWireitScriptCommand(scriptCommand.value)) {
    {
      const configName = wireitConfig.name;
      placeholder.failures.push({
        type: 'failure',
        reason: 'script-not-wireit',
        script: placeholder,
        diagnostic: {
          message: `This command should just be "wireit", ` +
            `as this script is configured in the wireit section.`,
          severity: 'warning',
          location: {
            file: packageJson.jsonFile,
            range: {
              length: scriptCommand.length,
              offset: scriptCommand.offset,
            },
          },
          supplementalLocations: [
            {
              message: `The wireit config is here.`,
              location: {
                file: packageJson.jsonFile,
                range: {
                  length: configName.length,
                  offset: configName.offset,
                },
              },
            },
          ],
        },
      });
    }
  }
  const { dependencies, encounteredError: dependenciesErrored } = this.#processDependencies(placeholder, packageJson, syntaxInfo);
  let command;
  let commandError = false;
  const commandAst = findNodeAtLocation(wireitConfig, ['command']);
  if (commandAst !== undefined) {
    const result = failUnlessNonBlankString(commandAst, packageJson.jsonFile);
    if (result.ok) {
      command = result.value;
    }
    else {
      commandError = true;
      placeholder.failures.push(result.error);
    }
  }
  const allowUsuallyExcludedPaths = this.#processAllowUsuallyExcludedPaths(placeholder, packageJson, syntaxInfo);
  const files = this.#processFiles(placeholder, packageJson, syntaxInfo, allowUsuallyExcludedPaths);
  // A wireit config that declares nothing actionable is an error — but only
  // when the emptiness isn't explained by an earlier syntax error.
  if (dependencies.length === 0 &&
    !dependenciesErrored &&
    command === undefined &&
    !commandError &&
    (files === undefined || files.values.length === 0)) {
    placeholder.failures.push({
      type: 'failure',
      reason: 'invalid-config-syntax',
      script: placeholder,
      diagnostic: {
        severity: 'error',
        message: `A wireit config must set at least one of "command", "dependencies", or "files". Otherwise there is nothing for wireit to do.`,
        location: {
          file: packageJson.jsonFile,
          range: {
            length: wireitConfig.name.length,
            offset: wireitConfig.name.offset,
          },
        },
      },
    });
  }
  const output = this.#processOutput(placeholder, packageJson, syntaxInfo, command, allowUsuallyExcludedPaths);
  const clean = this.#processClean(placeholder, packageJson, syntaxInfo);
  const service = this.#processService(placeholder, packageJson, syntaxInfo, command, output);
  await this.#processPackageLocks(placeholder, packageJson, syntaxInfo, files);
  const env = this.#processEnv(placeholder, packageJson, syntaxInfo, command);
  if (placeholder.failures.length > 0) {
    // A script with locally-determined errors doesn't get upgraded to
    // locally-valid.
    return;
  }
  // It's important to in-place update the placeholder object, instead of
  // creating a new object, because other configs may be referencing this
  // exact object in their dependencies.
  const remainingConfig = {
    ...placeholder,
    state: 'locally-valid',
    failures: placeholder.failures,
    command,
    extraArgs: undefined,
    dependencies,
    files,
    output,
    clean,
    service,
    scriptAstNode: scriptCommand,
    configAstNode: wireitConfig,
    declaringFile: packageJson.jsonFile,
    services: [],
    env,
  };
  Object.assign(placeholder, remainingConfig);
}
/**
 * Parse and resolve the "dependencies" array of a wireit config.
 *
 * Returns the resolved dependency list plus whether any syntax or resolution
 * error was recorded. Also schedules async follow-up work (pushed onto
 * #ongoingWorkPromises) that re-labels a dependency's "script-not-found" /
 * "missing-package-json" failures as failures of THIS script, pointing at
 * the dependency specifier in this file.
 */
#processDependencies(placeholder, packageJson, scriptInfo) {
  const dependencies = [];
  const dependenciesAst = scriptInfo.wireitConfigNode &&
    findNodeAtLocation(scriptInfo.wireitConfigNode, ['dependencies']);
  let encounteredError = false;
  if (dependenciesAst === undefined) {
    return { dependencies, encounteredError };
  }
  const result = failUnlessArray(dependenciesAst, packageJson.jsonFile);
  if (!result.ok) {
    encounteredError = true;
    placeholder.failures.push(result.error);
    return { dependencies, encounteredError };
  }
  // Error if the same dependency is declared multiple times. Duplicate
  // dependencies aren't necessarily a serious problem (since we already
  // prevent double-analysis here, and double-analysis in the Executor), but
  // they may indicate that the user has made a mistake (e.g. maybe they
  // meant a different dependency).
  const uniqueDependencies = new Map();
  const children = dependenciesAst.children ?? [];
  for (const maybeUnresolved of children) {
    // A dependency can be either a plain string, or an object with a "script"
    // property plus optional extra annotations.
    let specifierResult;
    let cascade = true; // Default;
    if (maybeUnresolved.type === 'string') {
      specifierResult = failUnlessNonBlankString(maybeUnresolved, packageJson.jsonFile);
      if (!specifierResult.ok) {
        encounteredError = true;
        placeholder.failures.push(specifierResult.error);
        continue;
      }
    }
    else if (maybeUnresolved.type === 'object') {
      specifierResult = findNodeAtLocation(maybeUnresolved, ['script']);
      if (specifierResult === undefined) {
        encounteredError = true;
        placeholder.failures.push({
          type: 'failure',
          reason: 'invalid-config-syntax',
          script: { packageDir: pathlib.dirname(packageJson.jsonFile.path) },
          diagnostic: {
            severity: 'error',
            message: `Dependency object must set a "script" property.`,
            location: {
              file: packageJson.jsonFile,
              range: {
                offset: maybeUnresolved.offset,
                length: maybeUnresolved.length,
              },
            },
          },
        });
        continue;
      }
      specifierResult = failUnlessNonBlankString(specifierResult, packageJson.jsonFile);
      if (!specifierResult.ok) {
        encounteredError = true;
        placeholder.failures.push(specifierResult.error);
        continue;
      }
      // Optional "cascade" annotation; must be a boolean. (Its runtime
      // semantics are handled elsewhere; here we only validate and record.)
      const cascadeResult = findNodeAtLocation(maybeUnresolved, ['cascade']);
      if (cascadeResult !== undefined) {
        if (cascadeResult.value === true || cascadeResult.value === false) {
          cascade = cascadeResult.value;
        }
        else {
          encounteredError = true;
          placeholder.failures.push({
            type: 'failure',
            reason: 'invalid-config-syntax',
            script: { packageDir: pathlib.dirname(packageJson.jsonFile.path) },
            diagnostic: {
              severity: 'error',
              message: `The "cascade" property must be either true or false.`,
              location: {
                file: packageJson.jsonFile,
                range: {
                  offset: cascadeResult.offset,
                  length: cascadeResult.length,
                },
              },
            },
          });
          continue;
        }
      }
    }
    else {
      encounteredError = true;
      placeholder.failures.push({
        type: 'failure',
        reason: 'invalid-config-syntax',
        script: { packageDir: pathlib.dirname(packageJson.jsonFile.path) },
        diagnostic: {
          severity: 'error',
          message: `Expected a string or object, but was ${maybeUnresolved.type}.`,
          location: {
            file: packageJson.jsonFile,
            range: {
              offset: maybeUnresolved.offset,
              length: maybeUnresolved.length,
            },
          },
        },
      });
      continue;
    }
    const unresolved = specifierResult.value;
    // A single specifier may resolve to multiple script references; each
    // resolved reference is handled independently below.
    const result = this.#resolveDependency(unresolved, placeholder, packageJson.jsonFile);
    if (!result.ok) {
      encounteredError = true;
      placeholder.failures.push(result.error);
      continue;
    }
    for (const resolved of result.value) {
      const uniqueKey = scriptReferenceToString(resolved);
      const duplicate = uniqueDependencies.get(uniqueKey);
      if (duplicate !== undefined) {
        encounteredError = true;
        placeholder.failures.push({
          type: 'failure',
          reason: 'duplicate-dependency',
          script: placeholder,
          dependency: resolved,
          diagnostic: {
            severity: 'error',
            message: `This dependency is listed multiple times`,
            location: {
              file: packageJson.jsonFile,
              range: {
                offset: unresolved.offset,
                length: unresolved.length,
              },
            },
            supplementalLocations: [
              {
                message: `The dependency was first listed here.`,
                location: {
                  file: packageJson.jsonFile,
                  range: {
                    offset: duplicate.offset,
                    length: duplicate.length,
                  },
                },
              },
            ],
          },
        });
      }
      uniqueDependencies.set(uniqueKey, unresolved);
      // Start (or join) analysis of the dependency itself.
      const placeHolderInfo = this.#getPlaceholder(resolved);
      dependencies.push({
        specifier: unresolved,
        config: placeHolderInfo.placeholder,
        cascade,
      });
      // Once the dependency finishes upgrading, translate its failures into
      // failures of this script that point at the specifier in this file.
      this.#ongoingWorkPromises.push((async () => {
        await placeHolderInfo.upgradeComplete;
        const failures = placeHolderInfo.placeholder.failures;
        for (const failure of failures) {
          if (failure.reason === 'script-not-found') {
            const hasColon = unresolved.value.includes(':');
            let offset;
            let length;
            if (!hasColon ||
              resolved.packageDir === placeholder.packageDir) {
              // Highlight the whole specifier string.
              offset = unresolved.offset;
              length = unresolved.length;
            }
            else {
              // Skip past the colon
              const colonOffsetInString = packageJson.jsonFile.contents
                .slice(unresolved.offset)
                .indexOf(':');
              offset = unresolved.offset + colonOffsetInString + 1;
              length = unresolved.length - colonOffsetInString - 2;
            }
            placeholder.failures.push({
              type: 'failure',
              reason: 'dependency-on-missing-script',
              script: placeholder,
              supercedes: failure,
              diagnostic: {
                severity: 'error',
                message: `Cannot find script named ${JSON.stringify(resolved.name)} in package "${resolved.packageDir}"`,
                location: {
                  file: packageJson.jsonFile,
                  range: { offset, length },
                },
              },
            });
          }
          else if (failure.reason === 'missing-package-json') {
            // Skip the opening "
            const offset = unresolved.offset + 1;
            // Take everything up to the first colon, but look in
            // the original source, to avoid getting confused by escape
            // sequences, which have a different length before and after
            // encoding.
            const length = packageJson.jsonFile.contents
              .slice(offset)
              .indexOf(':');
            const range = { offset, length };
            placeholder.failures.push({
              type: 'failure',
              reason: 'dependency-on-missing-package-json',
              script: placeholder,
              supercedes: failure,
              diagnostic: {
                severity: 'error',
                message: `package.json file missing: "${pathlib.join(resolved.packageDir, 'package.json')}"`,
                location: { file: packageJson.jsonFile, range },
              },
            });
          }
        }
        return undefined;
      })());
    }
  }
  return { dependencies, encounteredError };
}
#processAllowUsuallyExcludedPaths(placeholder, packageJson, syntaxInfo) {
const defaultValue = false;
if (syntaxInfo.wireitConfigNode == null) {
return defaultValue;
}
const node = findNodeAtLocation(syntaxInfo.wireitConfigNode, [
'allowUsuallyExcludedPaths',
]);
if (node === undefined) {
return defaultValue;
}
if (node.value === true || node.value === false) {
return node.value;
}
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: `Must be true or false`,
location: {
file: packageJson.jsonFile,
range: { length: node.length, offset: node.offset },
},
},
});
return defaultValue;
}
#processFiles(placeholder, packageJson, syntaxInfo, allowUsuallyExcludedPaths) {
if (syntaxInfo.wireitConfigNode === undefined) {
return;
}
const filesNode = findNodeAtLocation(syntaxInfo.wireitConfigNode, [
'files',
]);
if (filesNode === undefined) {
return;
}
const values = [];
const result = failUnlessArray(filesNode, packageJson.jsonFile);
if (!result.ok) {
placeholder.failures.push(result.error);
return;
}
const children = filesNode.children ?? [];
for (const file of children) {
const result = failUnlessNonBlankString(file, packageJson.jsonFile);
if (!result.ok) {
placeholder.failures.push(result.error);
continue;
}
values.push(result.value.value);
}
if (!allowUsuallyExcludedPaths && values.length > 0) {
values.push(...DEFAULT_EXCLUDE_PATHS);
}
return { node: filesNode, values };
}
#processOutput(placeholder, packageJson, syntaxInfo, command, allowUsuallyExcludedPaths) {
if (syntaxInfo.wireitConfigNode === undefined) {
return;
}
const outputNode = findNodeAtLocation(syntaxInfo.wireitConfigNode, [
'output',
]);
if (outputNode === undefined) {
return;
}
if (command === undefined) {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: `"output" can only be set if "command" is also set.`,
location: {
file: packageJson.jsonFile,
range: {
// Highlight the whole `"output": []` part.
length: (outputNode.parent ?? outputNode).length,
offset: (outputNode.parent ?? outputNode).offset,
},
},
},
});
}
const values = [];
const result = failUnlessArray(outputNode, packageJson.jsonFile);
if (!result.ok) {
placeholder.failures.push(result.error);
return;
}
const children = outputNode.children ?? [];
for (const anOutput of children) {
const result = failUnlessNonBlankString(anOutput, packageJson.jsonFile);
if (!result.ok) {
placeholder.failures.push(result.error);
continue;
}
values.push(result.value.value);
}
if (!allowUsuallyExcludedPaths && values.length > 0) {
values.push(...DEFAULT_EXCLUDE_PATHS);
}
return { node: outputNode, values };
}
#processClean(placeholder, packageJson, syntaxInfo) {
const defaultValue = true;
if (syntaxInfo.wireitConfigNode == null) {
return defaultValue;
}
const clean = findNodeAtLocation(syntaxInfo.wireitConfigNode, ['clean']);
if (clean !== undefined &&
clean.value !== true &&
clean.value !== false &&
clean.value !== 'if-file-deleted') {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: `The "clean" property must be either true, false, or "if-file-deleted".`,
location: {
file: packageJson.jsonFile,
range: { length: clean.length, offset: clean.offset },
},
},
});
return defaultValue;
}
return clean?.value ?? defaultValue;
}
/**
 * Read the "service" setting from the wireit config.
 *
 * Returns undefined when the setting is absent or false. When it is true or
 * an object, returns `{ readyWhen: { lineMatches } }`, where `lineMatches`
 * is a RegExp compiled from `service.readyWhen.lineMatches` (or undefined
 * when not configured). A service must have a "command" and must not have an
 * "output"; failures are recorded otherwise.
 */
#processService(placeholder, packageJson, syntaxInfo, command, output) {
  if (syntaxInfo.wireitConfigNode === undefined) {
    return undefined;
  }
  const serviceNode = findNodeAtLocation(syntaxInfo.wireitConfigNode, [
    'service',
  ]);
  if (serviceNode === undefined) {
    return undefined;
  }
  if (serviceNode.value === false) {
    return undefined;
  }
  if (serviceNode.value !== true && serviceNode.type !== 'object') {
    placeholder.failures.push({
      type: 'failure',
      reason: 'invalid-config-syntax',
      script: placeholder,
      diagnostic: {
        severity: 'error',
        message: `The "service" property must be either true, false, or an object.`,
        location: {
          file: packageJson.jsonFile,
          range: { length: serviceNode.length, offset: serviceNode.offset },
        },
      },
    });
    return undefined;
  }
  let lineMatches = undefined;
  if (serviceNode.type === 'object') {
    const waitForNode = findNodeAtLocation(serviceNode, ['readyWhen']);
    if (waitForNode !== undefined) {
      if (waitForNode.type !== 'object') {
        // NOTE(review): this diagnostic highlights the whole "service" node
        // (serviceNode) rather than the invalid "readyWhen" node
        // (waitForNode) — confirm whether that is intentional.
        placeholder.failures.push({
          type: 'failure',
          reason: 'invalid-config-syntax',
          script: placeholder,
          diagnostic: {
            severity: 'error',
            message: `Expected an object.`,
            location: {
              file: packageJson.jsonFile,
              range: { length: serviceNode.length, offset: serviceNode.offset },
            },
          },
        });
      }
      else {
        const lineMatchesNode = findNodeAtLocation(waitForNode, [
          'lineMatches',
        ]);
        if (lineMatchesNode !== undefined) {
          if (lineMatchesNode.type !== 'string') {
            placeholder.failures.push({
              type: 'failure',
              reason: 'invalid-config-syntax',
              script: placeholder,
              diagnostic: {
                severity: 'error',
                message: `Expected a string.`,
                location: {
                  file: packageJson.jsonFile,
                  range: {
                    length: lineMatchesNode.length,
                    offset: lineMatchesNode.offset,
                  },
                },
              },
            });
          }
          else {
            // Compile eagerly so an invalid pattern is reported as a config
            // error here (RegExp construction throws on bad syntax).
            try {
              lineMatches = new RegExp(lineMatchesNode.value);
            }
            catch (error) {
              placeholder.failures.push({
                type: 'failure',
                reason: 'invalid-config-syntax',
                script: placeholder,
                diagnostic: {
                  severity: 'error',
                  message: String(error),
                  location: {
                    file: packageJson.jsonFile,
                    range: {
                      length: lineMatchesNode.length,
                      offset: lineMatchesNode.offset,
                    },
                  },
                },
              });
            }
          }
        }
      }
    }
  }
  if (command === undefined) {
    placeholder.failures.push({
      type: 'failure',
      reason: 'invalid-config-syntax',
      script: placeholder,
      diagnostic: {
        severity: 'error',
        message: `A "service" script must have a "command".`,
        location: {
          file: packageJson.jsonFile,
          range: {
            length: serviceNode.length,
            offset: serviceNode.offset,
          },
        },
      },
    });
  }
  if (output !== undefined) {
    placeholder.failures.push({
      type: 'failure',
      reason: 'invalid-config-syntax',
      script: placeholder,
      diagnostic: {
        severity: 'error',
        message: `A "service" script cannot have an "output".`,
        location: {
          file: packageJson.jsonFile,
          range: {
            length: output.node.length,
            offset: output.node.offset,
          },
        },
      },
    });
  }
  return { readyWhen: { lineMatches } };
}
async #processPackageLocks(placeholder, packageJson, syntaxInfo, files) {
if (syntaxInfo.wireitConfigNode === undefined) {
return;
}
const packageLocksNode = findNodeAtLocation(syntaxInfo.wireitConfigNode, [
'packageLocks',
]);
let packageLocks;
if (packageLocksNode !== undefined) {
const result = failUnlessArray(packageLocksNode, packageJson.jsonFile);
if (!result.ok) {
placeholder.failures.push(result.error);
}
else {
packageLocks = { node: packageLocksNode, values: [] };
const children = packageLocksNode.children ?? [];
for (const maybeFilename of children) {
const result = failUnlessNonBlankString(maybeFilename, packageJson.jsonFile);
if (!result.ok) {
placeholder.failures.push(result.error);
continue;
}
const filename = result.value;
if (filename.value !== pathlib.basename(filename.value)) {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: `A package lock must be a filename, not a path`,
location: {
file: packageJson.jsonFile,
range: { length: filename.length, offset: filename.offset },
},
},
});
continue;
}
packageLocks.values.push(filename.value);
}
}
}
if (
// There's no reason to check package locks when "files" is undefined,
// because scripts will always run in that case anyway.
files !== undefined &&
// An explicitly empty "packageLocks" array disables package lock checking
// entirely.
packageLocks?.values.length !== 0) {
const lockfileNames = packageLocks?.values ?? DEFAULT_LOCKFILES[this.#agent];
// Generate "package-lock.json", "../package-lock.json",
// "../../package-lock.json" etc. all the way up to the root of the
// filesystem, because that's how Node package resolution works.
const depth = placeholder.packageDir.split(pathlib.sep).length;
const paths = [];
for (let i = 0; i < depth; i++) {
// Glob patterns are specified with forward-slash delimiters, even on
// Windows.
const prefix = Array(i + 1).join('../');
for (const lockfileName of lockfileNames) {
paths.push(prefix + lockfileName);
}
}
// Only add the package locks that currently exist to the list of files
// for this script. This way, in watch mode we won't create watchers for
// all parent directories, just in case a package lock file is created at
// some later time during watch, which is a rare and not especially
// important event. Creating watchers for all parent directories is
// potentially expensive, and on Windows will also result in occasional
// errors.
const existing = await Promise.all(paths.map(async (path) => {
try {
await fs.access(pathlib.join(placeholder.packageDir, path));
return path;
}
catch {
return undefined;
}
}));
for (const path of existing) {
if (path !== undefined) {
files.values.push(path);
}
}
}
}
#processEnv(placeholder, packageJson, syntaxInfo, command) {
if (syntaxInfo.wireitConfigNode === undefined) {
return {};
}
const envNode = findNodeAtLocation(syntaxInfo.wireitConfigNode, ['env']);
if (envNode === undefined) {
return {};
}
if (command === undefined) {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: 'Can\'t set "env" unless "command" is set',
location: {
file: packageJson.jsonFile,
range: { length: envNode.length, offset: envNode.offset },
},
},
});
}
if (envNode.type !== 'object') {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: 'Expected an object',
location: {
file: packageJson.jsonFile,
range: { length: envNode.length, offset: envNode.offset },
},
},
});
}
if (envNode.children === undefined) {
return {};
}
const entries = [];
for (const propNode of envNode.children) {
if (propNode.children === undefined || propNode.children.length !== 2) {
throw new Error('Internal error: expected object JSON node children to be key/val pairs');
}
const keyValueResult = failUnlessKeyValue(propNode, propNode.children, packageJson.jsonFile);
if (!keyValueResult.ok) {
placeholder.failures.push(keyValueResult.error);
continue;
}
const [key, val] = keyValueResult.value;
if (key.type !== 'string') {
throw new Error('Internal error: expected object JSON node child key to be string');
}
const keyStr = key.value;
if (val.type === 'string') {
entries.push([keyStr, val.value]);
}
else if (val.type !== 'object') {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: 'Expected a string or object',
location: {
file: packageJson.jsonFile,
range: { length: val.length, offset: val.offset },
},
},
});
continue;
}
else {
const externalNode = findNodeAtLocation(val, ['external']);
if (externalNode?.value !== true) {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholder,
diagnostic: {
severity: 'error',
message: 'Expected "external" to be true',
location: {
file: packageJson.jsonFile,
range: {
length: (externalNode ?? val).length,
offset: (externalNode ?? val).offset,
},
},
},
});
continue;
}
const defaultNode = findNodeAtLocation(val, ['default']);
if (defaultNode && defaultNode.type !== 'string') {
placeholder.failures.push({
type: 'failure',
reason: 'invalid-config-syntax',
script: placeholde