Object.defineProperty(exports, "__esModule", {
value: true
});
exports.name = exports.description = exports.TEST_DIR = void 0;
exports.run = run;
exports.selectFlowTestVersions = selectFlowTestVersions;
exports.setup = setup;
exports.writeFlowConfig = writeFlowConfig;
var _colors = _interopRequireDefault(require("colors"));
var _node = require("../lib/node.js");
var _fileUtils = require("../lib/fileUtils.js");
var _github = require("../lib/github.js");
var _npmLibDefs = require("../lib/npm/npmLibDefs");
var _libDefs = require("../lib/libDefs.js");
var _logger = require("../lib/logger");
var _isInFlowTypedRepo = _interopRequireDefault(require("../lib/isInFlowTypedRepo"));
var _flowVersion = require("../lib/flowVersion");
var _git = require("../lib/git");
var _got = _interopRequireDefault(require("got"));
var semver = _interopRequireWildcard(require("semver"));
var _nodeStreamZip = _interopRequireDefault(require("node-stream-zip"));
var _ValidationError = require("../lib/ValidationError");
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Used to decide which binary to fetch for each version of Flow.
*
* Return type matches the binaries returned by flow releases
* https://github.com/facebook/flow/releases
*
* eg: If the binary for the OS is `flow-linux-arm64-v0.206.0.zip`
* we will return `linux-arm64`
*/
const BIN_PLATFORM = (() => {
const arch = _node.os.arch();
switch (_node.os.type()) {
case 'Linux':
if (arch === 'arm64') {
return 'linux-arm64';
}
return 'linux64';
case 'Darwin':
if (arch === 'arm64') {
return 'osx-arm64';
}
return 'osx';
case 'Windows_NT':
return 'win64';
default:
throw new Error('Unsupported os.type()! ' + _node.os.type());
}
})();
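// Illustrative mapping (not exhaustive):
//   os.type() 'Darwin' + os.arch() 'arm64' => 'osx-arm64'
//   os.type() 'Linux'  + os.arch() 'x64'   => 'linux64'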
const PKG_ROOT_DIR = _node.path.join(__dirname, '..', '..');
const TEST_DIR = _node.path.join(PKG_ROOT_DIR, '.test-dir');
exports.TEST_DIR = TEST_DIR;
const BIN_DIR = _node.path.join(PKG_ROOT_DIR, '.flow-bins-cache');
const P = Promise;
/**
* Scan the definitions/ directory to extract a flat list of TestGroup
* structs. Each TestGroup represents a Package/PackageVersion/FlowVersion
* directory.
*/
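// Illustrative shape of one returned TestGroup (hypothetical package, version
// and path values; the real values come from scanning the definitions/ directory):
//
//   {
//     id: 'lodash_v4.x.x/flow_v0.104.x-',
//     testFilePaths: ['<defs>/npm/lodash_v4.x.x/flow_v0.104.x-/test_lodash-v4.js'],
//     libDefPath: '<defs>/npm/lodash_v4.x.x/flow_v0.104.x-/lodash_v4.x.x.js',
//     flowVersion: { kind: 'ranged', lower: { major: 0, minor: 104, patch: 'x' }, upper: null },
//     deps: {}
//   }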
const basePathRegex = new RegExp('definitions/(npm|environments)/(@[^/]*/)?[^/]*/?');
async function getTestGroups(repoDirPath, envDirPath, onlyChanged = false) {
let libDefs = await (0, _libDefs.getLibDefs)(repoDirPath);
let envDefs = await (0, _libDefs.getLibDefs)(envDirPath);
if (onlyChanged) {
const diff = await (0, _git.getDefinitionsDiff)();
const baseDiff = diff.map(d => {
if (d.endsWith('CODEOWNERS')) {
return '';
}
const match = d.match(basePathRegex);
if (match) {
return match[0];
}
return '';
}).filter(d => d !== '');
const changedDefs = baseDiff.map(d => {
const {
pkgName,
pkgVersion
} = (0, _libDefs.parseRepoDirItem)(d);
const {
major,
minor,
patch
} = pkgVersion;
return {
name: pkgName,
version: `v${major}.${minor}.${patch}`
};
});
libDefs = [...libDefs, ...envDefs].filter(def => {
return changedDefs.some(d => {
// This is for env defs
if (d.version === 'vx.x.x') {
return d.name === def.pkgName;
} // If the definition is a dependant of a changed package
if (def.configPath) {
const deps = JSON.parse(_node.fs.readFileSync(def.configPath, 'utf-8')).deps;
const isDependantOfChanged = Object.keys(deps).some(dep => dep === d.name && deps[dep].some(s => s === d.version));
if (isDependantOfChanged) return true;
}
return d.name === def.pkgName && d.version === def.pkgVersionStr;
});
});
}
return libDefs.map(libDef => {
const groupID = `${libDef.pkgName}_${libDef.pkgVersionStr}/${libDef.flowVersionStr}`;
const deps = libDef.configPath ? JSON.parse(_node.fs.readFileSync(libDef.configPath, 'utf-8')).deps : {};
return {
id: groupID,
testFilePaths: libDef.testFilePaths,
libDefPath: libDef.path,
flowVersion: libDef.flowVersion,
deps
};
});
}
function printSkipMessage(flowVersion, githubUrl) {
console.log('==========================================================================================');
console.log(`We are temporarily skipping ${flowVersion} due to ${githubUrl}`);
console.log('==========================================================================================');
} // Once we've fetched flow bin releases, they will be cached
// for subsequent tests within the same test run.
let _flowReleases = [];
/**
 * Memoized function that queries the GitHub releases for Flow, downloads the
 * zip for each version, extracts the zip, and moves the binary to
 * BIN_DIR/flow-vXXX (the `.flow-bins-cache` directory) for use later when running tests.
*/
async function getOrderedFlowBinVersions(numberOfReleases, flowVersion) {
(0, _logger.sectionHeader)('Fetching flow binaries...');
const IS_WINDOWS = _node.os.type() === 'Windows_NT';
const GH_CLIENT = (0, _github.gitHubClient)();
const OS_ARCH_FILTER_RE = new RegExp(`flow-${BIN_PLATFORM}-v`); // Fetching all available flow versions
// before deciding which to run
if (_flowReleases.length === 0) {
let foundAllFlowVersions = false;
let page = 1;
while (!foundAllFlowVersions) {
try {
const pageVersions = await GH_CLIENT.repos.listReleases({
owner: 'facebook',
repo: 'flow',
page,
per_page: 50
});
(0, _logger.listItem)(`Fetched page: ${page}`);
page += 1;
_flowReleases.push(...pageVersions.data);
if (pageVersions.data.length === 0) {
foundAllFlowVersions = true;
}
} catch (e) {
console.error(e);
foundAllFlowVersions = true;
}
}
}
const filteredVersions = selectFlowTestVersions(_flowReleases.map(o => o.tag_name), flowVersion, numberOfReleases);
const flowBins = _flowReleases.filter(ver => filteredVersions.includes(ver.tag_name)).filter(rel => {
if (rel.tag_name.endsWith('-rc')) {
return false;
}
if (rel.tag_name === 'v0.67.0') {
printSkipMessage(rel.tag_name, 'https://github.com/facebook/flow/issues/5922');
return false;
} else if (rel.tag_name === 'v0.63.0' || rel.tag_name === 'v0.70.0') {
printSkipMessage(rel.tag_name, 'https://github.com/flowtype/flow-typed/issues/2422');
return false;
} else if (semver.lt(rel.tag_name, '0.53.0')) {
console.log('flow-typed only supports flow 0.53.0 and newer');
return false;
} else if (IS_WINDOWS && (semver.eq(rel.tag_name, '0.57.0') || semver.eq(rel.tag_name, '0.57.1') || semver.eq(rel.tag_name, '0.57.2'))) {
// Because flow 0.57 was broken before 0.57.3 on the Windows platform, we also skip those versions when running on windows.
return false;
}
return true;
}).map(rel => {
// Find the binary zip in the list of assets
const binZip = rel.assets.filter(({
name
}) => {
return OS_ARCH_FILTER_RE.test(name) && !/-latest.zip$/.test(name);
}).map(asset => asset.browser_download_url);
if (binZip.length !== 1) {
throw new Error('Unexpected number of ' + BIN_PLATFORM + ' assets for flow-' + rel.tag_name + '! ' + JSON.stringify(binZip));
} else {
const version = rel.tag_name[0] === 'v' ? rel.tag_name : 'v' + rel.tag_name;
return {
version,
binURL: binZip[0]
};
}
}).sort((a, b) => {
return semver.lt(a.version, b.version) ? -1 : 1;
});
await P.all(flowBins.map(async ({
version,
binURL
}) => {
const zipPath = _node.path.join(BIN_DIR, 'flow-' + version + '.zip');
const binPath = _node.path.join(BIN_DIR, 'flow-' + version + (IS_WINDOWS ? '.exe' : ''));
if (await _node.fs.exists(binPath)) {
return;
} // Download the zip file
await new Promise((res, rej) => {
console.log(' Fetching flow-%s...', version);
_got.default.stream(binURL, {
headers: {
'User-Agent': 'flow-typed Test Runner (github.com/flowtype/flow-typed)'
}
}).on('error', err => rej(err)).pipe(_node.fs.createWriteStream(zipPath).on('close', () => {
console.log(' flow-%s finished downloading.', version);
res();
}));
}); // Extract the flow binary
const flowBinDirPath = _node.path.join(BIN_DIR, 'TMP-flow-' + version);
await _node.fs.mkdir(flowBinDirPath);
console.log(' Extracting flow-%s...', version);
const zip = new _nodeStreamZip.default.async({
file: zipPath
});
const extractedCount = await zip.extract(null, flowBinDirPath);
console.log(' Extracted %s entries', extractedCount);
if (IS_WINDOWS) {
await _node.fs.rename(_node.path.join(flowBinDirPath, 'flow', 'flow.exe'), _node.path.join(BIN_DIR, 'flow-' + version + '.exe'));
} else {
await _node.fs.rename(_node.path.join(flowBinDirPath, 'flow', 'flow'), _node.path.join(BIN_DIR, 'flow-' + version));
await _node.child_process.execP(['chmod', '755', _node.path.join(BIN_DIR, 'flow-' + version)].join(' '));
}
console.log(' Removing flow-%s artifacts...', version);
await P.all([(0, _fileUtils.recursiveRmdir)(flowBinDirPath), _node.fs.unlink(zipPath)]);
console.log(' flow-%s complete!', version);
}));
console.log('Finished fetching Flow binaries.\n');
return flowBins.map(bin => bin.version);
}
const flowNameRegex = /^flow-v[0-9]+\.[0-9]+\.[0-9]+(\.exe)?$/;
/**
 * A cached flow binary's filename should be `flow-vx.x.x` (or `flow-vx.x.x.exe` on Windows)
 * @param {string} name
*/
function checkFlowFilename(name) {
return flowNameRegex.test(name);
}
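// Illustrative usage, given the regex above:
//   checkFlowFilename('flow-v0.206.0')     // true
//   checkFlowFilename('flow-v0.206.0.exe') // true
//   checkFlowFilename('flow-v0.206.0.zip') // false (leftover download artifact)
//   checkFlowFilename('TMP-flow-v0.206.0') // false (leftover extraction directory)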
/**
* Return the sorted list of cached flow binaries that have previously been retrieved from github
* and cached in the `.flow-bins-cache` directory. This function is usually called when a failure
* has occurred when attempting to refresh the flow releases from github, i.e. offline or over
* API limit.
*/
async function getCachedFlowBinVersions(numberOfReleases, flowVersion) {
// read the files with name `flow-vx.x.x` from the bin dir and remove the leading `flow-` prefix
const versions = (await _node.fs.readdir(_node.path.join(BIN_DIR))).filter(checkFlowFilename).map(dir => dir.slice('flow-'.length)); // sort the versions that we have in-place
// removing the `v` prefix before semver comparison
versions.sort((a, b) => {
return semver.lt(a.substring(1), b.substring(1)) ? -1 : 1;
});
return selectFlowTestVersions(versions, flowVersion, numberOfReleases);
}
async function writeFlowConfig(repoDirPath, testDirPath, libDefPath, version, depPaths) {
// /!\---------------------------------------------------------------------/!\
// Whenever you introduce a new difference depending on the version, don't
// forget to update the constant array CONFIGURATION_CHANGE_VERSIONS.
// /!\---------------------------------------------------------------------/!\
const destFlowConfigPath = _node.path.join(testDirPath, '.flowconfig');
const flowConfigData = [
  '[libs]',
  ...depPaths,
  _node.path.basename(libDefPath),
  _node.path.join(repoDirPath, '..', '__util__', 'tdd_framework.js'),
  '',
  '[options]',
  'include_warnings=true',
  'server.max_workers=0',
  // from version 0.202.0 the default is true
  semver.gte(version, '0.200.0') ? 'exact_by_default=true' : '',
  // Fixes out of shared memory error for Mac Rosetta 2, see https://github.com/facebook/flow/issues/8538
  'sharedmemory.heap_size=3221225472',
  semver.lt(version, '0.125.0') ? 'suppress_comment=\\\\(.\\\\|\\n\\\\)*\\\\$FlowExpectedError' : '',
  '',
  // Be sure to ignore stuff in the node_modules directory of the flow-typed
  // CLI repository!
  '[ignore]',
  _node.path.join(testDirPath, '..', '..', 'node_modules'),
  '',
  '[lints]',
  semver.gte(version, '0.104.0') && semver.lt(version, '0.201.0') ? 'implicit-inexact-object=error' : '',
  semver.gte(version, '0.201.0') ? 'ambiguous-object-type=error' : ''
].join('\n');
await _node.fs.writeFile(destFlowConfigPath, flowConfigData);
}
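// Illustrative output sketch (hypothetical lib def 'foo_v1.x.x.js', Flow v0.201.0,
// no extra depPaths); entries whose version condition doesn't apply become blank lines:
//
//   [libs]
//   foo_v1.x.x.js
//   <repoDirPath>/../__util__/tdd_framework.js
//
//   [options]
//   include_warnings=true
//   server.max_workers=0
//   exact_by_default=true
//   sharedmemory.heap_size=3221225472
//
//   [ignore]
//   <testDirPath>/../../node_modules
//
//   [lints]
//   ambiguous-object-type=error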
function testTypeDefinition(flowVer, testDirPath) {
return new Promise(res => {
const IS_WINDOWS = _node.os.type() === 'Windows_NT';
const child = _node.child_process.exec([_node.path.join(BIN_DIR, 'flow-' + flowVer + (IS_WINDOWS ? '.exe' : '')), 'check', '--strip-root', '--all', testDirPath].join(' '));
let stdErrOut = '';
child.stdout.on('data', data => stdErrOut += data);
child.stderr.on('data', data => stdErrOut += data);
child.on('error', execError => {
res({
stdErrOut,
errCode: null,
execError
});
});
child.on('close', errCode => {
res({
stdErrOut,
errCode,
execError: null
});
});
});
}
async function runFlowTypeDefTests(flowVersionsToRun, groupId, testDirPath) {
const errors = [];
while (flowVersionsToRun.length > 0) {
// Run tests in batches to avoid saturation
const testBatch = flowVersionsToRun.slice(0, Math.min(flowVersionsToRun.length, 5)).map(group => (flowVersionsToRun.shift(), group));
await P.all(testBatch.map(async flowVer => {
const testRunId = groupId + ' (flow-' + flowVer + ')';
console.log('Testing %s...', testRunId);
const {
stdErrOut,
errCode,
execError
} = await testTypeDefinition(flowVer, testDirPath);
if (execError !== null) {
errors.push(testRunId + ': Error executing Flow process: ' + execError.stack);
} else if (!stdErrOut.endsWith('Found 0 errors\n')) {
errors.push(testRunId + ': Unexpected Flow errors (' + String(errCode) + '):\n\n' + stdErrOut);
}
}));
}
return errors;
}
async function testLowestCapableFlowVersion(lowerVersions, testDirPath, lowestFlowVersionRan) {
let lowerFlowVersionsToRun = lowerVersions;
let lowestCapableFlowVersion = lowestFlowVersionRan;
while (lowerFlowVersionsToRun.length > 0) {
const lowerTestBatch = lowerFlowVersionsToRun.slice(0, Math.min(lowerFlowVersionsToRun.length, 5)).map(group => (lowerFlowVersionsToRun.shift(), group));
await P.all(lowerTestBatch.map(async flowVer => {
const {
stdErrOut,
execError
} = await testTypeDefinition(flowVer, testDirPath);
if (execError !== null || !stdErrOut.endsWith('Found 0 errors\n')) {
lowerFlowVersionsToRun = [];
} else {
lowestCapableFlowVersion = semver.lt(lowestCapableFlowVersion, flowVer) ? lowestCapableFlowVersion : flowVer;
}
}));
}
return lowestCapableFlowVersion;
}
async function findLowestCapableFlowVersion(repoDirPath, orderedFlowVersions, lowestFlowVersionRan, testDirPath, libDefPath, depPaths) {
let lowerFlowVersionsToRun = orderedFlowVersions.filter(flowVer => {
return semver.lt(flowVer, lowestFlowVersionRan);
});
lowerFlowVersionsToRun.reverse();
await writeFlowConfig(repoDirPath, testDirPath, libDefPath, lowestFlowVersionRan, depPaths);
return await testLowestCapableFlowVersion(lowerFlowVersionsToRun, testDirPath, lowestFlowVersionRan);
}
/**
 * Remove all files from the bin cache except flow binaries
*/
async function removeTrashFromBinDir() {
  const trash = (await _node.fs.readdir(_node.path.join(BIN_DIR))).filter(name => !checkFlowFilename(name));
  // Await the removals so the bin dir is clean before any new downloads start
  await P.all(trash.map(async el => {
    const filePath = _node.path.resolve(BIN_DIR, el);
    try {
      await _node.fs.unlink(filePath);
    } catch (e) {
      // Ignore entries that cannot be unlinked (e.g. leftover extraction directories)
    }
  }));
} // These versions introduce a change in the .flowconfig file. We need to make
// sure that the .flowconfig file is rewritten when reaching these versions.
// MAKE SURE TO PREPEND THE VERSION WITH "v".
const CONFIGURATION_CHANGE_VERSIONS = [
  'v0.104.0', // adds lint rules
  'v0.125.0', // removes suppress_comment
  'v0.200.0' // exact_by_default becomes required
];
/**
 * This function splits a list of flow versions into a list of lists of versions
 * within which the .flowconfig configuration is compatible.
 * The list of versions needs to be sorted for this function to work properly.
*/
function partitionListOfFlowVersionsPerConfigChange(flowVersions) {
const result = [];
let lastIndex = 0;
for (const pivotVersion of CONFIGURATION_CHANGE_VERSIONS) {
const index = flowVersions.indexOf(pivotVersion, lastIndex + 1);
if (index < 0) {
continue;
} // We extract a new group up to, but excluding, the pivot version (because
// the pivot version introduced the configuration incompatibility).
result.push(flowVersions.slice(lastIndex, index));
lastIndex = index;
}
result.push(flowVersions.slice(lastIndex));
return result;
}
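// Illustrative example, assuming a sorted input list:
//   partitionListOfFlowVersionsPerConfigChange(['v0.103.0', 'v0.104.0', 'v0.125.0', 'v0.126.0'])
//   // => [['v0.103.0'], ['v0.104.0'], ['v0.125.0', 'v0.126.0']]
// Each sub-list can then share a single generated .flowconfig.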
/**
 * Map every dependency and each of its versions to the corresponding
 * definition's path, recursively including that dependency's own dependencies.
 *
 * Then shuffle the result into a new Array<Array<>> so that the test group is
 * checked against all dependencies across their various supported versions.
*/
function getDepTestGroups(testGroup) {
const flowDirVersion = (0, _flowVersion.extractFlowDirFromFlowDirPath)(testGroup.id);
const depBasePath = (0, _npmLibDefs.getNpmLibDefDirFromNested)(testGroup.libDefPath);
const getMappedDepPaths = deps => {
return Object.keys(deps).map(depName => {
const nameSplit = depName.split('/');
const scope = nameSplit.length > 1 ? `${nameSplit[0]}/` : '';
const packageName = nameSplit.length > 1 ? nameSplit[1] : depName;
return deps[depName].map(version => {
const flowDirPath = `${depBasePath}${scope}${packageName}_${version}/${flowDirVersion}`;
const defPath = `${flowDirPath}/${packageName}_${version}.js`;
if (!_node.fs.existsSync(defPath)) {
throw new Error(_colors.default.red(`${depName}@${version} cannot be a dependency of ${testGroup.id} because either the dependency@version doesn't exist or they do not have matching flow version ranges`));
} // For the current dependency check if it has nested dependencies
let defDeps;
try {
defDeps = JSON.parse(_node.fs.readFileSync(`${flowDirPath}/config.json`, 'utf-8')).deps;
} catch (e) {}
return {
main: defPath,
// Create a recursive list of main def and def dep paths
// that are later used to inject into the test group so that
// dependencies can resolve their dependencies.
//
// Note: This strategy doesn't create an exhaustive list validating
// a dependency to each dep version of dependency's dependency.
// That isn't necessary in this instance and would be tested by
// the dependency's own test group.
deps: [...(defDeps ? getMappedDepPaths(defDeps).reduce((acc, cur) => {
return [...acc, cur[0].main, ...cur[0].deps];
}, []) : [])]
};
});
});
};
const mappedDepPaths = getMappedDepPaths(testGroup.deps);
const longestDep = mappedDepPaths.reduce((acc, cur) => {
if (cur.length > acc) {
return cur.length;
}
return acc;
}, 0);
const depGroup = [];
for (let i = 0, len = longestDep; i < len; i++) {
const newGroup = [];
mappedDepPaths.forEach(o => {
if (!o[i]) {
newGroup.push(o[0].main, ...o[0].deps);
} else {
newGroup.push(o[i].main, ...o[i].deps);
}
});
depGroup.push(newGroup);
}
return depGroup;
}
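// Illustrative sketch (hypothetical names and paths): for a test group whose
// config.json declares deps { react: ['v16.x.x', 'v17.x.x'] } and whose flow
// dir is 'flow_v0.104.x-', the result is one group of lib-def paths per
// dependency version, e.g.:
//
//   [
//     ['<defs>/npm/react_v16.x.x/flow_v0.104.x-/react_v16.x.x.js'],
//     ['<defs>/npm/react_v17.x.x/flow_v0.104.x-/react_v17.x.x.js']
//   ]
//
// so the group's tests run once against each supported dependency version.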
/**
* Given a TestGroup structure determine all versions of Flow that match the
* FlowVersion specification and, for each, run `flow check` on the test
* directory.
*/
async function runTestGroup(repoDirPath, testGroup, orderedFlowVersions) {
// Some older versions of Flow choke on ">"/"<"/"="
const testDirName = testGroup.id.replace(/\//g, '--').replace(/>/g, 'gt').replace(/</g, 'lt').replace(/=/g, 'eq');
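// e.g. a group id like 'foo_v1.x.x/>=v0.83.x' (hypothetical) becomes the
// directory name 'foo_v1.x.x--gteqv0.83.x'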
const testDirPath = _node.path.join(TEST_DIR, testDirName);
if (await _node.fs.exists(testDirPath)) {
throw new Error(`Trying to run ${testGroup.id}, but test dir already exists! ` + `Bailing out!`);
}
try {
await _node.fs.mkdir(testDirPath); // Copy files into the test dir
const destLibDefPath = _node.path.join(testDirPath, _node.path.basename(testGroup.libDefPath));
const copiedFileNames = new Set();
await P.all([P.all(testGroup.testFilePaths.map(async (filePath, idx) => {
// Because there could be multiple test files with the same basename,
// we disambiguate each one with a locally-unique index.
//
// i.e. underscore/v1.x.x/test-underscore.js
// underscore/v1.x.x/flow-v0.22.x/test-underscore.js
//
// Only mangles the name when there's a naming collision. Otherwise, uses the original.
const fileName = _node.path.basename(filePath);
const destBasename = copiedFileNames.has(fileName) ? `${idx}-${fileName}` : fileName;
copiedFileNames.add(destBasename);
await (0, _fileUtils.copyFile)(filePath, _node.path.join(testDirPath, destBasename));
})), await (0, _fileUtils.copyFile)(testGroup.libDefPath, destLibDefPath)]); // For each compatible version of Flow, run `flow check` and verify there
// are no errors.
const testGrpFlowSemVerRange = (0, _flowVersion.toSemverString)(testGroup.flowVersion);
const flowVersionsToRun = orderedFlowVersions.filter(flowVer => {
return semver.satisfies(flowVer, testGrpFlowSemVerRange);
}); // Windows doesn't have Flow binaries below v0.30.0, but we still have tests
// targeting those versions (e.g. redux_v3), so skip the group when there is nothing to run.
if (!flowVersionsToRun.length) {
return [];
}
const groups = partitionListOfFlowVersionsPerConfigChange(flowVersionsToRun);
const flowErrors = [];
const executeTests = async (depPaths = []) => {
for (const sublistOfFlowVersions of groups) {
const lowestFlowVersionRanInThisGroup = sublistOfFlowVersions[0];
await writeFlowConfig(repoDirPath, testDirPath, testGroup.libDefPath, lowestFlowVersionRanInThisGroup, depPaths);
const flowErrorsForThisGroup = await runFlowTypeDefTests([...sublistOfFlowVersions], testGroup.id, testDirPath);
flowErrors.push(...flowErrorsForThisGroup);
}
const lowestFlowVersionRan = flowVersionsToRun[0];
const lowestCapableFlowVersion = await findLowestCapableFlowVersion(repoDirPath, orderedFlowVersions, lowestFlowVersionRan, testDirPath, testGroup.libDefPath, depPaths);
if (lowestCapableFlowVersion !== lowestFlowVersionRan) {
console.log(`Tests for ${testGroup.id} ran successfully on flow ${lowestCapableFlowVersion}.
Consider setting ${lowestCapableFlowVersion} as the lower bound!`);
}
};
if (Object.keys(testGroup.deps).length > 0) {
const depsTestGroups = getDepTestGroups(testGroup);
for (const depPaths of depsTestGroups) {
await executeTests(depPaths);
}
} else {
await executeTests();
}
return flowErrors;
} finally {
if (await _node.fs.exists(testDirPath)) {
await (0, _fileUtils.recursiveRmdir)(testDirPath);
}
}
}
/**
 * Takes a list of flow version strings in the format `vx.x.x`, ordered from
 * latest to oldest, and returns a filtered list of flow versions depending on
 * the definition's testing requirements.
*/
function selectFlowTestVersions(orderedFlowVersions, flowVersion, numberOfFlowVersions) {
// This retains original logic in cases
// where we want all or specific flow versions for testing.
// This is a pretty uncommon use case though.
if (flowVersion.kind === 'all' || flowVersion.kind === 'specific') {
return orderedFlowVersions.slice(0, numberOfFlowVersions);
}
const {
upper,
lower
} = flowVersion;
const selectedFlowVersions = [];
const findSelectedFlowVersions = flowVersions => {
let versionExceedsLower = false;
while (selectedFlowVersions.length < numberOfFlowVersions && !versionExceedsLower) {
const version = flowVersions[selectedFlowVersions.length];
const [major, minor, patch] = version.substring(1).split('.');
if (lower && (lower.major === 'x' || lower.major <= Number(major)) && (lower.minor === 'x' || lower.minor <= Number(minor)) && (lower.patch === 'x' || lower.patch <= Number(patch))) {
selectedFlowVersions.push(version);
} else {
versionExceedsLower = true;
}
}
};
if (upper == null) {
findSelectedFlowVersions(orderedFlowVersions);
} else {
// If upper exists we should find the first version that satisfies
// upper and find the first `numberOfFlowVersions` until we hit the
// lower boundary.
const upperFlowVersion = orderedFlowVersions.findIndex(o => {
const [major, minor, patch] = o.substring(1).split('.');
if ((upper.major === 'x' || upper.major >= Number(major)) && (upper.minor === 'x' || upper.minor >= Number(minor)) && (upper.patch === 'x' || upper.patch >= Number(patch))) {
return true;
}
});
const upperBoundFlowVersions = orderedFlowVersions.slice(upperFlowVersion);
findSelectedFlowVersions(upperBoundFlowVersions);
}
return selectedFlowVersions;
}
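// Illustrative example, assuming a ranged FlowVersion whose bounds use
// { major, minor, patch } objects with 'x' wildcards (as destructured above):
//
//   selectFlowTestVersions(
//     ['v0.210.0', 'v0.209.0', 'v0.208.0', 'v0.207.0'],
//     { kind: 'ranged', upper: { major: 0, minor: 209, patch: 'x' }, lower: { major: 0, minor: 208, patch: 'x' } },
//     15
//   )
//   // => ['v0.209.0', 'v0.208.0']
//
// Versions above the upper bound are skipped, and selection stops once the
// lower bound is exceeded (or numberOfFlowVersions versions have been collected).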
async function runTests(repoDirPath, envDirPath, testPatterns, onlyChanged, numberOfFlowVersions) {
const testPatternRes = testPatterns.map(patt => new RegExp(patt, 'g'));
const testGroups = (await getTestGroups(repoDirPath, envDirPath, onlyChanged)).filter(testGroup => {
if (testPatternRes.length === 0) {
return true;
}
for (var i = 0; i < testPatternRes.length; i++) {
const pattern = testPatternRes[i];
if (testGroup.id.match(pattern) != null) {
return true;
} // If a definition's dependencies have modifications,
// we should run tests against the definition to ensure the changes
// to the dependency have not broken its dependents,
// by adding the matched definition to the test group.
const depsList = Object.keys(testGroup.deps);
if (depsList.length > 0) {
return depsList.some(dep => {
return testGroup.deps[dep].some(o => `${dep}_${o}`.match(pattern));
});
}
}
return false;
});
try {
// Create a temp dir to copy files into to run the tests
if (await _node.fs.exists(TEST_DIR)) {
await (0, _fileUtils.recursiveRmdir)(TEST_DIR);
}
await _node.fs.mkdir(TEST_DIR);
const results = new Map();
while (testGroups.length > 0) {
const testGroup = testGroups.shift(); // Prepare bin folder to collect flow instances
await removeTrashFromBinDir();
let orderedFlowVersions;
try {
orderedFlowVersions = await getOrderedFlowBinVersions(numberOfFlowVersions, testGroup.flowVersion);
} catch (e) {
orderedFlowVersions = await getCachedFlowBinVersions(numberOfFlowVersions, testGroup.flowVersion);
}
const testGroupErrors = await runTestGroup(repoDirPath, testGroup, orderedFlowVersions);
if (testGroupErrors.length > 0) {
const errors = results.get(testGroup.id) || [];
testGroupErrors.forEach(err => errors.push(err));
results.set(testGroup.id, errors);
}
}
return results;
} finally {
if (await _node.fs.exists(TEST_DIR)) {
await (0, _fileUtils.recursiveRmdir)(TEST_DIR);
}
}
}
const name = 'run-tests [testPatterns...]';
exports.name = name;
const description = 'Run definition tests for library definitions in the flow-typed project';
exports.description = description;
function setup(yargs) {
return yargs.usage(`$0 ${name} - ${description}`).positional('testPatterns', {
describe: 'Test patterns',
default: []
}).options({
path: {
describe: 'Override default path for libdef root (Mainly for testing purposes)',
type: 'string',
demandOption: false
},
onlyChanged: {
type: 'boolean',
description: 'Run only changed definition tests',
demandOption: false
},
numberOfFlowVersions: {
type: 'number',
description: 'Only run against the latest X versions of flow',
demandOption: false
}
});
}
async function run(argv) {
if (!(await _node.fs.exists(BIN_DIR))) {
await _node.fs.mkdir(BIN_DIR);
}
if (!(0, _isInFlowTypedRepo.default)()) {
console.log('This command only works in a clone of flowtype/flow-typed. ' + 'It is a tool used to run tests of the library definitions in the flow-typed project.');
return 1;
}
const testPatterns = Array.isArray(argv.testPatterns) ? argv.testPatterns.map(String) : [];
const onlyChanged = Boolean(argv.onlyChanged);
const numberOfFlowVersions = Number(argv.numberOfFlowVersions) || 15;
const cwd = process.cwd();
const basePath = argv.path ? String(argv.path) : cwd;
const cwdDefsNPMPath = _node.path.join(basePath, 'definitions', 'npm');
const repoDirPath = (await _node.fs.exists(cwdDefsNPMPath)) ? cwdDefsNPMPath : _node.path.join(__dirname, '..', '..', '..', 'definitions', 'npm');
const cwdDefsEnvPath = _node.path.join(basePath, 'definitions', 'environments');
const envDirPath = (await _node.fs.exists(cwdDefsEnvPath)) ? cwdDefsEnvPath : _node.path.join(__dirname, '..', '..', '..', 'definitions', 'environments');
console.info(`Running ${onlyChanged ? 'changed' : ''} definition tests against latest ${_colors.default.green(numberOfFlowVersions.toString())} flow versions in ${_node.path.join(repoDirPath, '..')}...\n`);
let results;
try {
results = await runTests(repoDirPath, envDirPath, testPatterns, onlyChanged, numberOfFlowVersions);
} catch (e) {
if (e instanceof _ValidationError.ValidationError) {
console.error(e.message);
return 1;
} else {
throw e;
}
}
Array.from(results).forEach(([testGroupName, errors]) => {
console.log('ERROR: %s', testGroupName);
errors.forEach(err => console.log('* %s\n', err.split('\n').map((line, idx) => {
return idx === 0 ? line : ' ' + line;
}).join('\n')));
});
if (results.size === 0) {
console.log('\n ✨ All tests passed!');
return 0;
}
return 1;
}
;