UNPKG

flow-typed

Version:

A repository of high-quality Flow type definitions

585 lines (457 loc) 18.8 kB
"use strict";

// Utilities for maintaining the local flow-typed definitions cache: a git
// checkout of the flow-typed repo under ~/.flow-typed/repo that is cloned on
// first use and periodically rebased, plus helpers to scan it for libdefs and
// filter them by package/flow version.

Object.defineProperty(exports, "__esModule", { value: true });
exports._cacheRepoAssure = exports._REMOTE_REPO_URL = exports._LAST_UPDATED_FILE = exports._CACHE_REPO_GIT_DIR = exports._CACHE_REPO_EXPIRY = exports._CACHE_REPO_DIR = exports.TEST_FILE_NAME_RE = void 0;
exports._ensureCacheRepo = ensureCacheRepo;
exports.filterLibDefs = filterLibDefs;
exports.getCacheLibDefs = getCacheLibDefs;
exports.getLibDefs = getLibDefs;
exports.parseRepoDirItem = parseRepoDirItem;
exports.updateCacheRepo = updateCacheRepo;

var _semver = _interopRequireDefault(require("semver"));
var _git = require("./git.js");
var _fileUtils = require("./fileUtils.js");
var _node = require("./node.js");
var _semver2 = require("./semver.js");
var _flowVersion = require("./flowVersion.js");
var _ValidationError = require("./ValidationError");

function _interopRequireDefault(obj) {
  return obj && obj.__esModule ? obj : { default: obj };
}

const P = Promise;

// Test files inside a libdef directory must be named `test_*.js`.
const TEST_FILE_NAME_RE = /^test_.*\.js$/;
exports.TEST_FILE_NAME_RE = TEST_FILE_NAME_RE;

const CACHE_DIR = _node.path.join(_node.os.homedir(), '.flow-typed');
const CACHE_REPO_DIR = _node.path.join(CACHE_DIR, 'repo');
exports._CACHE_REPO_DIR = CACHE_REPO_DIR;
const REMOTE_REPO_URL = 'https://github.com/flowtype/flow-typed.git';
exports._REMOTE_REPO_URL = REMOTE_REPO_URL;
// Timestamp file recording when the cache checkout was last synced.
const LAST_UPDATED_FILE = _node.path.join(CACHE_DIR, 'lastUpdated');
exports._LAST_UPDATED_FILE = LAST_UPDATED_FILE;

/**
 * Clone the remote flow-typed repo into the local cache dir and stamp the
 * last-updated file. Rethrows clone failures after logging (when verbose).
 */
async function cloneCacheRepo(verbose) {
  await (0, _fileUtils.mkdirp)(CACHE_REPO_DIR);
  try {
    await (0, _git.cloneInto)(REMOTE_REPO_URL, CACHE_REPO_DIR);
  } catch (e) {
    writeVerbose(verbose, 'ERROR: Unable to clone the local cache repo.');
    throw e;
  }
  await _node.fs.writeFile(LAST_UPDATED_FILE, String(Date.now()));
}

const CACHE_REPO_GIT_DIR = _node.path.join(CACHE_REPO_DIR, '.git');
exports._CACHE_REPO_GIT_DIR = CACHE_REPO_GIT_DIR;

/**
 * Rebase the local cache checkout onto the remote mainline, or (re)clone it
 * if the checkout is missing. Returns false only when a rebase was attempted
 * and failed (e.g. no network); clone failures still throw via cloneCacheRepo.
 */
async function rebaseCacheRepo(verbose) {
  const haveCheckout = (await _node.fs.exists(CACHE_REPO_DIR)) && (await _node.fs.exists(CACHE_REPO_GIT_DIR));
  if (haveCheckout) {
    try {
      await (0, _git.rebaseRepoMainline)(CACHE_REPO_DIR);
    } catch (e) {
      writeVerbose(verbose, 'ERROR: Unable to rebase the local cache repo. ' + e.message);
      return false;
    }
    await _node.fs.writeFile(LAST_UPDATED_FILE, String(Date.now()));
    return true;
  } else {
    await cloneCacheRepo(verbose);
    return true;
  }
}

/**
 * Utility wrapper for ensureCacheRepo with an update expiry of 0 hours.
 */
async function updateCacheRepo(verbose) {
  return await ensureCacheRepo(verbose, 0);
}

/**
 * Ensure that the CACHE_REPO_DIR exists and is recently rebased.
 * (else: create/rebase it)
 */
const CACHE_REPO_EXPIRY = 1000 * 60; // 1 minute
exports._CACHE_REPO_EXPIRY = CACHE_REPO_EXPIRY;

// Process-wide memo so concurrent/repeated calls share one in-flight check.
const _cacheRepoAssure = {
  lastAssured: 0,
  pendingAssure: Promise.resolve()
};
exports._cacheRepoAssure = _cacheRepoAssure;

async function ensureCacheRepo(verbose, cacheRepoExpiry = CACHE_REPO_EXPIRY) {
  // Only re-run rebase checks if a check hasn't been run in the last 5 minutes
  if (_cacheRepoAssure.lastAssured + 5 * 1000 * 60 >= Date.now()) {
    return _cacheRepoAssure.pendingAssure;
  }
  _cacheRepoAssure.lastAssured = Date.now();
  const prevAssure = _cacheRepoAssure.pendingAssure;
  // Chain onto any previous assurance so checks never overlap.
  return _cacheRepoAssure.pendingAssure = prevAssure.then(() => async function () {
    // Kick off both existence checks before awaiting either.
    const repoDirExists = _node.fs.exists(CACHE_REPO_DIR);
    const repoGitDirExists = _node.fs.exists(CACHE_REPO_GIT_DIR);
    if (!(await repoDirExists) || !(await repoGitDirExists)) {
      writeVerbose(verbose, '• flow-typed cache not found, fetching from GitHub...', false);
      await cloneCacheRepo(verbose);
      writeVerbose(verbose, 'done.');
    } else {
      let lastUpdated = 0;
      if (await _node.fs.exists(LAST_UPDATED_FILE)) {
        // If the LAST_UPDATED_FILE has anything other than just a number in
        // it, just assume we need to update.
        const lastUpdatedRaw = await _node.fs.readFile(LAST_UPDATED_FILE);
        const lastUpdatedNum = parseInt(lastUpdatedRaw, 10);
        if (String(lastUpdatedNum) === String(lastUpdatedRaw)) {
          lastUpdated = lastUpdatedNum;
        }
      }
      if (lastUpdated + cacheRepoExpiry < Date.now()) {
        writeVerbose(verbose, '• rebasing flow-typed cache...', false);
        const rebaseSuccessful = await rebaseCacheRepo(verbose);
        if (rebaseSuccessful) {
          writeVerbose(verbose, 'done.');
        } else {
          writeVerbose(verbose, "\nNOTE: Unable to rebase local cache! If you don't currently " + "have internet connectivity, no worries -- we'll update the " + 'local cache the next time you do.\n');
        }
      }
    }
  }());
}

// Exported for tests -- since we really want this part well-tested.
// Parses one package directory and appends its libdefs to the given array.
async function addLibDefs(pkgDirPath, libDefs) {
  const parsedDirItem = parseRepoDirItem(pkgDirPath);
  (await parseLibDefsFromPkgDir(parsedDirItem, pkgDirPath)).forEach(libDef => libDefs.push(libDef));
}

/**
 * Given a 'definitions/...' dir, return a list of LibDefs that it contains.
 */
async function getLibDefs(defsDir) {
  const libDefs = [];
  const defsDirItems = await _node.fs.readdir(defsDir);
  await P.all(defsDirItems.map(async item => {
    if ((0, _fileUtils.isExcludedFile)(item)) return;
    const itemPath = _node.path.join(defsDir, item);
    const itemStat = await _node.fs.stat(itemPath);
    if (itemStat.isDirectory()) {
      if (item.charAt(0) === '@') {
        // directory is of the form '@<scope>', so go one level deeper
        const scope = item;
        const defsDirItems = await _node.fs.readdir(itemPath);
        await P.all(defsDirItems.map(async item => {
          if ((0, _fileUtils.isExcludedFile)(item)) return;
          const itemPath = _node.path.join(defsDir, scope, item);
          const itemStat = await _node.fs.stat(itemPath);
          if (itemStat.isDirectory()) {
            // itemPath is a lib dir
            await addLibDefs(itemPath, libDefs);
          } else {
            const error = `Expected only directories in the 'definitions/npm/@<scope>' directory!
Please remove or change ${itemPath}`;
            throw new _ValidationError.ValidationError(error);
          }
        }));
      } else {
        // itemPath is a lib dir
        await addLibDefs(itemPath, libDefs);
      }
    } else {
      const error = `Expected only directories in the 'definitions/npm' directory! Please remove or change ${itemPath}`;
      throw new _ValidationError.ValidationError(error);
    }
  }));
  return libDefs;
}

// Parses a flow-version directory name (e.g. "flow_v0.100.x-") into a
// structured FlowVersion via flowVersion.parseDirString.
function parsePkgFlowDirVersion(pkgFlowDirPath) {
  const pkgFlowDirName = _node.path.basename(pkgFlowDirPath);
  return (0, _flowVersion.parseDirString)(pkgFlowDirName);
}

/**
 * Given a parsed package name and version and a path to the package directory
 * on disk, scan the directory and generate a list of LibDefs for each
 * flow-versioned definition file.
 */
async function parseLibDefsFromPkgDir({ pkgName, pkgVersion }, pkgDirPath) {
  const pkgVersionStr = (0, _semver2.versionToString)(pkgVersion);
  const pkgDirItems = await _node.fs.readdir(pkgDirPath);
  const commonTestFiles = [];
  const flowDirs = [];
  pkgDirItems.forEach(pkgDirItem => {
    const pkgDirItemPath = _node.path.join(pkgDirPath, pkgDirItem);
    const pkgDirItemStat = _node.fs.statSync(pkgDirItemPath);
    if (pkgDirItemStat.isFile()) {
      // Ignore vim swap files.
      if (_node.path.extname(pkgDirItem) === '.swp') {
        return;
      }
      // Test files at the package level apply to every flow-version dir.
      const isValidTestFile = validateTestFile(pkgDirItemPath);
      if (isValidTestFile) {
        commonTestFiles.push(pkgDirItemPath);
      }
    } else if (pkgDirItemStat.isDirectory()) {
      flowDirs.push([pkgDirItemPath, parsePkgFlowDirVersion(pkgDirItemPath)]);
    } else {
      throw new _ValidationError.ValidationError('Unexpected directory item: ' + pkgDirItemPath);
    }
  });
  // Flow-version ranges within one package must not overlap.
  if (!(0, _flowVersion.disjointVersionsAll)(flowDirs.map(([_, ver]) => ver))) {
    throw new _ValidationError.ValidationError(`Flow versions not disjoint on ${pkgName}`);
  }
  if (flowDirs.length === 0) {
    throw new _ValidationError.ValidationError(`No libdef files found in ${pkgDirPath}!`);
  }
  const libDefs = [];
  await P.all(flowDirs.map(async ([flowDirPath, flowVersion]) => {
    var _configPath;
    const testFilePaths = [].concat(commonTestFiles);
    // Scoped packages store the scope in the parent dir, so the dir name
    // itself is only the unscoped part.
    const basePkgName = pkgName.charAt(0) === '@' ? pkgName.split(_node.path.sep).pop() : pkgName;
    const libDefFileName = pkgVersionStr === 'vx.x.x' ? `${basePkgName}.js` : `${basePkgName}_${pkgVersionStr}.js`;
    let libDefFilePath;
    let configPath;
    (await _node.fs.readdir(flowDirPath)).forEach(flowDirItem => {
      const flowDirItemPath = _node.path.join(flowDirPath, flowDirItem);
      const flowDirItemStat = _node.fs.statSync(flowDirItemPath);
      if (flowDirItemStat.isFile()) {
        // If we couldn't discern the package name, we've already recorded an
        // error for that -- so try to avoid spurious downstream errors.
        if (pkgName === 'ERROR') {
          return;
        }
        if (_node.path.extname(flowDirItem) === '.swp') {
          return;
        }
        if (flowDirItem === 'config.json') {
          configPath = _node.path.join(flowDirPath, flowDirItem);
          return;
        }
        if (flowDirItem === libDefFileName) {
          libDefFilePath = _node.path.join(flowDirPath, flowDirItem);
          return;
        }
        const isValidTestFile = validateTestFile(flowDirItemPath);
        if (isValidTestFile) {
          testFilePaths.push(flowDirItemPath);
        }
      } else {
        const error = 'Unexpected directory item: ' + flowDirItemPath;
        throw new _ValidationError.ValidationError(error);
      }
    });
    if (libDefFilePath == null) {
      libDefFilePath = _node.path.join(flowDirPath, libDefFileName);
      if (pkgName !== 'ERROR') {
        const error = `No libdef file found in ${flowDirPath}`;
        throw new _ValidationError.ValidationError(error);
      }
      return;
    }
    libDefs.push({
      pkgName,
      pkgVersionStr,
      configPath: (_configPath = configPath) !== null && _configPath !== void 0 ? _configPath : null,
      flowVersion: flowVersion,
      flowVersionStr: (0, _flowVersion.toDirString)(flowVersion),
      path: libDefFilePath,
      testFilePaths
    });
  }));
  return libDefs;
}

/**
 * Given the path to a directory item in the 'definitions' directory, parse the
 * directory's name into a package name and version.
 */
const REPO_DIR_ITEM_NAME_RE = /^(.*)_v([0-9]+)\.([0-9]+|x)\.([0-9]+|x)(-.*)?$/;

function parseRepoDirItem(dirItemPath) {
  const dirItem = _node.path.basename(dirItemPath);
  // env definitions don't have versions nor need any sort of name validation
  if (dirItemPath.includes('definitions/environments')) {
    return {
      pkgName: dirItem,
      pkgVersion: { major: 'x', minor: 'x', patch: 'x', prerel: null }
    };
  }
  const itemMatches = dirItem.match(REPO_DIR_ITEM_NAME_RE);
  if (itemMatches == null) {
    const error = `'${dirItem}' is a malformed definitions/npm/ directory name! ` + `Expected the name to be formatted as <PKGNAME>_v<MAJOR>.<MINOR>.<PATCH>`;
    throw new _ValidationError.ValidationError(error);
  }
  const [_, pkgName, majorStr, minorStr, patchStr, prerel] = itemMatches;
  // Parent dir name; an '@<scope>' parent means this is a scoped package.
  const item = _node.path.dirname(dirItemPath).split(_node.path.sep).pop();
  const major = validateVersionNumPart(majorStr, 'major', dirItemPath);
  const minor = validateVersionPart(minorStr, 'minor', dirItemPath);
  const patch = validateVersionPart(patchStr, 'patch', dirItemPath);
  return {
    pkgName: item.charAt(0) === '@' ? `${item}${_node.path.sep}${pkgName}` : pkgName,
    // Strip the leading '-' from any prerelease suffix.
    pkgVersion: { major, minor, patch, prerel: prerel != null ? prerel.substr(1) : prerel }
  };
}

/**
 * Given a path to an assumed test file, ensure that it is named as expected.
 */
function validateTestFile(testFilePath) {
  const testFileName = _node.path.basename(testFilePath);
  return TEST_FILE_NAME_RE.test(testFileName);
}

/**
 * Given a number-only part of a version string (i.e. the `major` part), parse
 * the string into a number.
 */
function validateVersionNumPart(part, partName, context) {
  const num = parseInt(part, 10);
  // Round-trip check rejects strings like '01' or '1abc'.
  if (String(num) !== part) {
    const error = `'${context}': Invalid ${partName} number: '${part}'. Expected a number.`;
    throw new _ValidationError.ValidationError(error);
  }
  return num;
}

/**
 * Given a number-or-wildcard part of a version string (i.e. a `minor` or
 * `patch` part), parse the string into either a number or 'x'.
 */
function validateVersionPart(part, partName, context) {
  if (part === 'x') {
    return part;
  }
  return validateVersionNumPart(part, partName, context);
}

/**
 * Given a path to a 'definitions' dir, assert that the currently-running
 * version of the CLI is compatible with the repo.
 */
async function verifyCLIVersion(defsDirPath) {
  var _semver$coerce;
  const metadataFilePath = _node.path.join(defsDirPath, '.cli-metadata.json');
  const metadata = await _node.fs.readJson(metadataFilePath);
  if (!metadata.compatibleCLIRange) {
    throw new Error(`Unable to find the 'compatibleCLIRange' property in ` + `${metadataFilePath}. You might need to update to a newer version of ` + `the Flow CLI.`);
  }
  const minCLIVersion = metadata.compatibleCLIRange;
  const thisCLIVersion = require('../../package.json').version;
  // Coerce strips prerelease/build tags so e.g. '3.9.0-rc' satisfies ranges.
  if (!_semver.default.satisfies((_semver$coerce = _semver.default.coerce(thisCLIVersion)) !== null && _semver$coerce !== void 0 ? _semver$coerce : thisCLIVersion, minCLIVersion)) {
    throw new Error(`Please upgrade your CLI version! This CLI is version ` + `${thisCLIVersion}, but the latest flow-typed definitions are only ` + `compatible with flow-typed@${minCLIVersion}`);
  }
}

/**
 * Helper function to write verbose output only when an output stream was
 * provided.
 */
function writeVerbose(stream, msg, writeNewline = true) {
  if (stream != null) {
    stream.write(msg + (writeNewline ? '\n' : ''));
  }
}

/**
 * Get a list of LibDefs from the flow-typed cache repo checkout.
 *
 * If the repo checkout does not exist or is out of date, it will be
 * created/updated automatically first.
 */
const CACHE_REPO_DEFS_DIR = _node.path.join(CACHE_REPO_DIR, 'definitions', 'npm');

async function getCacheLibDefs(verbose = process.stdout) {
  await ensureCacheRepo(verbose);
  await verifyCLIVersion(_node.path.join(CACHE_REPO_DIR, 'definitions'));
  return getLibDefs(CACHE_REPO_DEFS_DIR);
}

// Case-insensitive package-name comparison.
function packageNameMatch(a, b) {
  return a.toLowerCase() === b.toLowerCase();
}

function libdefMatchesPackageVersion(pkgSemver, defVersionRaw) {
  // The libdef version should be treated as a semver prefixed by a carat
  // (i.e: "foo_v2.2.x" is the same range as "^2.2.x")
  // UNLESS it is prefixed by the equals character (i.e. "foo_=v2.2.x")
  let defVersion = defVersionRaw;
  if (defVersionRaw[0] !== '=' && defVersionRaw[0] !== '^') {
    defVersion = '^' + defVersionRaw;
  }
  if (_semver.default.valid(pkgSemver)) {
    // test the single package version against the libdef range
    return _semver.default.satisfies(pkgSemver, defVersion);
  }
  if (_semver.default.valid(defVersion)) {
    // test the single defVersion agains the package range
    return _semver.default.satisfies(defVersion, pkgSemver);
  }
  // Both sides are ranges: intersect them manually via the libdef's bounds.
  const pkgRange = new _semver.default.Range(pkgSemver);
  const defRange = new _semver.default.Range(defVersion);
  if (defRange.set[0].length !== 2) {
    throw Error('Invalid libDef version, It appears to be a non-contiguous range.');
  }
  const defLowerB = defRange.set[0][0].semver.version;
  const defUpperB = defRange.set[0][1].semver.version;
  if (_semver.default.gtr(defLowerB, pkgSemver) || _semver.default.ltr(defUpperB, pkgSemver)) {
    return false;
  }
  const pkgLowerB = pkgRange.set[0][0].semver.version;
  return defRange.test(pkgLowerB);
}

/**
 * Filter a given list of LibDefs down using a specified filter.
 */
function filterLibDefs(defs, filter) {
  return defs.filter(def => {
    let filterMatch = false;
    switch (filter.type) {
      case 'exact':
        filterMatch = packageNameMatch(def.pkgName, filter.pkgName) && libdefMatchesPackageVersion(filter.pkgVersionStr, def.pkgVersionStr);
        break;
      case 'exact-name':
        filterMatch = packageNameMatch(def.pkgName, filter.term);
        break;
      case 'fuzzy':
        filterMatch = def.pkgName.toLowerCase().indexOf(filter.term.toLowerCase()) !== -1;
        break;
      default:
        throw new Error(`'${filter.type}' is an unexpected filter type! This should never ` + `happen!`);
    }
    if (!filterMatch) {
      return false;
    }
    // Optionally narrow further by the Flow version the caller is running.
    const filterFlowVerStr = filter.flowVersionStr;
    if (filterFlowVerStr) {
      const { flowVersion } = def;
      switch (flowVersion.kind) {
        case 'all':
          return _semver.default.satisfies(filterFlowVerStr, def.flowVersionStr);
        case 'specific':
          return _semver.default.satisfies(filterFlowVerStr, def.flowVersionStr);
        case 'ranged':
          const { upper } = flowVersion;
          if (upper) {
            // Split the bounded range into two open-ended checks.
            const lowerSpecific = { kind: 'ranged', upper: null, lower: flowVersion.lower };
            const lowerSpecificSemver = (0, _flowVersion.toSemverString)(lowerSpecific);
            const upperSpecificSemver = (0, _flowVersion.toSemverString)({ kind: 'specific', ver: upper });
            return _semver.default.satisfies(filterFlowVerStr, lowerSpecificSemver) && _semver.default.satisfies(filterFlowVerStr, upperSpecificSemver);
          } else {
            return _semver.default.satisfies(filterFlowVerStr, (0, _flowVersion.toSemverString)(def.flowVersion));
          }
        default:
          // Compiled exhaustiveness check from the Flow source.
          flowVersion;
          throw new Error('Unexpected FlowVersion kind!');
      }
    }
    return true;
  }).sort((a, b) => {
    // Sort newest package version first; 'x' wildcards compare as 0.
    const aZeroed = a.pkgVersionStr.replace(/x/g, '0');
    const bZeroed = b.pkgVersionStr.replace(/x/g, '0');
    const pkgCompare = _semver.default.compare(aZeroed, bZeroed);
    if (pkgCompare !== 0) return -pkgCompare;
    const aFlowVersionStr = a.flowVersionStr;
    const bFlowVersionStr = b.flowVersionStr;
    if (aFlowVersionStr == null) return 1;
    if (bFlowVersionStr == null) return -1;
    // Tie-break on Flow version, newest first.
    const aFlowVersion = (0, _flowVersion.parseDirString)(aFlowVersionStr);
    const bFlowVersion = (0, _flowVersion.parseDirString)(bFlowVersionStr);
    return -1 * (0, _flowVersion.compareFlowVersionAsc)(aFlowVersion, bFlowVersion);
  });
}