// snyk — library and CLI utility (webpack-bundled chunk 409)
// Scraped package-page metadata: 1,136 lines (974 loc), 33.6 kB, JavaScript
exports.id = 409;
exports.ids = [409];
exports.modules = {
/***/ 46103:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getVersion = void 0;
const debugModule = __webpack_require__(15158);
const debug = debugModule('snyk');
const child_process_1 = __webpack_require__(63129);
/**
 * Runs an `npm <method>` command for the given packages.
 *
 * @param {string} method - npm sub-command, e.g. 'install' or 'uninstall'
 * @param {string|string[]} [packages] - package spec(s) to pass to npm
 * @param {boolean} live - when falsy this is a dry run and nothing executes
 * @param {string} [cwd] - working directory (defaults to process.cwd())
 * @param {string[]} [flags] - extra CLI flags; defaults to ['--save'] when
 *   packages are given and no flags were supplied
 * @returns {Promise<void>} resolves on success, rejects with the exec error
 *   or an Error with `code = 'FAIL_UPDATE'` when npm printed 'ERR!'
 */
function npm(method, packages, live, cwd, flags) {
  const flagList = flags || [];
  let packageList = packages || [];
  if (!Array.isArray(packageList)) {
    packageList = [packageList];
  }
  // only if we have packages, then always save, otherwise the command might
  // be something like `npm shrinkwrap'
  if (packageList.length && !flagList.length) {
    flagList.push('--save');
  }
  method += ' ' + flagList.join(' ');
  return new Promise((resolve, reject) => {
    const cmd = 'npm ' + method + ' ' + packageList.join(' ');
    const workingDir = cwd || process.cwd();
    debug('%s$ %s', workingDir, cmd);
    if (!live) {
      debug('[skipping - dry run]');
      return resolve();
    }
    child_process_1.exec(cmd, { cwd: workingDir }, (error, stdout, stderr) => {
      if (error) {
        return reject(error);
      }
      // npm signals failures on stderr with an 'ERR!' prefix
      if (stderr.indexOf('ERR!') !== -1) {
        console.error(stderr.trim());
        const e = new Error('npm update issues: ' + stderr.trim());
        e.code = 'FAIL_UPDATE';
        return reject(e);
      }
      debug('npm %s complete', method);
      resolve();
    });
  });
}
exports.default = npm;
/**
 * Resolves with the output of `npm --version` (raw stdout, which includes
 * the trailing newline npm prints). Rejects with the exec error on failure.
 *
 * @returns {Promise<string>}
 */
function getVersion() {
  return new Promise((resolve, reject) => {
    const options = { cwd: process.cwd() };
    child_process_1.exec('npm --version', options, (error, stdout) => {
      if (error) {
        reject(error);
        return;
      }
      resolve(stdout);
    });
  });
}
exports.getVersion = getVersion;
/***/ }),
/***/ 17240:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
;
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fs = __webpack_require__(35747);
const analytics = __webpack_require__(82744);
const debugModule = __webpack_require__(15158);
const request_1 = __webpack_require__(52050);
const debug = debugModule('snyk:fetch-patch');
/**
 * Downloads a patch from `patchUrl` and writes it to `patchFilename`.
 *
 * On any failure (request error, missing body, non-200 status, write error)
 * the failure is recorded in analytics under 'patch-fetch-fail' and a fresh
 * Error with a generic message is thrown.
 *
 * @param {string} patchUrl - URL to fetch the patch from
 * @param {string} patchFilename - destination path on disk
 * @returns {Promise<string>} the patchFilename that was written
 */
async function getPatchFile(patchUrl, patchFilename) {
  try {
    const response = await request_1.makeRequest({ url: patchUrl });
    const isUsable =
      response &&
      response.res &&
      response.body &&
      response.res.statusCode === 200;
    if (!isUsable) {
      // thrown value is the raw response; the catch below extracts
      // message/statusCode from it for analytics
      throw response;
    }
    fs.writeFileSync(patchFilename, response.body);
    debug(`Fetched patch from ${patchUrl} to ${patchFilename}, patch size ${response.body.length} bytes`);
  } catch (error) {
    const errorMessage = `Failed to fetch patch from ${patchUrl} to ${patchFilename}`;
    debug(errorMessage, error);
    analytics.add('patch-fetch-fail', {
      message: (error && error.message) || errorMessage,
      code: error && error.res && error.res.statusCode,
      patchFilename,
      patchUrl,
    });
    throw new Error(errorMessage);
  }
  return patchFilename;
}
exports.default = getPatchFile;
/***/ }),
/***/ 96284:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.yarn = void 0;
const Debug = __webpack_require__(15158);
const child_process_1 = __webpack_require__(63129);
const errors_1 = __webpack_require__(55191);
const debug = Debug('snyk');
/**
 * Runs a `yarn <method>` command for the given packages.
 *
 * @param {string} method - yarn sub-command, e.g. 'add' or 'remove'
 * @param {string|string[]} [packages] - package spec(s) to pass to yarn
 * @param {boolean} live - when falsy this is a dry run and nothing executes
 * @param {string} [cwd] - working directory (defaults to process.cwd())
 * @param {string[]} [flags] - extra CLI flags (e.g. ['--dev'])
 * @returns {Promise<void>} resolves on success, rejects with the exec error
 *   or a CustomError (strCode 'FAIL_UPDATE', code 422) when yarn printed 'ERR!'
 */
function yarn(method, packages, live, cwd, flags) {
  const flagList = flags || [];
  let packageList = packages || [];
  if (!Array.isArray(packageList)) {
    packageList = [packageList];
  }
  method += ' ' + flagList.join(' ');
  return new Promise((resolve, reject) => {
    const cmd = 'yarn ' + method + ' ' + packageList.join(' ');
    const workingDir = cwd || process.cwd();
    debug('%s$ %s', workingDir, cmd);
    if (!live) {
      debug('[skipping - dry run]');
      return resolve();
    }
    child_process_1.exec(cmd, { cwd: workingDir }, (error, stdout, stderr) => {
      if (error) {
        return reject(error);
      }
      // yarn signals failures on stderr with an 'ERR!' prefix
      if (stderr.indexOf('ERR!') !== -1) {
        console.error(stderr.trim());
        const e = new errors_1.CustomError('Yarn update issues: ' + stderr.trim());
        e.strCode = 'FAIL_UPDATE';
        e.code = 422;
        return reject(e);
      }
      debug('yarn %s complete', method);
      resolve();
    });
  });
}
exports.yarn = yarn;
/***/ }),
/***/ 64029:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const { v4: uuidv4 } = __webpack_require__(42277);
const debug = __webpack_require__(15158)('snyk');
const diff = __webpack_require__(88507);
const exec = __webpack_require__(63129).exec;
const path = __webpack_require__(85622);
const fs = __webpack_require__(35747);
const semver = __webpack_require__(36625);
const { addDataAndSend } = __webpack_require__(82744);
/**
 * Applies a downloaded patch file to the package located at `vuln.source`.
 *
 * Skips (resolves without error) when package.json cannot be read at the
 * target, or when the installed version does not satisfy the patch's
 * version range. On patch failure the error is converted by `patchError`
 * into a reportable FAIL_PATCH error and the promise rejects with it.
 *
 * @param {string} patchFileName - filename of the fetched patch (relative to the target dir)
 * @param {object} vuln - vulnerability record (uses .source, .patches.version, .id, ...)
 * @param {boolean} live - false means dry run (jsDiff will not write files)
 * @param {string} patchUrl - URL the patch came from, for error reporting
 * @returns {Promise<void>}
 */
function applyPatch(patchFileName, vuln, live, patchUrl) {
  let cwd = vuln.source;
  return new Promise((resolve, reject) => {
    if (!cwd) {
      cwd = process.cwd();
    }
    const relative = path.relative(process.cwd(), cwd);
    debug('DRY RUN: relative: %s', relative);
    try {
      let pkg = {};
      const packageJsonPath = path.resolve(relative, 'package.json');
      try {
        const packageJson = fs.readFileSync(packageJsonPath);
        pkg = JSON.parse(packageJson);
        debug('package at patch target location: %s@%s', pkg.name, pkg.version);
      } catch (err) {
        // no readable package.json at the target: nothing we can patch
        debug(
          'Failed loading package.json at %s. Skipping patch!',
          packageJsonPath,
          err,
        );
        return resolve();
      }
      const versionOfPackageToPatch = pkg.version;
      const patchableVersionsRange = vuln.patches.version;
      const isSemverMatch = semver.satisfies(
        versionOfPackageToPatch,
        patchableVersionsRange,
      );
      // fall back to a coerced exact-version comparison in case the range
      // string is not a well-formed semver range
      const isVersionMatch = semver.satisfies(
        versionOfPackageToPatch,
        semver.valid(semver.coerce(vuln.patches.version)),
      );
      if (isSemverMatch || isVersionMatch) {
        debug(
          'Patch version range %s matches package version %s',
          patchableVersionsRange,
          versionOfPackageToPatch,
        );
      } else {
        debug(
          'Patch version range %s does not match package version %s. Skipping patch!',
          patchableVersionsRange,
          versionOfPackageToPatch,
        );
        return resolve();
      }
      const patchContent = fs.readFileSync(
        path.resolve(relative, patchFileName),
        'utf8',
      );
      jsDiff(patchContent, relative, live)
        .then(() => {
          debug('patch succeed');
          resolve();
        })
        // BUG FIX: a rejected jsDiff promise previously had no handler, so
        // this outer promise stayed pending forever and the failure surfaced
        // only as an unhandled rejection. Route async patch failures through
        // the same patchError path as the synchronous catch below.
        .catch((error) => {
          debug('patch command failed', relative, error);
          patchError(error, relative, vuln, patchUrl).catch(reject);
        });
    } catch (error) {
      debug('patch command failed', relative, error);
      // patchError always rejects; its rejection value carries FAIL_PATCH
      patchError(error, relative, vuln, patchUrl).catch(reject);
    }
  });
}
/**
 * Applies unified-diff content to files under `relative` using the `diff`
 * package's applyPatches driver.
 *
 * Patched content is staged in memory (`patchedFiles`) and only written to
 * disk in the `complete` callback when `live` is true; a dry run resolves
 * without touching the filesystem (beyond the `.orig` backups made while
 * loading). A `.orig` backup of each loaded file is written so a later run
 * can revert the patch.
 *
 * @param {string} patchContent - the raw unified diff text
 * @param {string} relative - directory (relative to cwd) the diff paths are resolved against
 * @param {boolean} live - false = dry run, do not write patched files
 * @returns {Promise<void>}
 */
function jsDiff(patchContent, relative, live) {
  // staging area: fileName -> patched content (string), or null when the
  // patch renamed the file away and the old path must be unlinked
  const patchedFiles = {};
  return new Promise((resolve, reject) => {
    diff.applyPatches(patchContent, {
      // supplies the current content of the file a hunk targets
      loadFile: function(index, callback) {
        try {
          const fileName = trimUpToFirstSlash(index.oldFileName);
          // a file already patched by an earlier hunk is read from staging,
          // not from disk, so hunks compose
          if (patchedFiles[fileName]) {
            return callback(null, patchedFiles[fileName]);
          }
          const filePath = path.resolve(relative, fileName);
          const content = fs.readFileSync(filePath, 'utf8');
          // create an `.orig` copy of the file prior to patching it
          // used in case we need to revert a patch
          const origFilePath = filePath + '.orig';
          fs.writeFileSync(origFilePath, content);
          callback(null, content);
        } catch (err) {
          // collect patch metadata for error analysis
          err.patchIssue = JSON.stringify(index);
          callback(err);
        }
      },
      // receives the result of applying one file's hunks
      patched: function(index, content, callback) {
        try {
          if (content === false) {
            // `false` means the patch does not match the original content.
            const error = new Error('Found a mismatching patch');
            error.patchIssue = JSON.stringify(index);
            throw error;
          }
          const newFileName = trimUpToFirstSlash(index.newFileName);
          const oldFileName = trimUpToFirstSlash(index.oldFileName);
          // a rename: mark the old path for unlinking in `complete`
          if (newFileName !== oldFileName) {
            patchedFiles[oldFileName] = null;
          }
          patchedFiles[newFileName] = content;
          callback();
        } catch (err) {
          callback(err);
        }
      },
      // custom line comparison used while matching hunk context
      compareLine: function(_, line, operation, patchContent) {
        if (operation === ' ') {
          // Ignore when no patch operators as GNU patch does
          return true;
        }
        return line === patchContent;
      },
      // called once after every hunk was processed (or on first error)
      complete: function(error) {
        if (error) {
          return reject(error);
        }
        if (!live) {
          return resolve();
        }
        try {
          // write patched files back to disk, unlink files completely removed by patching
          for (const fileName in patchedFiles) {
            if (typeof patchedFiles[fileName] === 'string') {
              fs.writeFileSync(
                path.resolve(relative, fileName),
                patchedFiles[fileName],
              );
            } else {
              fs.unlinkSync(path.resolve(relative, fileName));
            }
          }
          resolve();
        } catch (err) {
          reject(err);
        }
      },
    });
  });
}
// diff data compares the same file with a dummy path (a/path/to/real.file vs b/path/to/real.file)
// skipping the dummy folder name by trimming up to the first slash
// diff data compares the same file with a dummy path (a/path/to/real.file vs
// b/path/to/real.file); strip that dummy leading folder by trimming up to the
// first slash. Falsy input (and paths with no non-empty first segment, e.g.
// '/x' or 'noslash') are returned unchanged.
function trimUpToFirstSlash(fileName) {
  if (!fileName) {
    return fileName;
  }
  const firstSlash = fileName.indexOf('/');
  if (firstSlash <= 0) {
    return fileName;
  }
  return fileName.slice(firstSlash + 1);
}
/**
 * Turns a patch failure into a reportable error and ships diagnostics to
 * the snyk analytics backend.
 *
 * For ENOENT the original error is re-worded and rejected immediately.
 * Otherwise the local npm version is probed, a reference ID is generated,
 * failure metadata is posted via addDataAndSend, and the promise rejects
 * with a new Error (code 'FAIL_PATCH') whose message includes the reference
 * ID for support. Note: the returned promise never resolves — it always
 * rejects.
 *
 * @param {Error} error - the underlying patch failure
 * @param {string} dir - directory the patch targeted
 * @param {object} vuln - vulnerability record (.id, .name, .version, .from)
 * @param {string} patchUrl - URL the patch came from
 * @returns {Promise<never>}
 */
function patchError(error, dir, vuln, patchUrl) {
  if (error && error.code === 'ENOENT') {
    error.message =
      'Failed to patch: the target could not be found (' + error.message + ').';
    return Promise.reject(error);
  }
  return new Promise((resolve, reject) => {
    const id = vuln.id;
    exec(
      'npm -v',
      {
        env: process.env,
      },
      (npmVError, versions) => {
        // stderr is ignored
        // first line of `npm -v` output; undefined if the probe failed
        const npmVersion = versions && versions.split('\n').shift();
        const referenceId = uuidv4();
        // this is a general "patch failed", since we already check if the
        // patch was applied via a flag, this means something else went
        // wrong, so we'll ask the user for help to diagnose.
        const filename = path.relative(process.cwd(), dir);
        // post metadata to help diagnose
        addDataAndSend({
          command: 'patch-fail',
          metadata: {
            from: vuln.from.slice(1),
            vulnId: id,
            packageName: vuln.name,
            packageVersion: vuln.version,
            package: vuln.name + '@' + vuln.version,
            // message/stack/name are copied explicitly because they are
            // non-enumerable on Error and would be lost by the spread alone
            patchError: Object.assign(
              {},
              {
                message: error.message,
                stack: error.stack,
                name: error.name,
              },
              error,
            ),
            'npm-version': npmVersion,
            referenceId: referenceId,
            patchUrl: patchUrl,
            filename: filename,
          },
        });
        const msg =
          id +
          ' on ' +
          vuln.name +
          '@' +
          vuln.version +
          ' at "' +
          filename +
          '"\n' +
          error +
          ', ' +
          'reference ID: ' +
          referenceId +
          '\n';
        error = new Error(msg);
        error.code = 'FAIL_PATCH';
        reject(error);
      },
    );
  });
}
module.exports = applyPatch;
/***/ }),
/***/ 53417:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = dedupe;
const debug = __webpack_require__(15158)('snyk:patch');
const patchesForPackage = __webpack_require__(87553);
/**
 * Collapses duplicate vulnerability records (same name, version and
 * dependency path), keeping the most recently published one. Also narrows
 * each vuln's `patches` array down to the single applicable patch.
 *
 * @param {object[]} source - vulnerability records
 * @returns {{packages: object[], removed: object[]}} deduped vulns and the
 *   duplicates that were dropped
 */
function dedupe(source) {
  const removed = [];
  const byKey = {};
  for (const vuln of source) {
    if (Array.isArray(vuln.patches)) {
      // strip down to the only patches that can be applied
      vuln.patches = patchesForPackage(vuln);
    }
    const key = vuln.name + vuln.version + vuln.from.join('>');
    const existing = byKey[key];
    if (!existing) {
      byKey[key] = vuln;
      continue;
    }
    debug('dupe found on %s & %s', vuln.id, existing.id);
    if (vuln.publicationTime > existing.publicationTime) {
      // the newer record wins; drop the one we had
      debug('stripping %s', existing.id);
      removed.push(existing);
      byKey[key] = vuln;
    } else {
      removed.push(vuln);
    }
  }
  // turn back into an array
  return {
    packages: Object.values(byKey),
    removed: removed,
  };
}
/***/ }),
/***/ 82645:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = getVulnSource;
const debug = __webpack_require__(15158)('snyk');
const resolve = __webpack_require__(7320);
const path = __webpack_require__(85622);
const statSync = __webpack_require__(35747).statSync;
const { parsePackageString: moduleToObject } = __webpack_require__(60390);
/**
 * Locates the on-disk directory of the vulnerable package.
 *
 * Starts from `vuln.__filename`'s directory when present, otherwise from the
 * nested node_modules path implied by the dependency chain. If that path does
 * not exist (e.g. after `npm dedupe` hoisted the package) the `resolve`
 * package is used to walk parent node_modules directories. When even that
 * fails: in live mode the (possibly re-worded) error is thrown; in a dry run
 * the failure is ignored and the unresolved path is returned.
 *
 * @param {object} vuln - vulnerability record (.from, optional .__filename)
 * @param {boolean} live - whether a missing install should be fatal
 * @returns {string} directory of the package to patch
 */
function getVulnSource(vuln, live) {
  // package names along the dependency chain, minus the scanned root
  const from = vuln.from.slice(1).map((pkg) => moduleToObject(pkg).name);
  const viaPath = path.resolve(
    process.cwd(),
    'node_modules',
    from.join('/node_modules/'),
  );
  let source = vuln.__filename ? path.dirname(vuln.__filename) : viaPath;
  // try to stat the directory, if it throws, it doesn't exist...
  try {
    statSync(source);
  } catch (e) {
    // ...which means the package is located in a parent path (from an
    // npm dedupe process), so we remove the module name from the path
    // and use the `resolve` package to navigate the node_modules up
    // through parent directories.
    try {
      source = resolve.sync(from.slice(-1).pop(), viaPath);
    } catch (resolveError) {
      let adaptedError = resolveError;
      if (resolveError.code === 'NO_PACKAGE_FOUND') {
        adaptedError =
          'Error: `' +
          resolveError.message +
          "`\nWe can't patch without " +
          'dependencies installed. Please run `npm ' +
          'install` or `yarn install` first.';
      }
      if (live) {
        throw adaptedError;
      }
      // otherwise this is a dry run so we don't mind that it won't be
      // able to patch - likely a scenario run, so it's fine that the
      // patch target won't be found
    }
    debug('found better source for package: %s', source);
  }
  return source;
}
/***/ }),
/***/ 6346:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = ignore;
const debug = __webpack_require__(15158)('snyk');
const stripVersions = __webpack_require__(15177);
const oneDay = 1000 * 60 * 60 * 24;
/**
 * Builds the `ignore` section of a snyk policy from wizard answers.
 *
 * Each entry yields a rule keyed by the version-stripped dependency path,
 * carrying the user's reason and an expiry `days` (default 30) from now;
 * rules are then grouped by vulnerability id.
 *
 * @param {Array<{vuln: object, meta: {reason: string, days?: number}}>} data
 * @returns {Promise<{ignore: Object<string, object[]>}>}
 */
function ignore(data) {
  return new Promise((resolve) => {
    const rules = data.map((res) => {
      const vuln = res.vuln;
      const days = res.meta.days || 30;
      const ignoreRule = {
        [stripVersions(vuln.from.slice(1)).join(' > ')]: {
          reason: res.meta.reason,
          expires: new Date(Date.now() + oneDay * days).toJSON(),
        },
      };
      ignoreRule.vulnId = vuln.id;
      return ignoreRule;
    });
    const config = {};
    // group the rules by vuln id, dropping the temporary vulnId carrier key
    config.ignore = rules.reduce((acc, rule) => {
      const id = rule.vulnId;
      delete rule.vulnId;
      if (!acc[id]) {
        acc[id] = [];
      }
      acc[id].push(rule);
      return acc;
    }, {});
    // final format looks like test/fixtures/protect-interactive-config.json
    debug('ignore config', config);
    resolve(config);
  });
}
/***/ }),
/***/ 5409:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
const protect = (module.exports = {
ignore: __webpack_require__(6346),
update: __webpack_require__(37466)/* .update */ .Vx,
install: __webpack_require__(37466)/* .install */ .N9,
installDev: __webpack_require__(37466)/* .installDev */ .V7,
patch: __webpack_require__(39609),
patchesForPackage: __webpack_require__(87553),
generatePolicy: generatePolicy,
});
const debug = __webpack_require__(15158)('snyk');
const flattenDeep = __webpack_require__(44779);
const merge = __webpack_require__(72378);
/**
 * Runs the requested protect tasks (ignore / update / patch) and merges
 * their resulting policy fragments on top of the existing policy.
 *
 * @param {object} policy - the user's current policy (used as the merge base)
 * @param {object} tasks - arrays of work items keyed by task name
 * @param {boolean} live - dry-run flag passed through to each task
 * @param {string} packageManager - 'npm' or 'yarn', passed through
 * @returns {Promise<object>} the merged policy
 */
function generatePolicy(policy, tasks, live, packageManager) {
  const activeTasks = ['ignore', 'update', 'patch'].filter(
    (task) => tasks[task].length,
  );
  const promises = activeTasks.map((task) =>
    protect[task](tasks[task], live, packageManager),
  );
  return Promise.all(promises).then((res) => {
    // we're squashing the arrays of arrays into a flat structure
    // with only non-false values
    const results = flattenDeep(res).filter(Boolean);
    // then we merge the configs together using the original config
    // as the baseline (this lets us retain the user's existing config)
    results.unshift(policy);
    const newPolicy = merge(...results);
    debug(JSON.stringify(newPolicy, '', 2));
    return newPolicy;
  });
}
/***/ }),
/***/ 39609:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = patch;
const now = new Date();
const debug = __webpack_require__(15158)('snyk');
const chalk = __webpack_require__(32589);
const glob = __webpack_require__(12884);
const tempy = __webpack_require__(30488);
const fs = __webpack_require__(35747);
const path = __webpack_require__(85622);
const flatten = __webpack_require__(5800);
const cloneDeep = __webpack_require__(83465);
const applyPatch = __webpack_require__(64029);
const stripVersions = __webpack_require__(15177);
const getVulnSource = __webpack_require__(82645);
const dedupe = __webpack_require__(53417);
const writePatchFlag = __webpack_require__(87666);
const spinner = __webpack_require__(86766);
const errors = __webpack_require__(79407);
const analytics = __webpack_require__(82744);
const { default: getPatchFile } = __webpack_require__(17240);
/**
 * Downloads and applies the patches for the given vulnerabilities, then
 * returns the `patch` section of a snyk policy describing what was patched.
 *
 * Flow: resolve each vuln's on-disk source, dedupe, then for each vuln fetch
 * its patch file(s), revert any previous patch (via `.orig` backups when a
 * `.snyk-<id>.flag` marker exists), apply the patch, and write a new flag.
 * Patch application errors are collected in `errorList` and reported at the
 * end (throwing a generic support error), rather than aborting the run.
 *
 * @param {object[]} vulns - vulnerability records to patch
 * @param {boolean} live - false = dry run (nothing written/applied)
 * @returns {Promise<object>} policy config: { patch: { vulnId: [rules] } }
 */
function patch(vulns, live) {
  const lbl = 'Applying patches...';
  const errorList = [];
  return (
    spinner(lbl)
      .then(() => {
        // the target directory where our module name will live
        vulns.forEach((vuln) => (vuln.source = getVulnSource(vuln, live)));
        const deduped = dedupe(vulns);
        debug('patching %s vulns after dedupe', deduped.packages.length);
        // find the patches, pull them down off the web, save them in a temp file
        // then apply each individual patch - but do it one at a time (via reduce)
        const promises = deduped.packages.reduce((acc, vuln) => {
          return acc.then((res) => {
            const patches = vuln.patches; // this is also deduped in `dedupe`
            if (patches === null) {
              debug('no patch available for ' + vuln.id);
              analytics.add('no-patch', vuln.from.slice(1).join(' > '));
              return res;
            }
            analytics.add('patch', vuln.from.slice(1).join(' > '));
            debug(`Patching vuln: ${vuln.id} ${vuln.from}`);
            // the colon doesn't like Windows, ref: https://git.io/vw2iO
            const fileSafeId = vuln.id.replace(/:/g, '-');
            const flag = path.resolve(
              vuln.source,
              '.snyk-' + fileSafeId + '.flag',
            );
            // legacy marker name (raw id, colons intact) kept for
            // backwards compatibility with older snyk versions
            const oldFlag = path.resolve(
              vuln.source,
              '.snyk-' + vuln.id + '.flag',
            );
            // get the patches on the local fs
            const promises = patches.urls.map((url) => {
              const filename = tempy.file({
                extension: '.' + fileSafeId + '.snyk-patch',
              });
              return getPatchFile(url, filename)
                .then((patch) => {
                  // check whether there's a trace of us having patched before
                  return Promise.resolve(fs.existsSync(flag))
                    .then((exists) => {
                      // if the file doesn't exist, look for the old style filename
                      // in case and for backwards compatability
                      return exists || fs.existsSync(oldFlag);
                    })
                    .then((exists) => {
                      if (!exists) {
                        return patch;
                      }
                      debug(
                        'Previous flag found = ' +
                          exists +
                          ' | Restoring file back to original to apply the patch again',
                      );
                      // else revert the patch
                      return new Promise((resolve, reject) => {
                        // find all backup files that do not belong to transitive deps
                        glob(
                          '**/*.orig',
                          { cwd: vuln.source, ignore: '**/node_modules/**' },
                          (error, files) => {
                            if (error) {
                              return reject(error);
                            }
                            // copy '.orig' backups over the patched files
                            for (const file of files) {
                              const backupFile = path.resolve(
                                vuln.source,
                                file,
                              );
                              const sourceFile = backupFile.slice(
                                0,
                                -'.orig'.length,
                              );
                              debug('restoring', backupFile, sourceFile);
                              fs.renameSync(backupFile, sourceFile);
                            }
                            resolve(patch);
                          },
                        );
                      });
                    });
                })
                .then((patch) => {
                  if (patch === false) {
                    debug('already patched %s', vuln.id);
                    return vuln;
                  }
                  debug(
                    'applying patch file for %s: \n%s\n%s',
                    vuln.id,
                    url,
                    patch,
                  );
                  return applyPatch(patch, vuln, live, url)
                    .then(
                      () => {
                        return true;
                      },
                      (e) => {
                        // collect the failure but keep going; reported after
                        // the run completes
                        errorList.push(e);
                        return false;
                      },
                    )
                    // NOTE(review): writePatchFlag(now, vuln) is invoked
                    // immediately when this chain is built (a promise is not
                    // a callable handler, so .then ignores it and passes the
                    // prior boolean through) — i.e. the flag write starts
                    // before applyPatch settles. Confirm this is intended.
                    .then(writePatchFlag(now, vuln))
                    .then((ok) => {
                      return ok ? vuln : false;
                    });
                });
            });
            return Promise.all(promises).then((result) => {
              res.push(result);
              return res; // this is what makes the waterfall reduce chain work
            });
          });
        }, Promise.resolve(deduped.removed));
        const promise = promises
          .then((res) => {
            const patched = flatten(res).filter(Boolean);
            if (!live) {
              debug('[skipping - dry run]');
              return patched;
            }
            return Promise.all(patched);
          })
          .then((patched) => {
            const config = {};
            // this reduce function will look to see if the patch actually resolves
            // more than one vulnerability, and if it does, it'll replicate the
            // patch rule against the *other* vuln.ids. This will happen when the user
            // runs the wizard and selects to apply a patch that fixes more than one
            // vuln.
            const mapped = patched.map(patchRule).reduce((acc, curr, i) => {
              const vuln = patched[i];
              if (vuln.grouped && vuln.grouped.includes) {
                vuln.grouped.includes.forEach((id) => {
                  const rule = cloneDeep(curr);
                  rule.vulnId = id;
                  acc.push(rule);
                });
              }
              acc.push(curr);
              return acc;
            }, []);
            // group rules by vuln id, dropping the temporary vulnId key
            config.patch = mapped.reduce((acc, curr) => {
              if (!acc[curr.vulnId]) {
                acc[curr.vulnId] = [];
              }
              const id = curr.vulnId;
              delete curr.vulnId;
              acc[id].push(curr);
              return acc;
            }, {});
            debug('patched', config);
            return config;
          });
        return promise;
      })
      // clear spinner in case of success or failure
      .then(spinner.clear(lbl))
      .catch((error) => {
        spinner.clear(lbl)();
        throw error;
      })
      .then((res) => {
        if (errorList.length) {
          errorList.forEach((error) => {
            console.log(chalk.red(errors.message(error)));
            debug(error.stack);
          });
          throw new Error(
            'Please email support@snyk.io if this problem persists.',
          );
        }
        return res;
      })
  );
}
/**
 * Builds a single policy rule recording that `vuln` was patched now,
 * keyed by its version-stripped dependency path.
 *
 * @param {object} vuln - vulnerability record (.id, .from)
 * @returns {object} rule carrying a temporary `vulnId` used for grouping
 */
function patchRule(vuln) {
  const dependencyChain = stripVersions(vuln.from.slice(1)).join(' > ');
  return {
    vulnId: vuln.id,
    [dependencyChain]: {
      patched: now.toJSON(),
    },
  };
}
/***/ }),
/***/ 87553:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = patchesForPackage;
const semver = __webpack_require__(36625);
/**
 * Picks the first patch applicable to the vuln's installed version —
 * the patch's version range must be satisfied and it must have at least
 * one download URL.
 *
 * @param {object} vuln - vulnerability record (.version, .patches[])
 * @returns {object|null} the applicable patch, or null when none matches
 */
function patchesForPackage(vuln) {
  const applicable = vuln.patches.find((patch) => {
    if (!semver.satisfies(vuln.version, patch.version)) {
      return false;
    }
    return (patch.urls || []).length > 0;
  });
  return applicable || null;
}
/***/ }),
/***/ 15177:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = stripVersions;
const { parsePackageString: moduleToObject } = __webpack_require__(60390);
/**
 * Maps package specs (e.g. 'snyk@1.2.3') to bare package names.
 *
 * @param {string[]} packages
 * @returns {string[]}
 */
function stripVersions(packages) {
  return packages.map((pkg) => moduleToObject(pkg).name);
}
/***/ }),
/***/ 37466:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports.Vx = update;
module.exports.N9 = install;
module.exports.V7 = installDev;
const debug = __webpack_require__(15158)('snyk');
const chalk = __webpack_require__(32589);
const uniq = __webpack_require__(97644);
const isEmpty = __webpack_require__(99245);
const { parsePackageString: moduleToObject } = __webpack_require__(60390);
const semver = __webpack_require__(36625);
const errors = __webpack_require__(79407);
const { default: npm } = __webpack_require__(46103);
const { yarn } = __webpack_require__(96284);
const spinner = __webpack_require__(86766);
const analytics = __webpack_require__(82744);
/**
 * Upgrades vulnerable dependencies via npm or yarn.
 *
 * Buckets the vulns' first upgrade steps by dependency type ('prod'/'dev';
 * 'extraneous' is warned about and skipped), uninstalls the affected package
 * names once, then installs the highest required versions as prod and dev
 * dependencies. Install failures are captured (not thrown) and reported
 * after the spinner clears; the promise then resolves false.
 *
 * @param {object[]} packages - vulnerability records with .upgradePath
 * @param {boolean} live - false = dry run, passed through to npm/yarn
 * @param {string} [pkgManager='npm'] - 'npm' or 'yarn'
 * @returns {Promise<boolean|undefined>} true when all updates succeeded
 */
function update(packages, live, pkgManager) {
  pkgManager = pkgManager || 'npm';
  const lbl = 'Applying updates using ' + pkgManager + '...';
  let error = false;
  return (
    spinner(lbl)
      .then(() => {
        const upgrade = packages
          .map((vuln) => {
            const remediation = vuln.upgradePath && vuln.upgradePath[1];
            if (!remediation) {
              // this vuln holds an unreachable upgrade path - send this to
              // analytics and return null to be filtered out below
              analytics.add('bad-upgrade-path', vuln);
              return null;
            }
            return {
              remediation: remediation,
              type: vuln.parentDepType || 'prod',
            };
          })
          .filter(Boolean)
          .reduce((ups, vuln) => {
            if (!ups[vuln.type]) {
              ups[vuln.type] = [];
            }
            ups[vuln.type].push(vuln.remediation);
            return ups;
          }, {});
        debug('to upgrade', upgrade);
        // BUG FIX: `upgrade` is a plain object, so the original guard
        // (`upgrade.length === 0`) compared `undefined === 0` and never
        // fired. Count the keys instead. Resolve with `true` to match the
        // value the no-op fall-through path produced before this fix.
        if (Object.keys(upgrade).length === 0) {
          return true;
        }
        // warn if extraneous packages were selected for update
        if (upgrade.extraneous) {
          console.error(
            chalk.yellow(
              'Extraneous packages were selected for ' +
                'update, but will be skipped. These dependencies introduce ' +
                'vulnerabilities. Please remove the dependencies with `npm prune`, ' +
                'or install properly as prod or dev dependencies:',
              upgrade.extraneous.join(', '),
            ),
          );
        }
        const promise = Promise.resolve()
          .then(() => {
            // create list of unique package names _without versions_ for uninstall
            // skip extraneous packages, if any
            const prodToUninstall =
              (upgrade.prod && upgrade.prod.map(stripVersion)) || [];
            const devToUninstall =
              (upgrade.dev && upgrade.dev.map(stripVersion)) || [];
            const toUninstall = uniq(prodToUninstall.concat(devToUninstall));
            debug('to uninstall', toUninstall);
            if (!isEmpty(toUninstall)) {
              return uninstall(pkgManager, toUninstall, live);
            }
          })
          .then(() => {
            // run prod and dev installs in parallel, capturing (not
            // throwing) the first failure in `error`
            const prodUpdate = (upgrade.prod
              ? install(pkgManager, findUpgrades(upgrade.prod), live)
              : Promise.resolve(true)
            ).catch((e) => {
              error = e;
              return false;
            });
            const devUpdate = (upgrade.dev
              ? installDev(pkgManager, findUpgrades(upgrade.dev), live)
              : Promise.resolve(true)
            ).catch((e) => {
              error = e;
              return false;
            });
            return Promise.all([prodUpdate, devUpdate]).then((results) => {
              return results[0] && results[1];
            });
          });
        return promise;
      })
      // clear spinner in case of success or failure
      .then(spinner.clear(lbl))
      .catch((error) => {
        spinner.clear(lbl)();
        throw error;
      })
      .then((res) => {
        // report any captured install failure after the spinner is gone
        if (error) {
          console.error(chalk.red(errors.message(error)));
          debug(error.stack);
        }
        return res;
      })
  );
}
// Installs the given upgrades as prod dependencies with the chosen manager.
function install(pkgManager, upgrades, live) {
  if (pkgManager === 'yarn') {
    return yarn('add', upgrades, live);
  }
  return npm('install', upgrades, live);
}
// Installs the given upgrades as dev dependencies with the chosen manager.
function installDev(pkgManager, upgrades, live) {
  if (pkgManager === 'yarn') {
    return yarn('add', upgrades, live, null, ['--dev']);
  }
  return npm('install', upgrades, live, null, ['--save-dev']);
}
// Removes the given packages with the chosen manager.
function uninstall(pkgManager, toUninstall, live) {
  if (pkgManager === 'yarn') {
    return yarn('remove', toUninstall, live);
  }
  return npm('uninstall', toUninstall, live);
}
/**
 * Collapses package specs to one entry per package name, keeping the
 * highest requested version, and returns 'name@version' strings.
 *
 * @param {string[]} packages - specs like 'lodash@4.17.21'
 * @returns {string[]} deduped 'name@version' specs
 */
function findUpgrades(packages) {
  const highestByName = packages
    .map(moduleToObject)
    .reduce((acc, pkg) => {
      const existing = acc.find((candidate) => candidate.name === pkg.name);
      if (!existing) {
        acc.push(pkg);
      } else if (semver.gt(pkg.version, existing.version)) {
        // keep the highest version seen for this package
        existing.version = pkg.version;
      }
      return acc;
    }, []);
  return highestByName.map((pkg) => pkg.name + '@' + pkg.version);
}
/**
 * Strips the version suffix from a package spec.
 * '@snyk/module@1.0.0' -> '@snyk/module'; 'snyk@1.2.3' -> 'snyk';
 * 'tap' -> 'tap'; falsy input -> undefined.
 *
 * @param {string} pkg
 * @returns {string|undefined}
 */
function stripVersion(pkg) {
  if (!pkg) {
    return;
  }
  if (pkg.charAt(0) === '@') {
    // scoped: '@scope/name@1.0.0'.split('@') -> ['', 'scope/name', '1.0.0']
    return '@' + pkg.split('@')[1];
  }
  const atIndex = pkg.indexOf('@');
  if (atIndex > 0) {
    // non-scoped spec like snyk@1.2.3: keep everything before the '@'
    return pkg.slice(0, atIndex);
  }
  // versionless packages like tap
  return pkg;
}
/***/ }),
/***/ 87666:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
module.exports = writePatchFlag;
const debug = __webpack_require__(15158)('snyk');
const fs = __webpack_require__(35747);
const path = __webpack_require__(85622);
/**
 * Writes the `.snyk-<id>.flag` marker file(s) recording that `vuln` was
 * patched at `now`. When the vuln is grouped, a flag is also written for
 * every included vuln id so later runs detect those as patched too.
 *
 * @param {Date|object} now - patch timestamp; when called with one argument
 *   it is treated as the vuln and `now` defaults to the current time
 * @param {object} [vuln] - vulnerability record (.id, .source, .grouped)
 * @returns {Promise<object>} resolves with the vuln
 */
function writePatchFlag(now, vuln) {
  if (!vuln) {
    vuln = now;
    now = new Date();
  }
  debug('writing flag for %s', vuln.id);
  let promise;
  // the colon doesn't like Windows, ref: https://git.io/vw2iO
  const fileSafeId = vuln.id.replace(/:/g, '-');
  const flag = path.resolve(vuln.source, '.snyk-' + fileSafeId + '.flag');
  if (vuln.grouped && vuln.grouped.includes) {
    debug('found addition vulns to write flag files for');
    fs.writeFileSync(flag, now.toJSON(), 'utf8');
    // BUG FIX: the original loop ignored its callback argument and kept
    // re-deriving the flag path from `vuln.id`, rewriting the same file once
    // per grouped vuln. Each entry of `grouped.includes` is a vuln id (see
    // the policy replication in `patch`), so write one flag per included id.
    // (The old `writePromises` array only ever collected `undefined` —
    // writeFileSync is synchronous — so a resolved promise is equivalent.)
    vuln.grouped.includes.forEach((includedId) => {
      const includedSafeId = includedId.replace(/:/g, '-');
      const includedFlag = path.resolve(
        vuln.source,
        '.snyk-' + includedSafeId + '.flag',
      );
      debug('Writing flag for grouped vulns', includedFlag);
      fs.writeFileSync(includedFlag, now.toJSON(), 'utf8');
    });
    promise = Promise.resolve();
  } else {
    debug('Writing flag for single vuln', flag);
    /* TODO:
    This piece is actually swallowing fs.writeFile errors!
    See the `promise.then` construct below.
    This should be refactored and tests should be updated.
    */
    promise = new Promise((r) => fs.writeFile(flag, now.toJSON(), 'utf8', r));
  }
  return promise.then(() => {
    return vuln;
  });
}
/***/ })
};
;
//# sourceMappingURL=409.index.js.map