snyk
Version:
snyk library and cli utility
1,161 lines (1,083 loc) • 8.44 MB
JavaScript
exports.id = 917;
exports.ids = [917];
exports.modules = {
/***/ 68214:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.formatTestError = void 0;
function formatTestError(error) {
    // A test "error" arrives in one of three shapes:
    //   1. vulnerabilities were found: `error.message` is a JSON-stringified
    //      test result;
    //   2. the flow failed with a genuine Error object;
    //   3. the flow failed with a bare number or string describing the problem.
    // Normalise all of them into the most descriptive _object_ available.
    if (error instanceof Error) {
        return error;
    }
    if (typeof error !== 'object') {
        return new Error(error);
    }
    try {
        return JSON.parse(error.message);
    }
    catch (unused) {
        // Not a JSON-stringified result; hand back the object as-is.
        return error;
    }
}
exports.formatTestError = formatTestError;
/***/ }),
/***/ 26747:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.extractLineNumber = exports.getFileTypeForParser = void 0;
const types_1 = __webpack_require__(42258);
const errors_1 = __webpack_require__(55191);
const cloud_config_parser_1 = __webpack_require__(98611);
const file_parser_1 = __webpack_require__(2314);
const analytics = __webpack_require__(82744);
const Debug = __webpack_require__(15158);
const error_utils_1 = __webpack_require__(23872);
const debug = Debug('iac-extract-line-number');
// Translates a raw file extension into the enum value that the
// cloud-config parser library expects.
function getFileTypeForParser(fileType) {
    if (fileType === 'yaml' || fileType === 'yml') {
        return cloud_config_parser_1.CloudConfigFileTypes.YAML;
    }
    if (fileType === 'json') {
        return cloud_config_parser_1.CloudConfigFileTypes.JSON;
    }
    if (fileType === 'tf') {
        return cloud_config_parser_1.CloudConfigFileTypes.TF;
    }
    // Any other extension is unsupported and surfaces as a user-facing error.
    throw new file_parser_1.UnsupportedFileTypeError(fileType);
}
exports.getFileTypeForParser = getFileTypeForParser;
// Resolves an issue's cloud-config path to a line number in the scanned
// file; returns -1 when the parser library cannot determine one.
function extractLineNumber(cloudConfigPath, fileType, treeByDocId) {
    try {
        return cloud_config_parser_1.getLineNumber(cloudConfigPath, fileType, treeByDocId);
    }
    catch {
        // Record the failure for analytics, but never fail the scan over a
        // missing line number.
        const extractionError = new FailedToExtractLineNumberError();
        analytics.add('error-code', extractionError.code);
        debug('Parser library failed. Could not assign lineNumber to issue');
        return -1;
    }
}
exports.extractLineNumber = extractLineNumber;
// Internal error used only to tag analytics when line-number extraction
// fails; `userMessage` is intentionally empty because it is never shown.
class FailedToExtractLineNumberError extends errors_1.CustomError {
    constructor(message) {
        super(message || 'Parser library failed. Could not assign lineNumber to issue');
        this.code = types_1.IaCErrorCodes.FailedToExtractLineNumberError;
        this.strCode = error_utils_1.getErrorStringCode(this.code);
        this.userMessage = ''; // Not a user facing error.
    }
}
/***/ }),
/***/ 13552:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.FailedToLoadFileError = exports.NoFilesToScanError = exports.tryLoadFileData = exports.loadFiles = exports.loadContentForFiles = void 0;
const makeDirectoryIterator_1 = __webpack_require__(87491);
const fs_1 = __webpack_require__(35747);
const types_1 = __webpack_require__(42258);
const detect_1 = __webpack_require__(45318);
const errors_1 = __webpack_require__(55191);
const error_utils_1 = __webpack_require__(23872);
const DEFAULT_ENCODING = 'utf-8';
// Reads every file concurrently. A read failure for any path is surfaced
// as a FailedToLoadFileError naming that path; files whose content is
// empty are dropped from the result.
async function loadContentForFiles(filePaths) {
    const loadedFiles = await Promise.all(filePaths.map((filePath) =>
        tryLoadFileData(filePath).catch(() => {
            throw new FailedToLoadFileError(filePath);
        })));
    return loadedFiles.filter((file) => file.fileContent !== '');
}
exports.loadContentForFiles = loadContentForFiles;
/**
 * Collects and loads every candidate IaC file under `pathToScan`.
 * @param pathToScan - single file or directory provided by the user
 * @param options - may carry `detectionDepth` to bound the directory walk
 * @param validFileTypes - optional override of the accepted extensions
 * @returns loaded file records (path, type, non-empty content)
 * @throws NoFilesToScanError when no candidate paths are found
 */
async function loadFiles(pathToScan, options = {}, validFileTypes) {
    const filePaths = getFilePathsFromDirectory(pathToScan, {
        maxDepth: options.detectionDepth,
        validFileTypes,
    });
    if (filePaths.length === 0) {
        throw new NoFilesToScanError();
    }
    // loadContentForFiles already drops files with empty content, so the
    // original second `fileContent !== ''` filter here was redundant.
    return loadContentForFiles(filePaths);
}
exports.loadFiles = loadFiles;
// True when the file's extension is among the accepted IaC file types.
function hasValidFileType(filePath, validFileTypes = types_1.VALID_FILE_TYPES) {
    const extension = getFileType(filePath);
    return validFileTypes.includes(extension);
}
// Resolves `pathToScan` to the list of candidate file paths: either the
// single file itself (when acceptable) or all acceptable files found by
// walking the directory, bounded by `options.maxDepth`.
function getFilePathsFromDirectory(pathToScan, options = {}) {
    if (!detect_1.isLocalFolder(pathToScan)) {
        // Single file: include it only if its extension is acceptable.
        return hasValidFileType(pathToScan, options.validFileTypes)
            ? [pathToScan]
            : [];
    }
    // Directory: walk it and keep every file with an accepted extension.
    const resFilePaths = [];
    const dirIterator = makeDirectoryIterator_1.makeDirectoryIterator(pathToScan, {
        maxDepth: options.maxDepth,
    });
    for (const filePath of dirIterator) {
        if (hasValidFileType(filePath, options.validFileTypes)) {
            resFilePaths.push(filePath);
        }
    }
    return resFilePaths;
}
/**
 * Loads one file, describing it by path, extension-derived type and
 * UTF-8 content.
 * @param pathToScan - path of the file to read
 * @returns {{filePath, fileType, fileContent}}
 */
async function tryLoadFileData(pathToScan) {
    const fileType = getFileType(pathToScan);
    // readFile already returns a string when an encoding is supplied, so
    // the original `.toString()` call was redundant and has been removed.
    const fileContent = await fs_1.promises.readFile(pathToScan, DEFAULT_ENCODING);
    return {
        filePath: pathToScan,
        fileType,
        fileContent,
    };
}
exports.tryLoadFileData = tryLoadFileData;
// Raised when neither the path itself nor its directory walk yields any
// file with a supported IaC extension; `userMessage` is shown to the user.
class NoFilesToScanError extends errors_1.CustomError {
    constructor(message) {
        super(message || 'Could not find any valid IaC files');
        this.code = types_1.IaCErrorCodes.NoFilesToScanError;
        this.strCode = error_utils_1.getErrorStringCode(this.code);
        this.userMessage =
            'Could not find any valid infrastructure as code files. Supported file extensions are tf, yml, yaml & json.\nMore information can be found by running `snyk iac test --help` or through our documentation:\nhttps://support.snyk.io/hc/en-us/articles/360012429477-Test-your-Kubernetes-files-with-our-CLI-tool\nhttps://support.snyk.io/hc/en-us/articles/360013723877-Test-your-Terraform-files-with-our-CLI-tool';
    }
}
exports.NoFilesToScanError = NoFilesToScanError;
/**
 * Raised when a candidate IaC file exists but its content could not be
 * read (permissions, transient IO failure, etc.).
 */
class FailedToLoadFileError extends errors_1.CustomError {
    /** @param {string} filename - path of the file that failed to load */
    constructor(filename) {
        super('Failed to load file content');
        this.code = types_1.IaCErrorCodes.FailedToLoadFileError;
        this.strCode = error_utils_1.getErrorStringCode(this.code);
        // Bug fix: the message previously contained the literal text
        // "$(unknown)" instead of interpolating the offending file name
        // (the `filename` parameter was never used).
        this.userMessage = `We were unable to read file "${filename}" for scanning. Please ensure that it is readable.`;
    }
}
exports.FailedToLoadFileError = FailedToLoadFileError;
// Returns the lower-cased extension (text after the last '.') of a path;
// a path with no dot yields the whole path lower-cased.
function getFileType(filePath) {
    const lastSegment = filePath.split('.').pop();
    return lastSegment.toLowerCase();
}
/***/ }),
/***/ 2314:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.UnsupportedFileTypeError = exports.tryParseIacFile = exports.parseTerraformFiles = exports.parseFiles = void 0;
const config_type_detection_1 = __webpack_require__(93669);
const terraform_file_parser_1 = __webpack_require__(29263);
const file_loader_1 = __webpack_require__(13552);
const terraform_plan_parser_1 = __webpack_require__(81893);
const types_1 = __webpack_require__(42258);
const analytics = __webpack_require__(82744);
const errors_1 = __webpack_require__(55191);
const error_utils_1 = __webpack_require__(23872);
const yaml_parser_1 = __webpack_require__(91993);
const hcl_to_json_v2_1 = __webpack_require__(11640);
const constants_1 = __webpack_require__(68620);
const Debug = __webpack_require__(15158);
const debug = Debug('snyk-test');
// Parses each loaded file independently, collecting successes and
// failures. Throws NoFilesToScanError when nothing parsed successfully.
async function parseFiles(filesData, options = {}) {
    const parsedFiles = [];
    const failedFiles = [];
    for (const fileData of filesData) {
        try {
            const results = tryParseIacFile(fileData, options);
            parsedFiles.push(...results);
        }
        catch (err) {
            // A parse error is fatal when the user scanned a single file;
            // for a multi-file scan we record the failure and keep going.
            if (filesData.length === 1) {
                throw err;
            }
            failedFiles.push(generateFailedParsedFile(fileData, err));
        }
    }
    if (parsedFiles.length === 0) {
        throw new file_loader_1.NoFilesToScanError();
    }
    return { parsedFiles, failedFiles };
}
exports.parseFiles = parseFiles;
// Parses a batch of Terraform files in a single parser call, then splits
// the result back into per-file success/failure records. Batching matters
// because files in one directory share a variable context (see the caller
// in loadAndParseTerraformFiles).
function parseTerraformFiles(filesData) {
    // the parser expects a map of <filePath>:<fileContent> key-value pairs
    const files = filesData.reduce((map, fileData) => {
        map[fileData.filePath] = fileData.fileContent;
        return map;
    }, {});
    const { parsedFiles, failedFiles, debugLogs } = hcl_to_json_v2_1.default(files);
    // Only a single-file scan that failed is fatal; in a multi-file scan
    // failures are reported per-file below instead of thrown.
    if (filesData.length === 1 && Object.keys(failedFiles).length === 1) {
        if (debugLogs[filesData[0].filePath]) {
            debug('File %s failed to parse with: %s', filesData[0].filePath, debugLogs[filesData[0].filePath]);
        }
        throw new terraform_file_parser_1.FailedToParseTerraformFileError(filesData[0].filePath);
    }
    const parsingResults = {
        parsedFiles: [],
        failedFiles: [],
    };
    // Re-attach the original file metadata to each parser outcome.
    for (const fileData of filesData) {
        if (parsedFiles[fileData.filePath]) {
            parsingResults.parsedFiles.push({
                ...fileData,
                // Parser output is a JSON string per file; decode it here.
                jsonContent: JSON.parse(parsedFiles[fileData.filePath]),
                projectType: constants_1.IacProjectType.TERRAFORM,
                engineType: types_1.EngineType.Terraform,
            });
        }
        else if (failedFiles[fileData.filePath]) {
            if (debugLogs[fileData.filePath]) {
                debug('File %s failed to parse with: %s', fileData.filePath, debugLogs[fileData.filePath]);
            }
            parsingResults.failedFiles.push(generateFailedParsedFile(fileData, new Error(failedFiles[fileData.filePath])));
        }
    }
    return parsingResults;
}
exports.parseTerraformFiles = parseTerraformFiles;
// Builds the "failed file" record returned to callers when parsing
// throws: the original file data plus the error and its message, with
// engine/json fields explicitly nulled out.
function generateFailedParsedFile({ fileType, filePath, fileContent }, err) {
    const failureRecord = {
        err,
        failureReason: err.message,
        fileType,
        filePath,
        fileContent,
        engineType: null,
        jsonContent: null,
    };
    return failureRecord;
}
// Dispatches one loaded file to the parser matching its extension and
// returns the parsed IaC file records. Throws UnsupportedFileTypeError
// for unknown extensions.
function tryParseIacFile(fileData, options = {}) {
    analytics.add('iac-terraform-plan', false);
    const { fileType } = fileData;
    if (fileType === 'yaml' || fileType === 'yml') {
        const parsedIacFile = yaml_parser_1.parseYAMLOrJSONFileData(fileData);
        return config_type_detection_1.detectConfigType(fileData, parsedIacFile);
    }
    if (fileType === 'json') {
        const parsedIacFile = yaml_parser_1.parseYAMLOrJSONFileData(fileData);
        // A Kubernetes JSON file may hold several documents, but a
        // Terraform plan is always exactly one object.
        const isPlan = parsedIacFile.length === 1 &&
            terraform_plan_parser_1.isTerraformPlan(parsedIacFile[0]);
        if (!isPlan) {
            return config_type_detection_1.detectConfigType(fileData, parsedIacFile);
        }
        analytics.add('iac-terraform-plan', true);
        return terraform_plan_parser_1.tryParsingTerraformPlan(fileData, parsedIacFile[0], {
            isFullScan: options.scan === types_1.TerraformPlanScanMode.FullScan,
        });
    }
    if (fileType === 'tf') {
        return terraform_file_parser_1.tryParsingTerraformFile(fileData);
    }
    throw new UnsupportedFileTypeError(fileData.fileType);
}
exports.tryParseIacFile = tryParseIacFile;
// Raised when a scanned file's extension is not one of the supported IaC
// formats (tf, yml, yaml, json); `userMessage` is shown to the CLI user.
class UnsupportedFileTypeError extends errors_1.CustomError {
    constructor(fileType) {
        super('Unsupported file extension');
        this.code = types_1.IaCErrorCodes.UnsupportedFileTypeError;
        this.strCode = error_utils_1.getErrorStringCode(this.code);
        this.userMessage = `Unable to process the file with extension ${fileType}. Supported file extensions are tf, yml, yaml & json.\nMore information can be found by running \`snyk iac test --help\` or through our documentation:\nhttps://support.snyk.io/hc/en-us/articles/360012429477-Test-your-Kubernetes-files-with-our-CLI-tool\nhttps://support.snyk.io/hc/en-us/articles/360013723877-Test-your-Terraform-files-with-our-CLI-tool`;
    }
}
exports.UnsupportedFileTypeError = UnsupportedFileTypeError;
/***/ }),
/***/ 16879:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.FailedToExecutePolicyEngine = exports.FailedToBuildPolicyEngine = exports.clearPolicyEngineCache = exports.scanFiles = void 0;
const types_1 = __webpack_require__(42258);
const opa_wasm_1 = __webpack_require__(79264);
const fs = __webpack_require__(35747);
const local_cache_1 = __webpack_require__(6255);
const errors_1 = __webpack_require__(55191);
const error_utils_1 = __webpack_require__(23872);
// Runs every parsed file through the policy engine matching its engine
// type and returns the per-file scan results.
async function scanFiles(parsedFiles) {
    // TODO: gracefully handle failed scans
    const scanResults = [];
    for (const parsedFile of parsedFiles) {
        // Engines are cached per type, so repeated lookups are cheap.
        const policyEngine = await getPolicyEngine(parsedFile.engineType);
        scanResults.push(policyEngine.scanFile(parsedFile));
    }
    return scanResults;
}
exports.scanFiles = scanFiles;
// Returns the policy engine for `engineType`, building it lazily on
// first use and caching it for subsequent calls.
async function getPolicyEngine(engineType) {
    if (!policyEngineCache[engineType]) {
        policyEngineCache[engineType] = await buildPolicyEngine(engineType);
    }
    return policyEngineCache[engineType];
}
// used in tests only
function clearPolicyEngineCache() {
    // Reset every per-engine-type slot back to "not built yet".
    policyEngineCache = {
        [types_1.EngineType.Kubernetes]: null,
        [types_1.EngineType.Terraform]: null,
        [types_1.EngineType.CloudFormation]: null,
        [types_1.EngineType.ARM]: null,
        [types_1.EngineType.Custom]: null,
    };
}
exports.clearPolicyEngineCache = clearPolicyEngineCache;
// Module-level cache: one lazily-built engine per engine type (null until
// getPolicyEngine builds it).
let policyEngineCache = {
    [types_1.EngineType.Kubernetes]: null,
    [types_1.EngineType.Terraform]: null,
    [types_1.EngineType.CloudFormation]: null,
    [types_1.EngineType.ARM]: null,
    [types_1.EngineType.Custom]: null,
};
// Builds a policy engine for `engineType` by loading the OPA WASM bundle
// and its policy metadata from the local cache directory.
async function buildPolicyEngine(engineType) {
    const [policyEngineCoreDataPath, policyEngineMetaDataPath,] = local_cache_1.getLocalCachePath(engineType);
    try {
        const wasmFile = fs.readFileSync(policyEngineCoreDataPath);
        const policyMetaData = fs.readFileSync(policyEngineMetaDataPath);
        const policyMetadataAsJson = JSON.parse(policyMetaData.toString());
        const opaWasmInstance = await opa_wasm_1.loadPolicy(Buffer.from(wasmFile));
        // Make the rule metadata available to policy evaluation.
        opaWasmInstance.setData(policyMetadataAsJson);
        return new PolicyEngine(opaWasmInstance);
    }
    catch (err) {
        // NOTE(review): the underlying error is discarded here and replaced
        // by a generic failure — consider logging `err` for debuggability.
        throw new FailedToBuildPolicyEngine();
    }
}
/**
 * Thin wrapper around a loaded OPA WASM policy instance: evaluates a
 * parsed IaC file and attaches the resulting violated policies to it.
 */
class PolicyEngine {
    constructor(opaWasmInstance) {
        // Bug fix: the original assigned `opaWasmInstance` twice in a row;
        // a single assignment is sufficient.
        this.opaWasmInstance = opaWasmInstance;
    }
    /** Runs the policy over `data` and unwraps the first result set. */
    evaluate(data) {
        return this.opaWasmInstance.evaluate(data)[0].result;
    }
    /**
     * Scans one parsed file, returning a copy of it with the
     * `violatedPolicies` produced by policy evaluation.
     * @throws FailedToExecutePolicyEngine when evaluation fails
     */
    scanFile(iacFile) {
        try {
            const violatedPolicies = this.evaluate(iacFile.jsonContent);
            return {
                ...iacFile,
                violatedPolicies,
            };
        }
        catch (err) {
            // TODO: to distinguish between different failure reasons
            throw new FailedToExecutePolicyEngine();
        }
    }
}
// Raised when the OPA WASM bundle or its metadata cannot be loaded from
// the local cache; the user is asked to re-run with `-d` and contact support.
class FailedToBuildPolicyEngine extends errors_1.CustomError {
    constructor(message) {
        super(message || 'Failed to build policy engine');
        this.code = types_1.IaCErrorCodes.FailedToBuildPolicyEngine;
        this.strCode = error_utils_1.getErrorStringCode(this.code);
        this.userMessage =
            'We were unable to run the test. Please run the command again with the `-d` flag and contact support@snyk.io with the contents of the output';
    }
}
exports.FailedToBuildPolicyEngine = FailedToBuildPolicyEngine;
// Raised when policy evaluation throws while scanning a file; the user is
// asked to re-run with `-d` and contact support.
class FailedToExecutePolicyEngine extends errors_1.CustomError {
    constructor(message) {
        super(message || 'Failed to execute policy engine');
        this.code = types_1.IaCErrorCodes.FailedToExecutePolicyEngine;
        this.strCode = error_utils_1.getErrorStringCode(this.code);
        this.userMessage =
            'We were unable to run the test. Please run the command again with the `-d` flag and contact support@snyk.io with the contents of the output';
    }
}
exports.FailedToExecutePolicyEngine = FailedToExecutePolicyEngine;
/***/ }),
/***/ 89708:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.isIgnoredFile = exports.isSingleFile = exports.shouldBeParsed = exports.getExtensionForPath = exports.makeFileAndDirectoryGenerator = void 0;
const fs = __webpack_require__(35747);
const path_1 = __webpack_require__(85622);
const fs_1 = __webpack_require__(35747);
const path = __webpack_require__(85622);
const types_1 = __webpack_require__(42258);
/**
* makeFileAndDirectoryGenerator is a generator function that helps walking the directory and file structure of this pathToScan
* @param root
* @param maxDepth? - An optional `maxDepth` argument can be provided to limit how deep in the file tree the search will go.
* @returns {Generator<object>} - a generator which yields an object with directories or paths for the path to scan
*/
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function* makeFileAndDirectoryGenerator(root = '.', maxDepth) {
    function* generatorHelper(pathToScan, currentDepth) {
        // Always report the directory itself, even at the depth limit.
        yield { directory: pathToScan };
        if (maxDepth === currentDepth) {
            return; // depth limit reached: don't descend further
        }
        for (const dirent of fs_1.readdirSync(pathToScan, { withFileTypes: true })) {
            const entryPath = path_1.join(pathToScan, dirent.name);
            if (dirent.isFile()) {
                yield {
                    file: {
                        dir: pathToScan,
                        fileName: entryPath,
                    },
                };
            }
            else if (dirent.isDirectory() &&
                fs.readdirSync(entryPath).length !== 0) {
                // Recurse only into non-empty subdirectories.
                yield* generatorHelper(entryPath, currentDepth + 1);
            }
        }
    }
    yield* generatorHelper(root, 1);
}
exports.makeFileAndDirectoryGenerator = makeFileAndDirectoryGenerator;
// Lower-cased extension (including the leading dot) of a path.
exports.getExtensionForPath = (pathToScan) => path.extname(pathToScan).toLowerCase();
// A file should be parsed when its extension is a valid Terraform file type.
exports.shouldBeParsed = (pathToScan) => types_1.VALID_TERRAFORM_FILE_TYPES.includes(exports.getExtensionForPath(pathToScan));
// Heuristic: anything with a file extension is treated as a single file.
exports.isSingleFile = (pathToScan) => Boolean(exports.getExtensionForPath(pathToScan));
/**
* Checks if a file should be ignored from loading or not according to the filetype.
* We ignore the same files that Terraform ignores.
* https://github.com/hashicorp/terraform/blob/dc63fda44b67300d5161dabcd803426d0d2f468e/internal/configs/parser_config_dir.go#L137-L143
* @param {string} pathToScan - The filepath to check
* @returns {boolean} if the filepath should be ignored or not
*/
function isIgnoredFile(pathToScan) {
    // Normalise first so a relative path like './my-folder/terraform.tf'
    // is not mistaken for a Unix hidden file.
    const normalisedPath = path.normalize(pathToScan);
    const isHiddenFile = normalisedPath.startsWith('.'); // Unix-like hidden files
    const isVimBackup = normalisedPath.startsWith('~'); // vim
    const isEmacsAutosave = normalisedPath.startsWith('#') && normalisedPath.endsWith('#'); // emacs
    return isHiddenFile || isVimBackup || isEmacsAutosave;
}
exports.isIgnoredFile = isIgnoredFile;
/***/ }),
/***/ 41806:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getTerraformFilesInDirectoryGenerator = exports.getFilesForDirectory = exports.getAllDirectoriesForPath = exports.loadAndParseTerraformFiles = void 0;
const file_loader_1 = __webpack_require__(13552);
const path = __webpack_require__(85622);
const file_parser_1 = __webpack_require__(2314);
const handle_terraform_files_utils_1 = __webpack_require__(89708);
/**
* This function handles everything from filtering out Terraform files directory by directory,
* loading them, sending them to the parser and getting the parsing results back.
* Then it concatenates and returns the new parsing results with the existing parsing results.
* @param pathToScan - the path to scan provided by the user
* @param maxDepth? - an optional maxDepth of directories if provided by the detection-depth flag
* @returns { parsedFiles, failedFiles} - all the parsing results so far
*/
async function loadAndParseTerraformFiles(pathToScan, maxDepth) {
    let parsedFiles = [];
    let failedFiles = [];
    // Terraform files are loaded and parsed one directory at a time,
    // because all files in a directory must share the same variable context.
    const allDirectories = getAllDirectoriesForPath(pathToScan, maxDepth);
    for (const currentDirectory of allDirectories) {
        const filePathsInDirectory = getFilesForDirectory(pathToScan, currentDirectory);
        const tfFilesToParse = await file_loader_1.loadContentForFiles(filePathsInDirectory);
        const results = file_parser_1.parseTerraformFiles(tfFilesToParse);
        parsedFiles = parsedFiles.concat(results.parsedFiles);
        failedFiles = failedFiles.concat(results.failedFiles);
    }
    if (parsedFiles.length === 0) {
        throw new file_loader_1.NoFilesToScanError();
    }
    return { parsedFiles, failedFiles };
}
exports.loadAndParseTerraformFiles = loadAndParseTerraformFiles;
/**
* Gets all nested directories for the path that we ran a scan.
* @param pathToScan - the path to scan provided by the user
* @param maxDepth? - An optional `maxDepth` argument can be provided to limit how deep in the file tree the search will go.
* @returns {string[]} An array with all the non-empty nested directories in this path
*/
function getAllDirectoriesForPath(pathToScan, maxDepth) {
    // A path with an extension is a single file: its resolved path is the
    // only "directory" to process. Otherwise walk the tree for directories.
    return handle_terraform_files_utils_1.isSingleFile(pathToScan)
        ? [path.resolve(pathToScan)]
        : [...getAllDirectoriesForPathGenerator(pathToScan, maxDepth)];
}
exports.getAllDirectoriesForPath = getAllDirectoriesForPath;
/**
* Gets all the directories included in this path
* @param pathToScan - the path to scan provided by the user
* @param maxDepth? - An optional `maxDepth` argument can be provided to limit how deep in the file tree the search will go.
* @returns {Generator<string>} - a generator which yields the filepaths for the path to scan
*/
// Yields only the directory entries produced by the file-and-directory
// walk of `pathToScan`, bounded by `maxDepth`.
function* getAllDirectoriesForPathGenerator(pathToScan, maxDepth) {
    for (const entry of handle_terraform_files_utils_1.makeFileAndDirectoryGenerator(pathToScan, maxDepth)) {
        if (entry.directory) {
            yield entry.directory;
        }
    }
}
/**
* Gets all file paths for the specific directory. If the provided path is a supported file, then it gets returned.
* @param pathToScan - the path to scan provided by the user
* @param currentDirectory - the directory which we want to return files for
* @returns {string[]} An array with all the Terraform filePaths for this directory
*/
function getFilesForDirectory(pathToScan, currentDirectory) {
    if (!handle_terraform_files_utils_1.isSingleFile(pathToScan)) {
        // Directory scan: collect the Terraform files directly inside it.
        return [...getTerraformFilesInDirectoryGenerator(currentDirectory)];
    }
    // Single-file scan: return the file only when it is a parseable,
    // non-ignored Terraform file.
    const isParseable = handle_terraform_files_utils_1.shouldBeParsed(pathToScan) &&
        !handle_terraform_files_utils_1.isIgnoredFile(pathToScan);
    return isParseable ? [pathToScan] : [];
}
exports.getFilesForDirectory = getFilesForDirectory;
/**
* Iterates through the makeFileAndDirectoryGenerator function and gets all the Terraform files in the specified directory
* @param pathToScan - the pathToScan to scan provided by the user
* @returns {Generator<string>} - a generator which holds all the filepaths
*/
function* getTerraformFilesInDirectoryGenerator(pathToScan) {
    for (const entry of handle_terraform_files_utils_1.makeFileAndDirectoryGenerator(pathToScan)) {
        if (!entry.file) {
            continue; // directory entry — nothing to yield
        }
        if (entry.file.dir !== pathToScan) {
            // Only files directly in the current walking directory belong to
            // this batch; nested directories are handled in their own pass.
            continue;
        }
        const { fileName } = entry.file;
        if (handle_terraform_files_utils_1.shouldBeParsed(fileName) &&
            !handle_terraform_files_utils_1.isIgnoredFile(fileName)) {
            yield fileName;
        }
    }
}
exports.getTerraformFilesInDirectoryGenerator = getTerraformFilesInDirectoryGenerator;
/***/ }),
/***/ 55409:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.removeFileContent = exports.test = void 0;
const detect_1 = __webpack_require__(45318);
const types_1 = __webpack_require__(42258);
const analytics_1 = __webpack_require__(22716);
const usage_tracking_1 = __webpack_require__(43156);
const policy_1 = __webpack_require__(25476);
const measurable_methods_1 = __webpack_require__(78272);
const unsupported_entitlement_error_1 = __webpack_require__(78673);
const config_1 = __webpack_require__(22541);
const policy_2 = __webpack_require__(32615);
const feature_flags_1 = __webpack_require__(63011);
const rules_1 = __webpack_require__(98895);
const file_loader_1 = __webpack_require__(13552);
const share_results_1 = __webpack_require__(3558);
const monitor_1 = __webpack_require__(3708);
// this method executes the local processing engine and then formats the results to adapt with the CLI output.
// this flow is the default GA flow for IAC scanning.
// Runs the full local IaC test flow: entitlement check, file discovery,
// parsing (with optional Terraform variable support), policy-engine
// scanning, severity overrides, optional result sharing, ignore-policy
// filtering, usage tracking and analytics.
async function test(pathToScan, options) {
    var _a, _b;
    try {
        const orgPublicId = (_a = options.org) !== null && _a !== void 0 ? _a : config_1.default.org;
        const iacOrgSettings = await measurable_methods_1.getIacOrgSettings(orgPublicId);
        // Orgs without the IaC entitlement cannot run this flow at all.
        if (!((_b = iacOrgSettings.entitlements) === null || _b === void 0 ? void 0 : _b.infrastructureAsCode)) {
            throw new unsupported_entitlement_error_1.UnsupportedEntitlementError('infrastructureAsCode');
        }
        // Parse tags and attributes right now, so we can exit early if the user
        // provided invalid values.
        const tags = parseTags(options);
        const attributes = parseAttributes(options);
        const rulesOrigin = await rules_1.initRules(iacOrgSettings, options);
        const policy = await policy_2.findAndLoadPolicy(pathToScan, 'iac', options);
        let parsedFiles = [];
        let failedFiles = [];
        const isTFVarSupportEnabled = (await feature_flags_1.isFeatureFlagSupportedForOrg('iacTerraformVarSupport', iacOrgSettings.meta.org)).ok;
        // if TF vars enabled, valid files are all except terraform files
        // (those are loaded separately, directory by directory, below).
        const validFileTypes = isTFVarSupportEnabled
            ? types_1.VALID_FILE_TYPES.filter((fileType) => fileType !== types_1.ValidFileType.Terraform &&
                fileType !== types_1.ValidFileType.TFVARS)
            : undefined;
        try {
            // load and parse all files that are a valid file type
            const filesToParse = await measurable_methods_1.loadFiles(pathToScan, options, validFileTypes);
            ({ parsedFiles, failedFiles } = await measurable_methods_1.parseFiles(filesToParse, options));
        }
        catch (err) {
            if (validFileTypes &&
                !validFileTypes.includes(types_1.ValidFileType.Terraform) &&
                err instanceof file_loader_1.NoFilesToScanError) {
                // ignore this error since we might only have .tf files in the folder and we will parse them in the next block
            }
            else {
                throw err;
            }
        }
        // we may have loaded and parsed all but terraform files in the previous step
        // so now we check if we need to do a second load and parse which dereferences TF vars
        if (validFileTypes && !validFileTypes.includes(types_1.ValidFileType.Terraform)) {
            try {
                const { parsedFiles: tfParsedFiles, failedFiles: tfFailedFiles, } = await measurable_methods_1.loadAndParseTerraformFiles(pathToScan, options.detectionDepth);
                parsedFiles = parsedFiles.concat(tfParsedFiles);
                failedFiles = failedFiles.concat(tfFailedFiles);
            }
            catch (err) {
                if (parsedFiles.length > 0 && err instanceof file_loader_1.NoFilesToScanError) {
                    // ignore this error since we've discovered non-terraform files in the previous block
                }
                else {
                    throw err;
                }
            }
        }
        // Duplicate all the files and run them through the custom engine.
        if (rulesOrigin !== types_1.RulesOrigin.Internal) {
            parsedFiles.push(...parsedFiles.map((file) => ({
                ...file,
                engineType: types_1.EngineType.Custom,
            })));
        }
        const scannedFiles = await measurable_methods_1.scanFiles(parsedFiles);
        const resultsWithCustomSeverities = await measurable_methods_1.applyCustomSeverities(scannedFiles, iacOrgSettings.customPolicies);
        let projectPublicIds = {};
        let gitRemoteUrl;
        // When `--report` is set, share the results with the platform first so
        // the formatted output can carry the created project IDs.
        if (options.report) {
            ({ projectPublicIds, gitRemoteUrl } = await share_results_1.formatAndShareResults({
                results: resultsWithCustomSeverities,
                options,
                orgPublicId,
                policy,
                tags,
                attributes,
            }));
        }
        const formattedResults = measurable_methods_1.formatScanResults(resultsWithCustomSeverities, options, iacOrgSettings.meta, projectPublicIds, gitRemoteUrl);
        const { filteredIssues, ignoreCount } = policy_1.filterIgnoredIssues(policy, formattedResults);
        try {
            await measurable_methods_1.trackUsage(filteredIssues);
        }
        catch (e) {
            if (e instanceof usage_tracking_1.TestLimitReachedError) {
                throw e;
            }
            // If something has gone wrong, err on the side of allowing the user to
            // run their tests by squashing the error.
        }
        analytics_1.addIacAnalytics(filteredIssues, {
            ignoredIssuesCount: ignoreCount,
            rulesOrigin,
        });
        // TODO: add support for proper typing of old TestResult interface.
        return {
            results: filteredIssues,
            // NOTE: No file or parsed file data should leave this function.
            failures: detect_1.isLocalFolder(pathToScan)
                ? failedFiles.map(removeFileContent)
                : undefined,
        };
    }
    finally {
        // Always clean the local policy-engine cache, even on failure.
        measurable_methods_1.cleanLocalCache();
    }
}
exports.test = test;
// Strips file content (and any other extra fields) from a failed-file
// record so raw file data never leaves the test flow.
function removeFileContent({ filePath, fileType, failureReason, projectType, }) {
    const sanitisedRecord = { filePath, fileType, failureReason, projectType };
    return sanitisedRecord;
}
exports.removeFileContent = removeFileContent;
// Project tags are only relevant when results are reported to the
// platform; otherwise returns undefined.
function parseTags(options) {
    if (!options.report) {
        return undefined;
    }
    return monitor_1.generateTags(options);
}
// Project attributes are only relevant when results are reported to the
// platform; otherwise returns undefined.
function parseAttributes(options) {
    if (!options.report) {
        return undefined;
    }
    return monitor_1.generateProjectAttributes(options);
}
/***/ }),
/***/ 78272:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.loadAndParseTerraformFiles = exports.pull = exports.localTest = exports.cleanLocalCache = exports.trackUsage = exports.formatScanResults = exports.applyCustomSeverities = exports.getIacOrgSettings = exports.scanFiles = exports.parseFiles = exports.loadFiles = exports.initLocalCache = exports.performanceAnalyticsDecorator = exports.asyncPerformanceAnalyticsDecorator = void 0;
const file_loader_1 = __webpack_require__(13552);
const file_parser_1 = __webpack_require__(2314);
const file_scanner_1 = __webpack_require__(16879);
const results_formatter_1 = __webpack_require__(98425);
const usage_tracking_1 = __webpack_require__(43156);
const local_cache_1 = __webpack_require__(6255);
const apply_custom_severities_1 = __webpack_require__(91470);
const get_iac_org_settings_1 = __webpack_require__(1802);
const index_1 = __webpack_require__(55409);
const oci_pull_1 = __webpack_require__(5029);
const analytics_1 = __webpack_require__(22716);
const types_1 = __webpack_require__(42258);
const handle_terraform_files_1 = __webpack_require__(41806);
// Note: The return type of the returned async function needs to be Promise<Val> for
// the compiler to be happy, so we need to unwrap it with the messy
// Awaiter<ReturnType<T>> rather than just using ReturnType<T> directly.
// Wraps an async method so its wall-clock duration (ms) is recorded under
// `analyticsKey` each time it resolves. A rejection propagates untimed,
// matching the original behaviour.
function asyncPerformanceAnalyticsDecorator(measurableMethod, analyticsKey) {
    return async function (...args) {
        const startedAt = Date.now();
        const returnValue = await measurableMethod(...args);
        analytics_1.performanceAnalyticsObject[analyticsKey] = Date.now() - startedAt;
        return returnValue;
    };
}
exports.asyncPerformanceAnalyticsDecorator = asyncPerformanceAnalyticsDecorator;
// Synchronous counterpart of asyncPerformanceAnalyticsDecorator: records
// the call's duration (ms) under `analyticsKey` when it returns normally.
function performanceAnalyticsDecorator(measurableMethod, analyticsKey) {
    return function (...args) {
        const startedAt = Date.now();
        const returnValue = measurableMethod(...args);
        analytics_1.performanceAnalyticsObject[analyticsKey] = Date.now() - startedAt;
        return returnValue;
    };
}
exports.performanceAnalyticsDecorator = performanceAnalyticsDecorator;
// Each exported operation below is the raw implementation wrapped in a
// performance-analytics decorator, so the time spent in each stage of the
// IaC flow is recorded under its PerformanceAnalyticsKey.
const measurableInitLocalCache = asyncPerformanceAnalyticsDecorator(local_cache_1.initLocalCache, types_1.PerformanceAnalyticsKey.InitLocalCache);
exports.initLocalCache = measurableInitLocalCache;
const measurableLoadFiles = asyncPerformanceAnalyticsDecorator(file_loader_1.loadFiles, types_1.PerformanceAnalyticsKey.FileLoading);
exports.loadFiles = measurableLoadFiles;
const measurableParseFiles = asyncPerformanceAnalyticsDecorator(file_parser_1.parseFiles, types_1.PerformanceAnalyticsKey.FileParsing);
exports.parseFiles = measurableParseFiles;
const measurableScanFiles = asyncPerformanceAnalyticsDecorator(file_scanner_1.scanFiles, types_1.PerformanceAnalyticsKey.FileScanning);
exports.scanFiles = measurableScanFiles;
const measurableGetIacOrgSettings = asyncPerformanceAnalyticsDecorator(get_iac_org_settings_1.getIacOrgSettings, types_1.PerformanceAnalyticsKey.OrgSettings);
exports.getIacOrgSettings = measurableGetIacOrgSettings;
const measurableApplyCustomSeverities = asyncPerformanceAnalyticsDecorator(apply_custom_severities_1.applyCustomSeverities, types_1.PerformanceAnalyticsKey.CustomSeverities);
exports.applyCustomSeverities = measurableApplyCustomSeverities;
const measurableCleanLocalCache = performanceAnalyticsDecorator(local_cache_1.cleanLocalCache, types_1.PerformanceAnalyticsKey.CacheCleanup);
exports.cleanLocalCache = measurableCleanLocalCache;
const measurableFormatScanResults = performanceAnalyticsDecorator(results_formatter_1.formatScanResults, types_1.PerformanceAnalyticsKey.ResultFormatting);
exports.formatScanResults = measurableFormatScanResults;
const measurableTrackUsage = asyncPerformanceAnalyticsDecorator(usage_tracking_1.trackUsage, types_1.PerformanceAnalyticsKey.UsageTracking);
exports.trackUsage = measurableTrackUsage;
// NOTE(review): the three wrappers below all record under the same
// `PerformanceAnalyticsKey.Total` key, so whichever runs last overwrites
// the others' measurement — confirm this is intentional.
const measurableLocalTest = asyncPerformanceAnalyticsDecorator(index_1.test, types_1.PerformanceAnalyticsKey.Total);
exports.localTest = measurableLocalTest;
const measurableOciPull = asyncPerformanceAnalyticsDecorator(oci_pull_1.pull, types_1.PerformanceAnalyticsKey.Total);
exports.pull = measurableOciPull;
const measurableLoadAndParseTerraformFiles = asyncPerformanceAnalyticsDecorator(handle_terraform_files_1.loadAndParseTerraformFiles, types_1.PerformanceAnalyticsKey.Total);
exports.loadAndParseTerraformFiles = measurableLoadAndParseTerraformFiles;
/***/ }),
/***/ 91470:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.applyCustomSeverities = void 0;
const _ = __webpack_require__(96486);
/**
 * Applies user-configured severity overrides to scanned files.
 *
 * For every violated policy whose publicId has an entry in
 * `customPolicies`, the severity is replaced with the custom one (when
 * the custom entry actually carries a severity). Input files are never
 * mutated: each affected file is deep-cloned first.
 */
async function applyCustomSeverities(scannedFiles, customPolicies) {
    // No custom policies configured — return the input untouched.
    if (Object.keys(customPolicies).length === 0) {
        return scannedFiles;
    }
    return scannedFiles.map((file) => {
        // Work on a deep copy so the caller's results stay unchanged.
        const fileCopy = _.cloneDeep(file);
        for (const policy of fileCopy.violatedPolicies) {
            const override = customPolicies[policy.publicId];
            if (override) {
                const customSeverity = override.severity;
                // Keep the original severity when the override has none.
                policy.severity =
                    customSeverity !== null && customSeverity !== undefined
                        ? customSeverity
                        : policy.severity;
            }
        }
        return fileCopy;
    });
}
exports.applyCustomSeverities = applyCustomSeverities;
/***/ }),
/***/ 93669:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checkRequiredFieldsMatch = exports.detectConfigType = exports.REQUIRED_ARM_FIELDS = exports.REQUIRED_CLOUDFORMATION_FIELDS = exports.REQUIRED_K8S_FIELDS = void 0;
const constants_1 = __webpack_require__(68620);
const types_1 = __webpack_require__(42258);
// Minimal sets of top-level keys used to recognise each configuration
// format: a parsed document must contain every listed field to match
// (see detectConfigType / checkRequiredFieldsMatch below).
exports.REQUIRED_K8S_FIELDS = ['apiVersion', 'kind', 'metadata'];
// CloudFormation templates are identified solely by a `Resources` section.
exports.REQUIRED_CLOUDFORMATION_FIELDS = ['Resources'];
// Azure Resource Manager (ARM) templates.
exports.REQUIRED_ARM_FIELDS = ['$schema', 'contentVersion', 'resources'];
/**
 * Classifies each parsed document of a file as CloudFormation,
 * Kubernetes or ARM based on which required top-level fields it has.
 * Documents matching none of the known formats are dropped. The checks
 * run in order (CloudFormation, then K8s, then ARM) and the first match
 * wins.
 */
function detectConfigType(fileData, parsedIacFiles) {
    const detectedFiles = [];
    parsedIacFiles.forEach((parsedFile, docId) => {
        if (checkRequiredFieldsMatch(parsedFile, exports.REQUIRED_CLOUDFORMATION_FIELDS)) {
            detectedFiles.push({
                ...fileData,
                jsonContent: parsedFile,
                projectType: constants_1.IacProjectType.CLOUDFORMATION,
                engineType: types_1.EngineType.CloudFormation,
                // docId is omitted for JSON input files.
                docId: fileData.fileType === 'json' ? undefined : docId,
            });
        }
        else if (checkRequiredFieldsMatch(parsedFile, exports.REQUIRED_K8S_FIELDS)) {
            detectedFiles.push({
                ...fileData,
                jsonContent: parsedFile,
                projectType: constants_1.IacProjectType.K8S,
                engineType: types_1.EngineType.Kubernetes,
                // docId is omitted for JSON input files.
                docId: fileData.fileType === 'json' ? undefined : docId,
            });
        }
        else if (checkRequiredFieldsMatch(parsedFile, exports.REQUIRED_ARM_FIELDS)) {
            // NOTE(review): unlike the branches above, ARM results carry no
            // docId — confirm this asymmetry is intended.
            detectedFiles.push({
                ...fileData,
                jsonContent: parsedFile,
                projectType: constants_1.IacProjectType.ARM,
                engineType: types_1.EngineType.ARM,
            });
        }
    });
    return detectedFiles;
}
exports.detectConfigType = detectConfigType;
/**
 * Returns true when every field in `requiredFields` exists as an own
 * property of `parsedDocument`; false for a missing/falsy document.
 *
 * Bug fix: uses Object.prototype.hasOwnProperty.call(...) instead of
 * calling parsedDocument.hasOwnProperty directly. The document comes
 * from parsing user-supplied config files, which may legitimately
 * contain a key named "hasOwnProperty" (shadowing the method and
 * throwing a TypeError when invoked) or be built without an Object
 * prototype.
 */
function checkRequiredFieldsMatch(parsedDocument, requiredFields) {
    if (!parsedDocument) {
        return false;
    }
    return requiredFields.every((requiredField) => Object.prototype.hasOwnProperty.call(parsedDocument, requiredField));
}
exports.checkRequiredFieldsMatch = checkRequiredFieldsMatch;
/***/ }),
/***/ 11640:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
// This artifact was generated using GopherJS and https://github.com/snyk/snyk-iac-parsers
Object.defineProperty(exports, "__esModule", ({ value: true }));
const gopherJsArtifact = __webpack_require__(24135);
// Converts HCL2 Terraform content to JSON by delegating to the
// GopherJS-generated snyk-iac-parsers artifact (see the banner comment
// above). `files` is presumably a map of file path -> file content —
// confirm against the artifact's parseModule signature; the output
// schema is whatever parseModule produces.
function hclToJsonV2(files) {
    return gopherJsArtifact.parseModule(files);
}
exports.default = hclToJsonV2;
/***/ }),
/***/ 31373:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// This artifact was generated using GopherJS and https://github.com/tmccombs/hcl2json (version v0.3.1)
const gopherJsArtifact = __webpack_require__(6941);
// Converts a single HCL file's raw content to a JSON object via the
// GopherJS build of hcl2json v0.3.1 (see the banner comment above).
// Throws whatever the generated artifact throws on invalid HCL.
function hclToJson(fileContent) {
    return gopherJsArtifact.hcltojson(fileContent);
}
exports.default = hclToJson;
/***/ }),
/***/ 29263:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.FailedToParseTerraformFileError = exports.tryParsingTerraformFile = void 0;
const hcl_to_json_1 = __webpack_require__(31373);
const types_1 = __webpack_require__(42258);
const errors_1 = __webpack_require__(55191);
const error_utils_1 = __webpack_require__(23872);
const constants_1 = __webpack_require__(68620);
/**
 * Parses a single Terraform (HCL2) file into its scan-ready
 * representation. Any parser failure is wrapped in a
 * FailedToParseTerraformFileError carrying the file path.
 */
function tryParsingTerraformFile(fileData) {
    let jsonContent;
    try {
        jsonContent = hcl_to_json_1.default(fileData.fileContent);
    }
    catch (err) {
        // The underlying parser error is intentionally discarded; the
        // file path is enough for the user-facing message.
        throw new FailedToParseTerraformFileError(fileData.filePath);
    }
    return [
        {
            ...fileData,
            jsonContent,
            projectType: constants_1.IacProjectType.TERRAFORM,
            engineType: types_1.EngineType.Terraform,
        },
    ];
}
exports.tryParsingTerraformFile = tryParsingTerraformFile;
class FailedToParseTerraformFileError extends errors_1.CustomError {
    /**
     * @param {string} filename - path of the Terraform file that failed
     * to parse; interpolated into the user-facing message.
     */
    constructor(filename) {
        super('Failed to parse Terraform file');
        this.code = types_1.IaCErrorCodes.FailedToParseTerraformFileError;
        this.strCode = error_utils_1.getErrorStringCode(this.code);
        // Bug fix: the message previously contained the literal text
        // "$(unknown)" and the `filename` argument was never used; it is
        // now interpolated so users see which file failed.
        this.userMessage = `We were unable to parse the Terraform file "${filename}", please ensure it is valid HCL2. This can be done by running it through the 'terraform validate' command.`;
    }
}
exports.FailedToParseTerraformFileError = FailedToParseTerraformFileError;
/***/ }),
/***/ 81893:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.FailedToExtractResourcesInTerraformPlanError = exports.tryParsingTerraformPlan = exports.isTerraformPlan = void 0;
const types_1 = __webpack_require__(42258);
const errors_1 = __webpack_require__(55191);
const error_utils_1 = __webpack_require__(23872);
const constants_1 = __webpack_require__(68620);
/**
 * Folds one plan resource into the scan input, keyed first by section
 * ('data' for data sources, 'resource' otherwise), then by resource
 * type, then by a name made unique via getResourceName.
 * Mutates and returns `scanInput`.
 */
// TODO: investigate if this reduction logic covers all edge-cases (nested modules, similar names, etc')
function terraformPlanReducer(scanInput, resource) {
    const { type, name, mode, index, values } = resource;
    const section = mode === 'data' ? 'data' : 'resource';
    const resourceKey = getResourceName(index, name);
    const resourcesOfType = scanInput[section][type];
    if (resourcesOfType) {
        // Another resource of an already-seen type: add it under its own
        // name (missing values default to an empty object here).
        resourcesOfType[resourceKey] = values || {};
    }
    else {
        // First resource of this type.
        scanInput[section][type] = { [resourceKey]: values };
    }
    return scanInput;
}
/**
 * Maps each top-level expression key to its first reference, skipping
 * keys with no reference. Expressions can be nested, but only one depth
 * is resolved here.
 */
function getExpressions(expressions) {
    const resolved = {};
    for (const [key, expression] of Object.entries(expressions)) {
        const reference = getReference(expression);
        if (reference) {
            resolved[key] = reference;
        }
    }
    return resolved;
}
// Very naive implementation: a value's references can be composed of a
// number of keys; only the first reference is used for the time being.
function getReference(value) {
    const references = value.references;
    if (references === null || references === undefined) {
        return undefined;
    }
    return references[0];
}
// Builds the unique key for a resource: plain name when it has no
// index, otherwise name["index"].
function getResourceName(index, name) {
    if (index === undefined) {
        return name;
    }
    return `${name}["${index}"]`;
}
/**
 * Folds one entry of the plan's resource_changes into the scan input,
 * but only when its change actions are valid for the current scan mode.
 * The post-change state (`after`) is presented as the resource values.
 */
// TODO: investigate if we need to address also `after_unknown` field.
function resourceChangeReducer(scanInput, resource, isFullScan) {
    const change = resource.change || { actions: [], after: {} };
    if (!isValidResourceActions(change.actions, isFullScan)) {
        return scanInput;
    }
    return terraformPlanReducer(scanInput, { ...resource, values: change.after || {} });
}
/**
 * Returns true when `action` matches, element by element, one of the
 * allowed action sequences for the current scan mode (full scan vs
 * delta scan).
 */
function isValidResourceActions(action, isFullScan) {
    const validActions = isFullScan
        ? types_1.VALID_RESOURCE_ACTIONS_FOR_FULL_SCAN
        : types_1.VALID_RESOURCE_ACTIONS_FOR_DELTA_SCAN;
    return validActions.some((validAction) => validAction.length === action.length &&
        validAction.every((field, idx) => field === action[idx]));
}
/**
 * Fills in first-depth reference attributes on already-resolved
 * resources, using the root module's configuration entries. Data
 * sources are skipped, and a reference never overwrites a value that
 * was already resolved. Mutates and returns `scanInput`.
 */
function referencedResourcesResolver(scanInput, resources) {
    for (const resource of resources) {
        const { type, name, mode, index, expressions } = resource;
        // References inside data sources are ignored for the time being.
        if (mode === 'data') {
            continue;
        }
        const resourcesOfType = scanInput.resource && scanInput.resource[type];
        const resourceKey = getResourceName(index, name);
        const resolvedResource = resourcesOfType ? resourcesOfType[resourceKey] : undefined;
        // Only update references on resources that already carry some
        // resolved attributes.
        if (!resolvedResource) {
            continue;
        }
        const references = getExpressions(expressions);
        for (const key of Object.keys(references)) {
            // Never replace an already-resolved value with a reference.
            if (!resolvedResource[key]) {
                resolvedResource[key] = references[key];
            }
        }
        scanInput.resource[type][resourceKey] = resolvedResource;
    }
    return scanInput;
}
// Returns the plan's resource_changes list; a plan without one yields
// an empty list.
function extractResourceChanges(terraformPlanJson) {
    const changes = terraformPlanJson.resource_changes;
    return changes ? changes : [];
}
// Returns the root module's configuration resources, or an empty list
// when the plan has no configuration/root_module/resources chain.
function extractReferencedResources(terraformPlanJson) {
    const configuration = terraformPlanJson.configuration;
    const rootModule =
        configuration === null || configuration === undefined
            ? undefined
            : configuration.root_module;
    const resources =
        rootModule === null || rootModule === undefined
            ? undefined
            : rootModule.resources;
    return resources || [];
}
/**
 * Builds the scan input from a Terraform plan JSON: folds every
 * resource change into an empty { resource, data } skeleton, then
 * resolves first-depth references from the configuration section.
 */
function extractResourcesForScan(terraformPlanJson, isFullScan = false) {
    let scanInput = { resource: {}, data: {} };
    for (const change of extractResourceChanges(terraformPlanJson)) {
        scanInput = resourceChangeReducer(scanInput, change, isFullScan);
    }
    const referencedResources = extractReferencedResources(terraformPlanJson);
    return referencedResourcesResolver(scanInput, referencedResources);
}
// A JSON document is treated as a Terraform plan when it carries a
// resource_changes field (the only required field checked here).
function isTerraformPlan(terraformPlanJson) {
    return terraformPlanJson.resource_changes !== undefined;
}
exports.isTerraformPlan = isTerraformPlan;
/**
 * Converts a Terraform plan JSON into its scan-ready representation.
 * Any failure of the resource-extraction reduction is wrapped in
 * FailedToExtractResourcesInTerraformPlanError.
 */
function tryParsingTerraformPlan(terraformPlanFile, terraformPlanJson, { isFullScan } = { isFullScan: false }) {
    let jsonContent;
    try {
        jsonContent = extractResourcesForScan(terraformPlanJson, isFullScan);
    }
    catch (err) {
        // The original error is dropped; the custom error carries a
        // stable user-facing message instead.
        throw new FailedToExtractResourcesInTerraformPlanError();
    }
    return [
        {
            ...terraformPlanFile,
            jsonContent,
            engineType: types_1.EngineType.Terraform,
            projectType: constants_1.IacProjectType.TERRAFORM,
        },
    ];
}
exports.tryParsingTerraformPlan = tryParsingTerraformPlan;
// This error is due to the complex reduction logic, so it catches scenarios we might have not covered.
class FailedToExtractResourcesInTerraformPlanError extends errors_1.CustomError {
    /**
     * @param {string} [message] - optional override for the internal
     * error message; the user-facing message is always the same.
     */
    constructor(message) {
        const defaultMessage = 'Failed to extract resources from Terraform plan JSON file';
        super(message || defaultMessage);
        const code = types_1.IaCErrorCodes.FailedToExtractResourcesInTerraformPlanError;
        this.code = code;
        this.strCode = error_utils_1.getErrorStringCode(code);
        this.userMessage =
            'We failed to extract resource changes from the Terraform plan file, please contact support@snyk.io, if possible with a redacted version of the file';
    }
}
exports.FailedToExtractResourcesInTerraformPlanError = FailedToExtractResourcesInTerraformPlanError;
/***/ }),
/***/ 25476:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.filterIgnoredIssues = void 0;
/**
 * Applies a .snyk ignore policy to scan results.
 *
 * Returns the filtered results plus the total number of issues that the
 * policy ignored. Without a policy the results pass through untouched
 * with an ignoreCount of 0.
 */
function filterIgnoredIssues(policy, results) {
    if (!policy) {
        return { filteredIssues: results, ignoreCount: 0 };
    }
    // Adapt each result to the vuln shape the policy filter expects,
    // matching ignores exactly.
    const filteredVulns = results.map((res) => policy.filter(toIaCVulnAdapter(res), undefined, 'exact'));
    let ignoreCount = 0;
    for (const vuln of filteredVulns) {
        ignoreCount += vuln.filtered.ignore.length;
    }
    const filteredIssues = filteredVulns.map((vuln) => toFormattedResult(vuln));
    return { filteredIssues, ignoreCount };
}
exports.filterIgnoredIssues = filterIgnoredIssues;
function toIaCVulnAdapter(result) {
return {
vulnerabilities: result.result.cloudConfigResults.map((cloudConfigResult) => {
const annotatedResult = cloudConfigResult;
// Copy the cloudConfigPath array to avoid modifying the original with
// splice.
// Insert the targetFile into the path so that it is taken into account
// when determining whether an ignore rul