// sonarqube-scanner — SonarQube/SonarCloud Scanner for the JavaScript world
// (package-page metadata from extraction: 142 lines (141 loc), 6.55 kB, JavaScript)
;
// TypeScript-emitted interop helper: wraps a CommonJS export object so it can
// be consumed like an ES-module default import (`mod.default`). Modules that
// already carry the `__esModule` flag are passed through unchanged.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public API of this module; the function declarations below are hoisted,
// so assigning them here (before their definitions) is safe.
exports.getCacheFileLocation = getCacheFileLocation;
exports.extractArchive = extractArchive;
exports.validateChecksum = validateChecksum;
exports.getCacheDirectories = getCacheDirectories;
/*
* sonar-scanner-npm
* Copyright (C) SonarSource Sàrl
* mailto:info AT sonarsource DOT com
*
* You can redistribute and/or modify this program under the terms of
* the Sonar Source-Available License Version 1, as published by SonarSource Sàrl.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the Sonar Source-Available License for more details.
*
* You should have received a copy of the Sonar Source-Available License
* along with this program; if not, see https://sonarsource.com/license/ssal/
*/
const adm_zip_1 = __importDefault(require("adm-zip"));
const node_crypto_1 = __importDefault(require("node:crypto"));
const node_path_1 = __importDefault(require("node:path"));
const tar_stream_1 = __importDefault(require("tar-stream"));
const node_zlib_1 = __importDefault(require("node:zlib"));
const constants_1 = require("./constants");
const deps_1 = require("./deps");
const logging_1 = require("./logging");
const types_1 = require("./types");
/**
 * Looks up a previously downloaded file in the scanner cache.
 *
 * @param properties scanner properties (used to locate the cache root)
 * @param {Object} opts - `checksum` (cache sub-directory / expected SHA-256),
 *                        `filename` (cached file name), `alias` (display name for logs)
 * @returns the absolute path of the cached file, or `null` on a cache miss
 * @throws if the cached file exists but fails checksum validation
 *         (the corrupt entry is evicted before the error is rethrown)
 */
async function getCacheFileLocation(properties, { checksum, filename, alias }) {
    const { fs } = (0, deps_1.getDeps)();
    const filePath = node_path_1.default.join(getParentCacheDirectory(properties), checksum, filename);
    if (!fs.existsSync(filePath)) {
        (0, logging_1.log)(logging_1.LogLevel.INFO, `No Cache found for ${alias}`);
        return null;
    }
    (0, logging_1.log)(logging_1.LogLevel.DEBUG, alias, 'version found in cache:', filename);
    try {
        // Re-validate the cached file on every hit to guard against corruption.
        await validateChecksum(filePath, checksum);
    }
    catch (error) {
        // Evict the corrupt entry so the next run re-downloads it.
        await fs.remove(filePath);
        throw error;
    }
    return filePath;
}
/**
 * Extracts an archive (`.tar.gz` via tar-stream/zlib, anything else via adm-zip)
 * into `toPath`, guarding every entry against Zip-Slip path traversal.
 *
 * @param fromPath path of the archive to extract
 * @param toPath   target directory (created on demand for tar entries)
 * @throws if any entry would land outside `toPath`, or on read/decompression/write errors
 */
async function extractArchive(fromPath, toPath) {
    const { fs } = (0, deps_1.getDeps)();
    (0, logging_1.log)(logging_1.LogLevel.DEBUG, `Extracting ${fromPath} to ${toPath}`);
    // Resolve the target root once so containment checks cannot be fooled by
    // sibling directories sharing a prefix (e.g. "/cache/x-evil" vs "/cache/x")
    // or by absolute entry names inside the archive.
    const targetRoot = node_path_1.default.resolve(toPath);
    const resolveEntryPath = (entryName) => {
        const canonicalPath = node_path_1.default.resolve(targetRoot, entryName);
        if (canonicalPath !== targetRoot && !canonicalPath.startsWith(targetRoot + node_path_1.default.sep)) {
            return null; // entry escapes the target directory
        }
        return canonicalPath;
    };
    if (fromPath.endsWith('.tar.gz')) {
        await new Promise((resolve, reject) => {
            const extract = tar_stream_1.default.extract();
            extract.on('entry', (header, stream, next) => {
                const entryPath = resolveEntryPath(header.name);
                // Prevent Zip Slip vulnerability by ensuring the path is within the target directory
                if (entryPath === null) {
                    stream.resume();
                    reject(new Error(`Entry "${header.name}" would extract outside target directory`));
                    return;
                }
                if (header.type === 'directory') {
                    // Directory entries carry no content; materialize the directory
                    // instead of writing an empty file in its place.
                    fs.mkdirSync(entryPath, { recursive: true });
                    stream.on('end', next);
                    stream.resume();
                    return;
                }
                // Ensure the parent directory exists
                fs.mkdirSync(node_path_1.default.dirname(entryPath), { recursive: true });
                const writeStream = fs.createWriteStream(entryPath, { mode: header.mode });
                // Surface disk/permission failures instead of crashing on an
                // unhandled 'error' event with the promise never settling.
                writeStream.on('error', reject);
                stream.pipe(writeStream);
                stream.on('end', next); // End of file, move onto next file
                stream.resume(); // Auto drain the stream
            });
            extract.on('finish', () => {
                resolve(null);
            });
            extract.on('error', err => {
                (0, logging_1.log)(logging_1.LogLevel.ERROR, 'Error extracting tar.gz', err);
                reject(err);
            });
            const readStream = fs.createReadStream(fromPath);
            const gunzip = node_zlib_1.default.createGunzip();
            // Propagate read and decompression errors; pipe() does not forward them.
            readStream.on('error', reject);
            gunzip.on('error', reject);
            readStream.pipe(gunzip).pipe(extract);
        });
    }
    else {
        const zip = new adm_zip_1.default(fromPath);
        for (const entry of zip.getEntries()) {
            // Prevent Zip Slip vulnerability by ensuring the path is within the target directory
            if (resolveEntryPath(entry.entryName) === null) {
                throw new Error(`Entry "${entry.entryName}" would extract outside target directory`);
            }
        }
        zip.extractAllTo(toPath, true, true);
    }
}
/**
 * Computes the SHA-256 digest of a file's contents.
 *
 * @param filepath path of the file to hash
 * @returns the digest as a lowercase hex string
 * @throws (rejects) if the file cannot be read
 */
async function generateChecksum(filepath) {
    const { fs } = (0, deps_1.getDeps)();
    // The injected fs exposes a callback-style readFile; adapt it to a Promise.
    return new Promise((resolve, reject) => {
        fs.readFile(filepath, (err, data) => {
            if (err) {
                return reject(err);
            }
            const hash = node_crypto_1.default.createHash('sha256');
            hash.update(data);
            resolve(hash.digest('hex'));
        });
    });
}
/**
 * Verifies that a file's SHA-256 checksum matches the expected value.
 *
 * @param filePath         file to verify
 * @param expectedChecksum expected hex digest; must be a non-empty string
 * @throws if no checksum was provided, or if the computed digest differs
 */
async function validateChecksum(filePath, expectedChecksum) {
    // A missing/empty checksum is a hard error: never skip verification silently.
    if (!expectedChecksum) {
        throw new Error('Checksum not provided');
    }
    (0, logging_1.log)(logging_1.LogLevel.DEBUG, `Verifying checksum ${expectedChecksum}`);
    const checksum = await generateChecksum(filePath);
    (0, logging_1.log)(logging_1.LogLevel.DEBUG, `Checksum Value: ${checksum}`);
    if (checksum !== expectedChecksum) {
        throw new Error(`Checksum verification failed for ${filePath}. Expected checksum ${expectedChecksum} but got ${checksum}`);
    }
}
/**
 * Computes (and prepares) the cache locations for a download.
 *
 * @param properties scanner properties (used to locate the cache root)
 * @param {Object} opts - `checksum` (cache sub-directory), `filename` (archive name)
 * @returns `{ archivePath, unarchivePath }` — where the archive is stored and
 *          where its extracted contents go; the containing directory is created
 *          if it does not exist yet
 */
async function getCacheDirectories(properties, { checksum, filename }) {
    const { fs } = (0, deps_1.getDeps)();
    const checksumDir = node_path_1.default.join(getParentCacheDirectory(properties), checksum);
    const archivePath = node_path_1.default.join(checksumDir, filename);
    const unarchivePath = node_path_1.default.join(checksumDir, filename + constants_1.UNARCHIVE_SUFFIX);
    // Create destination directory if it doesn't exist
    const parentCacheDirectory = node_path_1.default.dirname(unarchivePath);
    if (!fs.existsSync(parentCacheDirectory)) {
        (0, logging_1.log)(logging_1.LogLevel.DEBUG, `Creating Cache directory as it doesn't exist: ${parentCacheDirectory}`);
        fs.mkdirSync(parentCacheDirectory, { recursive: true });
    }
    return { archivePath, unarchivePath };
}
/**
 * Resolves the root cache directory: `<sonar.userHome>/<SONAR_CACHE_DIR>`.
 *
 * @param properties scanner properties providing the Sonar user home
 * @returns the absolute-or-relative joined cache root path
 */
function getParentCacheDirectory(properties) {
    const userHome = properties[types_1.ScannerProperty.SonarUserHome];
    return node_path_1.default.join(userHome, constants_1.SONAR_CACHE_DIR);
}