// foundry-toolchain: GitHub Action that installs Foundry (https://github.com/foundry-rs/foundry),
// the blazing fast, portable and modular toolkit for Ethereum application development.
// This file is the bundled dist output (webpack/ncc); generated code follows.
require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({
/***/ 7799:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const path = __importStar(__nccwpck_require__(1017));
const utils = __importStar(__nccwpck_require__(1518));
const cacheHttpClient = __importStar(__nccwpck_require__(8245));
const tar_1 = __nccwpck_require__(6490);
class ValidationError extends Error {
constructor(message) {
super(message);
this.name = 'ValidationError';
Object.setPrototypeOf(this, ValidationError.prototype);
}
}
exports.ValidationError = ValidationError;
class ReserveCacheError extends Error {
constructor(message) {
super(message);
this.name = 'ReserveCacheError';
Object.setPrototypeOf(this, ReserveCacheError.prototype);
}
}
exports.ReserveCacheError = ReserveCacheError;
function checkPaths(paths) {
if (!paths || paths.length === 0) {
throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
}
}
function checkKey(key) {
if (key.length > 512) {
throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`);
}
const regex = /^[^,]*$/;
if (!regex.test(key)) {
throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);
}
}
/**
 * Checks for the presence of the Actions cache service.
 *
 * @returns boolean true if the Actions cache service feature is available, otherwise false
 */
function isFeatureAvailable() {
return !!process.env['ACTIONS_CACHE_URL'];
}
exports.isFeatureAvailable = isFeatureAvailable;
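/*
 * Illustrative usage sketch (not part of the bundle): callers typically guard
 * cache operations behind this feature check so workflows degrade gracefully
 * when the cache service is unavailable. The path and key are hypothetical.
 *
 *   if (isFeatureAvailable()) {
 *       await restoreCache(['~/.foundry/cache'], 'foundry-cache-key');
 *   } else {
 *       core.warning('Actions cache service unavailable; skipping restore.');
 *   }
 */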
/**
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param options cache download options
* @param enableCrossOsArchive an optional boolean that, when enabled, allows a cache created on any platform to be restored on Windows
* @returns string the key for the cache hit, otherwise undefined (see the usage sketch after this function)
*/
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths);
restoreKeys = restoreKeys || [];
const keys = [primaryKey, ...restoreKeys];
core.debug('Resolved Keys:');
core.debug(JSON.stringify(keys));
if (keys.length > 10) {
throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
}
for (const key of keys) {
checkKey(key);
}
const compressionMethod = yield utils.getCompressionMethod();
let archivePath = '';
try {
// paths are needed to compute the cache version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod,
enableCrossOsArchive
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, compressionMethod);
core.info('Cache restored successfully');
return cacheEntry.cacheKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Suppress all non-validation cache-related errors because caching should be optional
core.warning(`Failed to restore: ${error.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return undefined;
});
}
exports.restoreCache = restoreCache;
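/*
 * Illustrative usage sketch (not part of the bundle), with hypothetical paths
 * and keys: restoreCache resolves to the matched key on a hit and to undefined
 * on a miss, and only validation errors are thrown.
 *
 *   const restoredKey = await restoreCache(
 *       ['~/.foundry/cache/rpc'],                 // paths to restore
 *       `rpc-cache-${process.env.GITHUB_SHA}`,    // primary key
 *       ['rpc-cache-']                            // ordered fallback key prefixes
 *   );
 *   if (restoredKey === undefined) {
 *       core.info('Cache miss; continuing without a cache.');
 *   }
 */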
/**
* Saves a list of files with the specified key
*
* @param paths a list of file paths to be cached
* @param key an explicit key for saving the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean that, when enabled, allows a cache saved on Windows to be restored on any platform
* @returns number the cacheId if the cache was saved successfully; throws an error if the save fails (see the usage sketch after this function)
*/
function saveCache(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths);
checkKey(key);
const compressionMethod = yield utils.getCompressionMethod();
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
if (cachePaths.length === 0) {
throw new Error(`Path Validation Error: the path(s) specified in the action for caching do not exist, so no cache will be saved.`);
}
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
try {
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check takes place in the ReserveCache API using the enterprise file size limit
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
core.debug('Reserving Cache');
const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
compressionMethod,
enableCrossOsArchive,
cacheSize: archiveFileSize
});
if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;
}
else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {
throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);
}
else {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else {
core.warning(`Failed to save: ${typedError.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return cacheId;
});
}
exports.saveCache = saveCache;
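/*
 * Illustrative usage sketch (not part of the bundle), with a hypothetical path
 * and key: a ReserveCacheError (e.g. another job is saving the same key) is
 * logged rather than thrown, so cacheId can come back as -1.
 *
 *   const cacheId = await saveCache(
 *       ['~/.foundry/cache/rpc'],
 *       `rpc-cache-${process.env.GITHUB_SHA}`
 *   );
 *   if (cacheId !== -1) {
 *       core.info(`Saved cache with id ${cacheId}`);
 *   }
 */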
//# sourceMappingURL=cache.js.map
/***/ }),
/***/ 8245:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const http_client_1 = __nccwpck_require__(1825);
const auth_1 = __nccwpck_require__(2001);
const crypto = __importStar(__nccwpck_require__(6113));
const fs = __importStar(__nccwpck_require__(7147));
const url_1 = __nccwpck_require__(7310);
const utils = __importStar(__nccwpck_require__(1518));
const downloadUtils_1 = __nccwpck_require__(5500);
const options_1 = __nccwpck_require__(6215);
const requestUtils_1 = __nccwpck_require__(3981);
const versionSalt = '1.0';
function getCacheApiUrl(resource) {
const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
if (!baseUrl) {
throw new Error('Cache Service Url not found, unable to restore cache.');
}
const url = `${baseUrl}_apis/artifactcache/${resource}`;
core.debug(`Resource Url: ${url}`);
return url;
}
function createAcceptHeader(type, apiVersion) {
return `${type};api-version=${apiVersion}`;
}
function getRequestOptions() {
const requestOptions = {
headers: {
Accept: createAcceptHeader('application/json', '6.0-preview.1')
}
};
return requestOptions;
}
function createHttpClient() {
const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// Copy so the pushes below do not mutate the caller's paths array
const components = paths.slice();
// Add the compression method to the cache version so a compressed cache
// is only restored with the matching compression method
if (compressionMethod) {
components.push(compressionMethod);
}
// Only add the platform marker on Windows when enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only');
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex');
}
exports.getCacheVersion = getCacheVersion;
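/*
 * Worked example (illustrative, hypothetical path): for paths
 * ['~/.foundry/cache'], compression method 'zstd', on a Linux runner (so no
 * 'windows-only' marker), the version is the SHA-256 of the joined components:
 *
 *   sha256('~/.foundry/cache' + '|' + 'zstd' + '|' + '1.0')
 *
 * On Windows with enableCrossOsArchive left false, 'windows-only' is inserted
 * before the salt, yielding a different version, so caches are not shared
 * across platforms by default.
 */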
function getCacheEntry(keys, paths, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
// Cache not found
if (response.statusCode === 204) {
// List cache for primary key only if cache miss occurs
if (core.isDebug()) {
yield printCachesListForDiagnostics(keys[0], httpClient, version);
}
return null;
}
if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, so bail out.
throw new Error('Cache not found.');
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
return cacheResult;
});
}
exports.getCacheEntry = getCacheEntry;
function printCachesListForDiagnostics(key, httpClient, version) {
return __awaiter(this, void 0, void 0, function* () {
const resource = `caches?key=${encodeURIComponent(key)}`;
const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
if (response.statusCode === 200) {
const cacheListResult = response.result;
const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
if (totalCount && totalCount > 0) {
core.debug(`No matching cache found for cache key '${key}', version '${version}', and scope '${process.env['GITHUB_REF']}'. One or more caches exist with a similar key, but they have a different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`);
for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
}
}
}
});
}
function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = options_1.getDownloadOptions(options);
if (downloadOptions.useAzureSdk &&
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath);
}
});
}
exports.downloadCache = downloadCache;
// Reserve Cache
function reserveCache(key, paths, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const reserveCacheRequest = {
key,
version,
cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
};
const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
}));
return response;
});
}
exports.reserveCache = reserveCache;
function getContentRange(start, end) {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
};
const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
}));
if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) {
throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
}
});
}
function uploadFile(httpClient, cacheId, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, 'r');
const uploadOptions = options_1.getUploadOptions(options);
const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug('Awaiting all uploads');
let offset = 0;
try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = Math.min(fileSize - offset, maxChunkSize);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs
.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
})
.on('error', error => {
throw new Error(`Cache upload failed because file read failed with ${error.message}`);
}), start, end);
}
})));
}
finally {
fs.closeSync(fd);
}
return;
});
}
function commitCache(httpClient, cacheId, filesize) {
return __awaiter(this, void 0, void 0, function* () {
const commitCacheRequest = { size: filesize };
return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () {
return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
}));
});
}
function saveCache(cacheId, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
core.debug('Upload cache');
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
core.debug('Committing cache');
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
}
core.info('Cache saved successfully');
});
}
exports.saveCache = saveCache;
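/*
 * Summary of the save protocol implemented in this module (derived from the
 * functions above):
 *
 *   1. reserveCache - POST  {ACTIONS_CACHE_URL}_apis/artifactcache/caches
 *                     reserves a cacheId for the key/version pair.
 *   2. uploadFile   - PATCH .../caches/{cacheId}
 *                     uploads the archive in concurrent, Content-Range chunks.
 *   3. commitCache  - POST  .../caches/{cacheId}
 *                     finalizes the entry with the total archive size.
 */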
//# sourceMappingURL=cacheHttpClient.js.map
/***/ }),
/***/ 1518:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const exec = __importStar(__nccwpck_require__(1514));
const glob = __importStar(__nccwpck_require__(8090));
const io = __importStar(__nccwpck_require__(7436));
const fs = __importStar(__nccwpck_require__(7147));
const path = __importStar(__nccwpck_require__(1017));
const semver = __importStar(__nccwpck_require__(5911));
const util = __importStar(__nccwpck_require__(3837));
const uuid_1 = __nccwpck_require__(2155);
const constants_1 = __nccwpck_require__(8840);
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory() {
return __awaiter(this, void 0, void 0, function* () {
const IS_WINDOWS = process.platform === 'win32';
let tempDirectory = process.env['RUNNER_TEMP'] || '';
if (!tempDirectory) {
let baseLocation;
if (IS_WINDOWS) {
// On Windows use the USERPROFILE env variable
baseLocation = process.env['USERPROFILE'] || 'C:\\';
}
else {
if (process.platform === 'darwin') {
baseLocation = '/Users';
}
else {
baseLocation = '/home';
}
}
tempDirectory = path.join(baseLocation, 'actions', 'temp');
}
const dest = path.join(tempDirectory, uuid_1.v4());
yield io.mkdirP(dest);
return dest;
});
}
exports.createTempDirectory = createTempDirectory;
function getArchiveFileSizeInBytes(filePath) {
return fs.statSync(filePath).size;
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
return paths;
});
}
exports.resolvePaths = resolvePaths;
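/*
 * Illustrative example (hypothetical layout): with GITHUB_WORKSPACE set to
 * /home/runner/work/repo and a pattern of ['cache/**'], a matched file such as
 * /home/runner/work/repo/cache/solc becomes the workspace-relative entry
 * 'cache/solc', so tar entries stay relative to the workspace root.
 */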
function unlinkFile(filePath) {
return __awaiter(this, void 0, void 0, function* () {
return util.promisify(fs.unlink)(filePath);
});
}
exports.unlinkFile = unlinkFile;
function getVersion(app) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Checking ${app} --version`);
let versionOutput = '';
try {
yield exec.exec(`${app} --version`, [], {
ignoreReturnCode: true,
silent: true,
listeners: {
stdout: (data) => (versionOutput += data.toString()),
stderr: (data) => (versionOutput += data.toString())
}
});
}
catch (err) {
core.debug(err.message);
}
versionOutput = versionOutput.trim();
core.debug(versionOutput);
return versionOutput;
});
}
// Use zstandard if possible to maximize cache performance
function getCompressionMethod() {
return __awaiter(this, void 0, void 0, function* () {
const versionOutput = yield getVersion('zstd');
const version = semver.clean(versionOutput);
if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
// zstd is not installed
return constants_1.CompressionMethod.Gzip;
}
else if (!version || semver.lt(version, 'v1.3.2')) {
// zstd is installed but using a version earlier than v1.3.2
// v1.3.2 is required to use the `--long` options in zstd
return constants_1.CompressionMethod.ZstdWithoutLong;
}
else {
return constants_1.CompressionMethod.Zstd;
}
});
}
exports.getCompressionMethod = getCompressionMethod;
function getCacheFileName(compressionMethod) {
return compressionMethod === constants_1.CompressionMethod.Gzip
? constants_1.CacheFilename.Gzip
: constants_1.CacheFilename.Zstd;
}
exports.getCacheFileName = getCacheFileName;
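/*
 * Decision table implied by getCompressionMethod/getCacheFileName above:
 *
 *   zstd missing   -> CompressionMethod.Gzip            -> cache.tgz
 *   zstd < 1.3.2   -> CompressionMethod.ZstdWithoutLong -> cache.tzst
 *   zstd >= 1.3.2  -> CompressionMethod.Zstd            -> cache.tzst
 */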
function getGnuTarPathOnWindows() {
return __awaiter(this, void 0, void 0, function* () {
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion('tar');
return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
});
}
exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
function assertDefined(name, value) {
if (value === undefined) {
throw Error(`Expected ${name} but value was undefined`);
}
return value;
}
exports.assertDefined = assertDefined;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}
exports.isGhes = isGhes;
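/*
 * Illustrative examples: with GITHUB_SERVER_URL unset or 'https://github.com',
 * isGhes() returns false; with a GitHub Enterprise Server URL such as the
 * hypothetical 'https://github.example.com', it returns true.
 */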
//# sourceMappingURL=cacheUtils.js.map
/***/ }),
/***/ 8840:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
// Long range mode was added to zstd in v1.3.2.
// This enum value is for earlier versions of zstd that do not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
exports.DefaultRetryDelay = 5000;
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
// The default path of GNU tar on hosted Windows runners
exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
// The default path of BSD tar on hosted Windows runners
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
exports.TarFilename = 'cache.tar';
exports.ManifestFilename = 'manifest.txt';
//# sourceMappingURL=constants.js.map
/***/ }),
/***/ 5500:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const http_client_1 = __nccwpck_require__(1825);
const storage_blob_1 = __nccwpck_require__(4100);
const buffer = __importStar(__nccwpck_require__(4300));
const fs = __importStar(__nccwpck_require__(7147));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
const utils = __importStar(__nccwpck_require__(1518));
const constants_1 = __nccwpck_require__(8840);
const requestUtils_1 = __nccwpck_require__(3981);
const abort_controller_1 = __nccwpck_require__(2557);
/**
* Pipes the body of an HTTP response to a stream
*
* @param response the HTTP response
* @param output the writable stream
*/
function pipeResponseToStream(response, output) {
return __awaiter(this, void 0, void 0, function* () {
const pipeline = util.promisify(stream.pipeline);
yield pipeline(response.message, output);
});
}
/**
* Class for tracking the download state and displaying stats.
*/
class DownloadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.segmentIndex = 0;
this.segmentSize = 0;
this.segmentOffset = 0;
this.receivedBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Progress to the next segment. Only call this method when the previous segment
* is complete.
*
* @param segmentSize the length of the next segment
*/
nextSegment(segmentSize) {
this.segmentOffset = this.segmentOffset + this.segmentSize;
this.segmentIndex = this.segmentIndex + 1;
this.segmentSize = segmentSize;
this.receivedBytes = 0;
core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
}
/**
* Sets the number of bytes received for the current segment.
*
* @param receivedBytes the number of bytes received
*/
setReceivedBytes(receivedBytes) {
this.receivedBytes = receivedBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.segmentOffset + this.receivedBytes;
}
/**
* Returns true if the download is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current download stats. Once the download completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.segmentOffset + this.receivedBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const downloadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MB/s`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setReceivedBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the download
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.DownloadProgress = DownloadProgress;
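/*
 * Illustrative usage sketch (not part of the bundle): track a hypothetical
 * 10 MB cache downloaded in a single segment.
 *
 *   const progress = new DownloadProgress(10 * 1024 * 1024);
 *   progress.startDisplayTimer();            // logs stats every second
 *   progress.nextSegment(10 * 1024 * 1024);  // begin the only segment
 *   progress.setReceivedBytes(10 * 1024 * 1024);
 *   progress.stopDisplayTimer();             // prints the final 100% line
 */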
/**
* Download the cache using the Actions toolkit http-client
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const writeStream = fs.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient('actions/cache');
const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
// Abort download if no traffic received over the socket.
downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
downloadResponse.message.destroy();
core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
});
yield pipeResponseToStream(downloadResponse, writeStream);
// Validate download size.
const contentLengthHeader = downloadResponse.message.headers['content-length'];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
if (actualLength !== expectedLength) {
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
}
}
else {
core.debug('Unable to validate download, no Content-Length header');
}
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
* @param options the download options with the defaults set
*/
function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {
retryOptions: {
// Override the timeout used when downloading each 4 MB chunk
// The default is 2 min / MB, which is way too slow
tryTimeoutInMs: options.timeoutInMs
}
});
const properties = yield client.getProperties();
const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
if (contentLength < 0) {
// We should never hit this condition, but just in case fall back to downloading the
// file as one large stream
core.debug('Unable to determine content length, downloading file with http-client...');
yield downloadCacheHttpClient(archiveLocation, archivePath);
}
else {
// Use downloadToBuffer for faster downloads, since internally it splits the
// file into 4 MB chunks which can then be parallelized and retried independently
//
// If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
// on 64-bit systems), split the download into multiple segments.
// ~2 GB = 2147483647 bytes; beyond this we start getting out-of-range errors,
// so cap the segment size accordingly.
const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs.openSync(archivePath, 'w');
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
const abortSignal = controller.signal;
while (!downloadProgress.isDone()) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
abortSignal,
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
}));
if (result === 'timeout') {
controller.abort();
throw new Error('Aborting cache download as the download time exceeded the timeout.');
}
else if (Buffer.isBuffer(result)) {
fs.writeFileSync(fd, result);
}
}
}
finally {
downloadProgress.stopDisplayTimer();
fs.closeSync(fd);
}
}
});
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
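/*
 * Worked example of the segmentation above (illustrative): for a hypothetical
 * 5 GiB blob with maxSegmentSize of 2147483647 bytes (~2 GiB), the loop
 * downloads three segments of roughly 2 GiB, 2 GiB, and 1 GiB, each buffered
 * in memory and written to the open file descriptor before the next segment
 * starts.
 */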
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
let timeoutHandle;
const timeoutPromise = new Promise(resolve => {
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
});
return Promise.race([promise, timeoutPromise]).then(result => {
clearTimeout(timeoutHandle);
return result;
});
});
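/*
 * Illustrative usage sketch (not part of the bundle): race a promise against a
 * timeout. Note the helper does not cancel the losing promise; callers such as
 * downloadCacheStorageSDK abort the underlying work themselves via an
 * AbortController.
 *
 *   const result = await promiseWithTimeout(5000, somePromise); // hypothetical promise
 *   if (result === 'timeout') {
 *       // give up and clean up
 *   }
 */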
//# sourceMappingURL=downloadUtils.js.map
/***/ }),
/***/ 3981:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const http_client_1 = __nccwpck_require__(1825);
const constants_1 = __nccwpck_require__(8840);
function isSuccessStatusCode(statusCode) {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300;
}
exports.isSuccessStatusCode = isSuccessStatusCode;
function isServerErrorStatusCode(statusCode) {
if (!statusCode) {
return true;
}
return statusCode >= 500;
}
exports.isServerErrorStatusCode = isServerErrorStatusCode;
function isRetryableStatusCode(statusCode) {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
http_client_1.HttpCodes.BadGateway,
http_client_1.HttpCodes.ServiceUnavailable,
http_client_1.HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
exports.isRetryableStatusCode = isRetryableStatusCode;
function sleep(milliseconds) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => setTimeout(resolve, milliseconds));
});
}
function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) {
return __awaiter(this, void 0, void 0, function* () {
let errorMessage = '';
let attempt = 1;
while (attempt <= maxAttempts) {
let response = undefined;
let statusCode = undefined;
let isRetryable = false;
try {
response = yield method();
}
catch (error) {
if (onError) {
response = onError(error);
}
isRetryable = true;
errorMessage = error.message;
}
if (response) {
statusCode = getStatusCode(response);
if (!isServerErrorStatusCode(statusCode)) {
return response;
}
}
if (statusCode) {
isRetryable = isRetryableStatusCode(statusCode);
errorMessage = `Cache service responded with ${statusCode}`;
}
core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
if (!isRetryable) {
core.debug(`${name} - Error is not retryable`);
break;
}
yield sleep(delay);
attempt++;
}
throw Error(`${name} failed: ${errorMessage}`);
});
}
exports.retry = retry;
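/*
 * Illustrative usage sketch (not part of the bundle), with a hypothetical
 * request: a non-5xx response is returned immediately; thrown errors and
 * retryable statuses (502, 503, 504) are retried up to maxAttempts (default 2)
 * with a fixed delay; other 5xx responses fail without retrying.
 *
 *   const response = await retry(
 *       'getCacheEntry',
 *       () => httpClient.getJson(url),   // hypothetical call
 *       (res) => res.statusCode
 *   );
 */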
function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
return __awaiter(this, void 0, void 0, function* () {
return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay,
// If the error object contains the statusCode property, extract it and return
// a TypedResponse<T> so it can be processed by the retry logic.
(error) => {
if (error instanceof http_client_1.HttpClientError) {
return {
statusCode: error.statusCode,
result: null,
headers: {},
error
};
}
else {
return undefined;
}
});
});
}
exports.retryTypedResponse = retryTypedResponse;
function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
return __awaiter(this, void 0, void 0, function* () {
return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay);
});
}
exports.retryHttpClientResponse = retryHttpClientResponse;
//# sourceMappingURL=requestUtils.js.map
/***/ }),
/***/ 6490:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const