// @useblacksmith/cache: Blacksmith Actions cache lib
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntryAxios = exports.getCacheEntry = exports.getCacheVersion = exports.createHttpClient = exports.getCacheApiUrl = exports.getCacheRequestHeaders = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const auth_1 = require("@actions/http-client/lib/auth");
const crypto = __importStar(require("crypto"));
const fs = __importStar(require("fs"));
const url_1 = require("url");
const utils = __importStar(require("./cacheUtils"));
const downloadUtils_1 = require("./downloadUtils");
const options_1 = require("../options");
const requestUtils_1 = require("./requestUtils");
const node_fetch_1 = __importDefault(require("node-fetch"));
const axios_1 = __importDefault(require("axios"));
const versionSalt = '1.0';
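// Builds the standard headers sent with every Blacksmith cache request:
// the bearer token from BLACKSMITH_CACHE_TOKEN plus repo, VM, and region
// metadata drawn from the environment (region defaults to 'eu-central').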
function getCacheRequestHeaders() {
var _a, _b, _c, _d;
return {
Accept: createAcceptHeader('application/json', '6.0-preview.1'),
Authorization: `Bearer ${process.env.BLACKSMITH_CACHE_TOKEN}`,
'X-Github-Repo-Name': (_a = process.env.GITHUB_REPO_NAME) !== null && _a !== void 0 ? _a : '',
'X-Blacksmith-VM-ID': (_b = process.env.BLACKSMITH_VM_ID) !== null && _b !== void 0 ? _b : '',
'X-Blacksmith-Raw-VM-ID': (_c = process.env.VM_ID) !== null && _c !== void 0 ? _c : '',
'X-Cache-Region': (_d = process.env.BLACKSMITH_REGION) !== null && _d !== void 0 ? _d : 'eu-central',
'User-Agent': 'axios/cache/v5'
};
}
exports.getCacheRequestHeaders = getCacheRequestHeaders;
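// Resolves the cache backend URL for a resource. BLACKSMITH_CACHE_URL wins
// when set; otherwise the staging or production Blacksmith API is chosen
// based on whether PETNAME contains 'staging'.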
function getCacheApiUrl(resource) {
var _a, _b;
let baseUrl = process.env.BLACKSMITH_CACHE_URL;
core.info(`cache backend url: ${baseUrl}`);
if (!baseUrl) {
baseUrl = ((_a = process.env.PETNAME) === null || _a === void 0 ? void 0 : _a.includes('staging'))
? 'https://stagingapi.blacksmith.sh/cache'
: 'https://api.blacksmith.sh/cache';
}
const url = `${baseUrl}/${resource}`;
if ((_b = process.env.PETNAME) === null || _b === void 0 ? void 0 : _b.includes('staging')) {
core.info(`Using staging API: ${url}`);
}
return url;
}
exports.getCacheApiUrl = getCacheApiUrl;
function createAcceptHeader(type, apiVersion) {
return `${type};api-version=${apiVersion}`;
}
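// Default request options: the JSON accept header plus the repo-name header.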
function getRequestOptions() {
core.debug(`Setting GITHUB_REPO_NAME: ${process.env['GITHUB_REPO_NAME']}`);
const requestOptions = {
headers: {
Accept: createAcceptHeader('application/json', '6.0-preview.1'),
'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME']
}
};
return requestOptions;
}
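// Creates an @actions/http-client instance authenticated with the
// BLACKSMITH_CACHE_TOKEN bearer credential and the default request options.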
function createHttpClient() {
const token = process.env['BLACKSMITH_CACHE_TOKEN'];
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token !== null && token !== void 0 ? token : '');
return new http_client_1.HttpClient('useblacksmith/cache', [bearerCredentialHandler], getRequestOptions());
}
exports.createHttpClient = createHttpClient;
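// Derives the cache version as a SHA-256 hash over the cached paths, the
// compression method, an optional 'windows-only' marker, and a salt, so that
// incompatible archives never collide under the same key.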
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// Copy the caller's paths so the pushes below don't mutate the input array.
const components = [...paths];
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod);
}
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only');
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
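// Looks up a cache entry via node-fetch with a 3-second abort timeout.
// Returns null on a 204 (cache miss); retries up to three times on
// timeouts and 5xx responses before giving up.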
function getCacheEntry(keys, paths, options) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const maxRetries = 3;
let retries = 0;
const cacheToken = process.env['BLACKSMITH_CACHE_TOKEN'];
const repoName = process.env['GITHUB_REPO_NAME'];
core.info(`Checking cache for keys ${keys.join(',')} and version ${version} using single-use cache token for repo ${repoName}: ${cacheToken}`);
while (retries <= maxRetries) {
try {
const before = Date.now();
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 3000);
const response = yield (0, node_fetch_1.default)(getCacheApiUrl(resource), {
method: 'GET',
headers: {
Accept: createAcceptHeader('application/json', '6.0-preview.1'),
'X-Github-Repo-Name': repoName || '',
Authorization: `Bearer ${cacheToken}`,
'X-Cache-Region': (_a = process.env['BLACKSMITH_REGION']) !== null && _a !== void 0 ? _a : 'eu-central',
'User-Agent': 'node-fetch/cache'
},
signal: controller.signal
});
clearTimeout(timeoutId);
core.debug(`Cache lookup took ${Date.now() - before}ms`);
// Cache not found
if (response.status === 204) {
// List cache for primary key only if cache miss occurs
if (core.isDebug()) {
yield printCachesListForDiagnostics(keys[0], createHttpClient(), version);
}
return null;
}
if (response.status < 200 || response.status >= 300) {
// Attach the status so the catch block below can retry 5xx responses.
const err = new Error(`Cache service responded with ${response.status}`);
err.response = { status: response.status };
throw err;
}
const cacheResult = (yield response.json());
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.');
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
return cacheResult;
}
catch (error) {
const isTimeout = error.name === 'AbortError';
const status = (_b = error.response) === null || _b === void 0 ? void 0 : _b.status;
if ((status && status >= 500) || isTimeout) {
retries++;
if (retries <= maxRetries) {
if (isTimeout) {
core.warning(`Request timed out. Retrying (attempt ${retries} of ${maxRetries})`);
}
else {
core.warning(`Retrying due to error: ${error.message} (attempt ${retries} of ${maxRetries})`);
}
continue;
}
}
if (status) {
throw new Error(`Cache service responded with ${status}`);
}
else if (isTimeout) {
throw new Error('Request timed out after 3 seconds');
}
else {
throw error;
}
}
}
throw new Error(`Failed to get cache entry after ${maxRetries} retries`);
});
}
exports.getCacheEntry = getCacheEntry;
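// Axios variant of getCacheEntry with the same lookup, retry, and miss
// semantics, relying on axios' built-in timeout (ECONNABORTED) and
// validateStatus instead of an AbortController.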
function getCacheEntryAxios(keys, paths, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const maxRetries = 3;
let retries = 0;
const headers = getCacheRequestHeaders();
core.info(`Checking cache for keys ${keys.join(',')} and version ${version} using single-use cache token for repo ${headers['X-Github-Repo-Name']}: ${headers['Authorization']}`);
while (retries <= maxRetries) {
try {
const before = Date.now();
const response = yield axios_1.default.get(getCacheApiUrl(resource), {
headers,
timeout: 3000,
validateStatus: () => true // Don't throw on non-2xx status codes
});
core.debug(`Cache lookup took ${Date.now() - before}ms`);
// Cache not found
if (response.status === 204) {
// List cache for primary key only if cache miss occurs
if (core.isDebug()) {
yield printCachesListForDiagnostics(keys[0], createHttpClient(), version);
}
return null;
}
if (response.status < 200 || response.status >= 300) {
// validateStatus lets non-2xx through, so attach the status for the retry logic below.
const err = new Error(`Cache service responded with ${response.status}`);
err.response = { status: response.status };
throw err;
}
const cacheResult = response.data;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.');
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
return cacheResult;
}
catch (error) {
const isTimeout = error.code === 'ECONNABORTED';
const status = (_a = error.response) === null || _a === void 0 ? void 0 : _a.status;
if ((status && status >= 500) || isTimeout) {
retries++;
if (retries <= maxRetries) {
if (isTimeout) {
core.warning(`Request timed out. Retrying (attempt ${retries} of ${maxRetries})`);
}
else {
core.warning(`Retrying due to error: ${error.message} (attempt ${retries} of ${maxRetries})`);
}
continue;
}
}
if (status) {
throw new Error(`Cache service responded with ${status}`);
}
else if (isTimeout) {
throw new Error('Request timed out after 3 seconds');
}
else {
throw error;
}
}
}
throw new Error(`Failed to get cache entry after ${maxRetries} retries`);
});
}
exports.getCacheEntryAxios = getCacheEntryAxios;
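// Debug helper: when a lookup misses, lists caches sharing the primary key so
// users can spot near-miss entries with a different version or scope.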
function printCachesListForDiagnostics(key, httpClient, version) {
return __awaiter(this, void 0, void 0, function* () {
const resource = `caches?key=${encodeURIComponent(key)}`;
const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
if (response.statusCode === 200) {
const cacheListResult = response.result;
const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
if (totalCount && totalCount > 0) {
core.debug(`No matching cache found for cache key '${key}', version '${version}' and scope '${process.env['GITHUB_REF']}'. One or more caches exist with a similar key, but they have a different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`);
for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
}
}
}
});
}
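// Downloads a cache archive to archivePath. Azure blob URLs can use the
// Azure SDK or a concurrent HttpClient download depending on options; all
// other hosts use the concurrent HttpClient implementation.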
function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
}
else {
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
// await downloadCacheAxiosMultiPart(archiveLocation, archivePath)
}
});
}
exports.downloadCache = downloadCache;
// Reserve Cache
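// Posts the key, version, and expected archive size to the cache service and
// returns the typed reservation response for the caller to act on.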
function reserveCache(key, paths, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const reserveCacheRequest = {
key,
version,
cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
};
const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
var _a;
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest, {
'X-Cache-Region': (_a = process.env['BLACKSMITH_REGION']) !== null && _a !== void 0 ? _a : 'eu-central'
});
}));
return response;
});
}
exports.reserveCache = reserveCache;
function getContentRange(start, end) {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
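// PUTs a single chunk to a pre-signed URL with an inclusive Content-Range,
// retrying via retryHttpClientResponse, and returns the ETag header the
// storage backend reports for the chunk.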
function uploadChunk(resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Length': end - start + 1
};
const s3HttpClient = new http_client_1.HttpClient('useblacksmith/cache');
const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
return s3HttpClient.sendStream('PUT', resourceUrl, openStream(), additionalHeaders);
}));
if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
core.debug(`Upload chunk failed with status message: ${JSON.stringify(uploadChunkResponse.message.statusMessage)}`);
core.debug(`Upload chunk failed with headers: ${JSON.stringify(uploadChunkResponse.message.headers)}`);
core.debug(`Upload chunk failed with response body: ${yield uploadChunkResponse.readBody()}`);
throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
}
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
return uploadChunkResponse.message.headers.etag;
});
}
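// Splits the archive into 25 MiB chunks (one per pre-signed URL), uploads
// them in parallel over a shared file descriptor, and returns the ETags in
// order. The descriptor is closed once every upload has settled.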
function uploadFile(archivePath, urls) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
core.debug(`archivePath: ${archivePath}`);
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const fd = fs.openSync(archivePath, 'r');
const maxChunkSize = 25 * 1024 * 1024; // Matches the chunkSize in our cache service.
core.debug('Awaiting all uploads');
let eTags = [];
try {
eTags = yield Promise.all(urls.map((url, index) => __awaiter(this, void 0, void 0, function* () {
const offset = index * maxChunkSize;
const chunkSize = Math.min(fileSize - offset, maxChunkSize);
const start = offset;
let end = offset + chunkSize - 1;
if (chunkSize !== maxChunkSize) {
end = fileSize - 1;
}
core.debug(`Uploading chunk to ${url}: ${start}-${end}/${fileSize}`);
const eTag = yield uploadChunk(url, () => fs
.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
})
.on('error', error => {
throw new Error(`Cache upload failed because file read failed with ${error.message}`);
}), start, end);
core.debug(`Upload to ${url} complete`);
return eTag !== null && eTag !== void 0 ? eTag : '';
})));
}
catch (error) {
core.debug(`Cache upload failed: ${JSON.stringify(error)}`);
throw error;
}
finally {
fs.closeSync(fd);
}
return eTags;
});
}
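// Finalizes a reserved cache entry by posting the archive size, chunk ETags,
// and multipart uploadId back to the cache service.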
function commitCache(httpClient, cacheId, filesize, eTags, uploadId) {
return __awaiter(this, void 0, void 0, function* () {
const commitCacheRequest = {
size: filesize,
eTags,
uploadId
};
return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
}));
});
}
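// Top-level save: uploads the archive chunks to the pre-signed URLs, then
// commits the cache entry, throwing on a non-success status code.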
function saveCache(cacheId, archivePath, urls, uploadId) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
core.debug('Upload cache');
const eTags = yield uploadFile(archivePath, urls);
// Commit Cache
core.debug('Committing cache');
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize, eTags, uploadId);
if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
}
core.info('Cache saved successfully');
});
}
exports.saveCache = saveCache;
//# sourceMappingURL=cacheHttpClient.js.map