// @atomist/sdm
// Version:
// Atomist Software Delivery Machine SDK
// 141 lines • 6.64 kB
// JavaScript
;
/*
* Copyright © 2020 Atomist, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TypeScript-compiled CommonJS preamble: flag the module for ES-module interop
// and pre-declare the exported bindings (assigned later in this file).
Object.defineProperty(exports, "__esModule", { value: true });
exports.CompressingGoalCache = exports.CompressionMethod = void 0;
const Deferred_1 = require("@atomist/automation-client/lib/internal/util/Deferred");
const string_1 = require("@atomist/automation-client/lib/internal/util/string");
const fg = require("fast-glob");
const fs = require("fs-extra");
const JSZip = require("jszip");
const os = require("os");
const path = require("path");
const child_process_1 = require("../../../api-helper/misc/child_process");
const FileSystemGoalCacheArchiveStore_1 = require("./FileSystemGoalCacheArchiveStore");
// Numeric enum (compiled TypeScript shape): maps each name to its ordinal and
// each ordinal back to its name, e.g. CompressionMethod.TAR === 0 and
// CompressionMethod[0] === "TAR".
var CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {});
["TAR", "ZIP"].forEach((name, ordinal) => {
    CompressionMethod[CompressionMethod[name] = ordinal] = name;
});
/**
 * Cache implementation that caches files produced by goals to an archive that can then be stored,
 * using tar/gzip or zip (depending on the configured CompressionMethod) to create the archives
 * per goal invocation (and classifier if present).
 */
class CompressingGoalCache {
    /**
     * @param store archive store used to persist and retrieve cache archives;
     *        defaults to the local file system store
     * @param method compression method; TAR uses the tar and gzip binaries,
     *        ZIP uses the zip binary with a pure-JavaScript JSZip fallback
     */
    constructor(store = new FileSystemGoalCacheArchiveStore_1.FileSystemGoalCacheArchiveStore(), method = CompressionMethod.TAR) {
        this.store = store;
        this.method = method;
    }
    /**
     * Archive the given files from the project and hand the archive to the
     * configured store.
     *
     * @param gi goal invocation, used for logging and repository identification
     * @param project project whose baseDir contains the files to archive
     * @param files file/directory paths relative to project.baseDir
     * @param classifier optional cache classifier
     * @returns result of store.store, or undefined if archive creation failed
     */
    async put(gi, project, files, classifier) {
        const archiveName = "atomist-cache";
        const teamArchiveFileName = path.join(os.tmpdir(), `${archiveName}.${string_1.guid().slice(0, 7)}`);
        const slug = `${gi.id.owner}/${gi.id.repo}`;
        const spawnLogOpts = {
            log: gi.progressLog,
            cwd: project.baseDir,
        };
        let teamArchiveFileNameWithSuffix = teamArchiveFileName;
        if (this.method === CompressionMethod.TAR) {
            const tarResult = await child_process_1.spawnLog("tar", ["-cf", teamArchiveFileName, ...files], spawnLogOpts);
            if (tarResult.code) {
                gi.progressLog.write(`Failed to create tar archive '${teamArchiveFileName}' for ${slug}`);
                return undefined;
            }
            const gzipResult = await child_process_1.spawnLog("gzip", ["-3", teamArchiveFileName], spawnLogOpts);
            if (gzipResult.code) {
                gi.progressLog.write(`Failed to gzip tar archive '${teamArchiveFileName}' for ${slug}`);
                return undefined;
            }
            // gzip compresses in place and appends .gz to the file name
            teamArchiveFileNameWithSuffix += ".gz";
        }
        else if (this.method === CompressionMethod.ZIP) {
            teamArchiveFileNameWithSuffix += ".zip";
            try {
                // Prefer the zip binary; any failure falls through to the
                // JSZip fallback in the catch block below
                const zipResult = await child_process_1.spawnLog("zip", ["-qr", teamArchiveFileNameWithSuffix, ...files], spawnLogOpts);
                if (zipResult.error) {
                    throw zipResult.error;
                }
                else if (zipResult.code || zipResult.signal) {
                    const msg = `Failed to run zip binary to create ${teamArchiveFileNameWithSuffix}: ${zipResult.code} (${zipResult.signal})`;
                    gi.progressLog.write(msg);
                    throw new Error(msg);
                }
            }
            catch (e) {
                // Pure-JavaScript fallback: build the archive with JSZip
                const zip = new JSZip();
                for (const file of files) {
                    const p = path.join(project.baseDir, file);
                    if ((await fs.stat(p)).isFile()) {
                        zip.file(file, fs.createReadStream(p));
                    }
                    else {
                        // Directory: add every file beneath it, including dotfiles
                        const dirFiles = await fg(`${file}/**/*`, { cwd: project.baseDir, dot: true });
                        for (const dirFile of dirFiles) {
                            zip.file(dirFile, fs.createReadStream(path.join(project.baseDir, dirFile)));
                        }
                    }
                }
                const defer = new Deferred_1.Deferred();
                zip.generateNodeStream({
                    type: "nodebuffer",
                    streamFiles: true,
                    compression: "DEFLATE",
                    compressionOptions: { level: 6 },
                })
                    .pipe(fs.createWriteStream(teamArchiveFileNameWithSuffix))
                    .on("finish", () => {
                        defer.resolve(teamArchiveFileNameWithSuffix);
                    })
                    // Bug fix: without an error handler a write-stream failure
                    // left the Deferred unsettled, hanging this put() forever
                    // and raising an unhandled "error" event
                    .on("error", err => {
                        defer.reject(err);
                    });
                await defer.promise;
            }
        }
        return this.store.store(gi, classifier, teamArchiveFileNameWithSuffix);
    }
    /**
     * Remove the cache entry for the given classifier from the store.
     */
    async remove(gi, classifier) {
        await this.store.delete(gi, classifier);
    }
    /**
     * Retrieve a cache archive from the store and extract it into the
     * project's base directory.
     *
     * @throws Error("No cache entry") if the store produced no archive file
     */
    async retrieve(gi, project, classifier) {
        const archiveName = "atomist-cache";
        const teamArchiveFileName = path.join(os.tmpdir(), `${archiveName}.${string_1.guid().slice(0, 7)}`);
        await this.store.retrieve(gi, classifier, teamArchiveFileName);
        if (!fs.existsSync(teamArchiveFileName)) {
            throw new Error("No cache entry");
        }
        if (this.method === CompressionMethod.TAR) {
            // -z also reverses the gzip applied during put()
            await child_process_1.spawnLog("tar", ["-xzf", teamArchiveFileName], {
                log: gi.progressLog,
                cwd: project.baseDir,
            });
        }
        else if (this.method === CompressionMethod.ZIP) {
            const zip = await JSZip.loadAsync(await fs.readFile(teamArchiveFileName));
            for (const file of Object.keys(zip.files)) {
                // zip.file() returns null for directory entries; skip those.
                // Reuse the entry instead of looking it up a second time.
                const entry = zip.file(file);
                if (entry) {
                    const p = path.join(project.baseDir, file);
                    await fs.ensureDir(path.dirname(p));
                    await fs.writeFile(p, await entry.async("nodebuffer"));
                }
            }
        }
    }
}
exports.CompressingGoalCache = CompressingGoalCache;
//# sourceMappingURL=CompressingGoalCache.js.map