@invisit/webpack-aws-lambda-auto-deploy-plugin
Version:
Uploads compiled assets to S3 after build
333 lines • 13.4 kB
JavaScript
;
// ---------------------------------------------------------------------------
// TypeScript-emitted (tslib-style) CommonJS interop helpers. Generated code —
// do not hand-edit; regenerate by recompiling the TypeScript source.
// ---------------------------------------------------------------------------
// __createBinding: re-export property `k` of module `m` on `o` (optionally
// renamed to `k2`), using a getter when possible so the binding stays live.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault: attach a CommonJS module as the `default` export of the
// namespace object built by __importStar.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar: emulate `import * as ns from "mod"` for CommonJS modules —
// copies own enumerable props onto a fresh namespace object and sets `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// __importDefault: emulate `import x from "mod"` — wrap non-ESM modules so
// the module itself becomes the `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
// Mark this module as an ES-module transpilation for downstream importers.
Object.defineProperty(exports, "__esModule", { value: true });
const Path = __importStar(require("path"));
const lodash_1 = require("lodash");
const AWS = __importStar(require("aws-sdk"));
const archiver_1 = __importDefault(require("archiver"));
const Sh = __importStar(require("shelljs"));
const helpers_1 = require("./helpers");
const types_1 = require("./types");
const prelude_ts_1 = require("@3fv/prelude-ts");
const Fs = __importStar(require("fs"));
const deferred_1 = require("@3fv/deferred");
const events_1 = require("events");
const bluebird_1 = __importDefault(require("bluebird"));
const p_queue_1 = __importDefault(require("p-queue"));
// Promisified fs.readFile.
// NOTE(review): appears unused in this file — deploy() reads archives with
// Fs.readFileSync instead. Candidate for removal; confirm no external use.
const readFileAsync = bluebird_1.default.promisify(Fs.readFile);
// Serializes deployments: at most one deploy job runs at a time, so multi-
// compiler builds cannot upload concurrently.
const deployQueue = new p_queue_1.default({
    concurrency: 1
});
// Staging directory for zip archives — the system temp dir, ensured to exist
// (mkdir -p is a no-op when it already does).
const outputDir = prelude_ts_1.asOption(Sh.tempdir())
    .tap(dir => {
    Sh.mkdir("-p", dir);
})
    .get();
// Monotonic counter embedded in archive filenames and S3 keys so that
// successive deploy artifacts within one process get distinct names.
let deployCounter = 0;
const log = helpers_1.getLogger();
const debug = helpers_1.getDebug();
/**
 * Record a failure on the webpack compilation, coercing non-Error
 * values (e.g. plain message strings) into an Error instance first.
 *
 * @param compilation the webpack compilation whose `errors` list is appended to
 * @param err an Error instance or an error message
 */
const compileError = (compilation, err) => {
    const error = err instanceof Error ? err : new Error(err);
    compilation.errors.push(error);
};
class AWSLambdaAutoDeployPlugin {
constructor(config, awsConfig = config.aws?.config ?? {}, entryMap = mapToEntries(config.mappings)) {
this.config = config;
this.awsConfig = awsConfig;
this.entryMap = entryMap;
this.events = new events_1.EventEmitter({
captureRejections: true
});
this.pkg = require(Path.join(helpers_1.RootPluginDir, "package.json"));
this.name = this.pkg.name;
this.clients = {
s3: undefined,
lambda: undefined
};
/**
* Deploy the compilation to the configured
* entry <-> lambda mappings
*
* @param {webpack.compilation.Compilation} compilation
* @param {EntryLambdaMapping} entryMapping
* @returns {Promise<void>}
*/
this.deploy = async ([compilation, { entry, fn: fns }]) => {
deployCounter++;
const entryOutputPath = compilation.outputOptions.path;
// const lockfile = Path.join(entryOutputPath, `.webpack-lambda-autodeploy.lock`)
// if (!Sh.test("-e", lockfile)) {
// Sh.touch(lockfile)
// }
//
// if (await Lockfile.check(lockfile)) {
// log.warn(`lock file is locked ${lockfile}`)
// return
// }
//
// await Lockfile.lock(lockfile)
// .then(async release => {
try {
const entryFiles = lodash_1.uniq(Object.entries(compilation.assets).map(([name, out]) => prelude_ts_1.asOption(out.existsAt)
.orElse(() => prelude_ts_1.asOption(Path.join(entryOutputPath, name)))
.filter(Fs.existsSync)
.getOrThrow("existsAt is not defined")));
log.info(`Deploying entry (${entry}) to functions ${fns.join(", ")}: `, entryFiles);
try {
const archiveFile = await this.archive(entry, entryOutputPath, entryFiles);
const { config } = this, storageConfig = config.aws?.storage ??
{
type: "lambda"
};
const zipFileBuf = Fs.readFileSync(archiveFile, null);
const archiveSize = zipFileBuf.length;
debug(`Using bundle of size: ${archiveSize}bytes`);
log.info(`Using bundle of size: ${archiveSize}`);
if (storageConfig.type === "s3") {
const s3StorageConfig = storageConfig;
const { bucket, pathPrefix = "", namePrefix = "" } = s3StorageConfig;
const path = pathPrefix.replace(/^\//, "").replace(/\/$/, ""), isValidPath = path.length > 0, key = `${isValidPath ? path + "/" : ""}${namePrefix}${entry}${deployCounter}.zip`;
log.info(`Uploading s3://${bucket}/${key}`);
await this.s3
.putObject({
Bucket: bucket,
Key: key,
ContentType: "application/zip",
Body: zipFileBuf
})
.promise();
log.info(`Uploaded s3://${bucket}/${key}`);
await bluebird_1.default.mapSeries(fns, async (fn) => {
const params = {
FunctionName: fn,
S3Bucket: bucket,
S3Key: key
};
log.trace(`Updating function with params: ${fn}`, params);
const result = await this.lambda
.updateFunctionCode(params)
.promise();
log.trace(`Upload result`, result);
});
}
else {
await bluebird_1.default.mapSeries(fns, async (fn) => {
const params = {
FunctionName: fn,
ZipFile: zipFileBuf
};
log.trace(`Updating fn with zip + params: ${fn}`, params);
const result = await this.lambda
.updateFunctionCode(params)
.promise();
log.trace(`Updated fn with zip buff result`, result);
});
}
//
// this.emit("deploy", {
// phase: "complete",
// storage: storageConfig,
// archiveFile,
// archiveSize,
// functionNames: fns,
// region: this.awsConfig.region,
// timestamp: new Date()
//
//
// } as AWSDeployEvent<Storage>)
}
catch (err) {
log.error(`Failed to deploy archive`, err);
throw err;
}
}
catch (err) {
log.error(`Autodeploy failed`, err);
}
// return release()
// })
};
/**
* Process done compilation event
*
* @param {webpack.Stats | webpack.compilation.MultiStats} statsOrMultiStats
* @returns {Promise<void>}
*/
this.onDone = async (statsOrMultiStats) => {
const { entryMap } = this;
const allStats = helpers_1.isMultiStats(statsOrMultiStats)
? statsOrMultiStats.stats
: [statsOrMultiStats];
if (allStats.some(it => it.hasErrors())) {
log.warn(`Build contains errors, skipping deploy`);
return;
}
const pendingDeployments = lodash_1.uniq(allStats
.map(({ compilation }) => [
compilation,
entryMap[compilation.compiler?.name] ?? entryMap[types_1.DefaultEntryName]
// asOption()
// .map(name => entryMap[name])
// .getOrCall(() => Object.values(entryMap)[0])
])
.filter(([, entry]) => Boolean(entry)));
try {
deployQueue.add(() => bluebird_1.default.mapSeries(pendingDeployments, async (args) => {
const [compilation] = args;
try {
await this.deploy(args);
}
catch (err) {
compilation.errors.push(err);
this.emit("error", err);
}
}))
.catch(err => {
log.error(`Failed deploy`, pendingDeployments, err);
});
}
catch (err) {
log.error(`AutoDeploy failed`, err);
//throw err
}
};
}
/**
* Zip up the assets
*
* @param entry
* @param entryOutputPath
* @param entryFiles
* @returns
*/
async archive(entry, entryOutputPath, entryFiles) {
const deferred = new deferred_1.Deferred(), handleDone = (event, outputFile) => {
log.info(`Done (${event})`, outputFile);
if (!deferred.isSettled()) {
deferred.resolve(outputFile);
}
}, handleError = (err) => {
log.error(`An error has occurred for entry (${entry})`, err);
if (!deferred.isSettled()) {
deferred.reject(err);
}
else {
log.warn(`Received another error, but this archive has already settled`, err);
}
};
let output;
try {
const outputFile = Path.join(outputDir, `${entry}-${deployCounter}-${helpers_1.getFileTimestamp()}.zip`);
output = Fs.createWriteStream(outputFile);
const archive = archiver_1.default("zip", {});
output.on("close", function () {
log.info(`Bundle Complete (${outputFile}): ${archive.pointer()} bytes`);
handleDone("close", outputFile);
});
output.on("end", function () {
log.trace("Data has been drained");
//handleDone("end", outputFile)
});
archive.on("warning", function (err) {
if (err.code === "ENOENT") {
log.warn(`code: ${err.code}`);
}
else {
handleError(err);
}
});
archive.on("error", handleError);
archive.pipe(output);
entryFiles.forEach(file => prelude_ts_1.asOption(file)
.tap(file => log.info(`${file} -> ${outputFile}`))
.tap(file => archive.file(file, {
name: Path.relative(entryOutputPath, file)
})));
await archive.finalize();
//handleDone("finalize", outputFile)
return deferred.promise;
}
catch (err) {
handleError(err);
throw err;
}
}
// get namePrefix() {
// return asOption(this.config)
// .filter(config => config.)
// }
/**
* Entries that have configured functions
*
* @returns {string[]}
*/
get entryNames() {
return Object.keys(this.entryMap);
}
get s3() {
return prelude_ts_1.asOption(this.clients.s3).getOrCall(() => (this.clients.s3 = new AWS.S3(this.awsConfig ?? {})));
}
get lambda() {
return prelude_ts_1.asOption(this.clients.lambda).getOrCall(() => (this.clients.lambda = new AWS.Lambda(this.awsConfig ?? {})));
}
emit(event, data) {
this.events.emit(event, data);
return this;
}
on(event, listener) {
this.events.on(event, listener);
return this;
}
off(event, listener) {
this.events.off(event, listener);
return this;
}
apply(anyCompiler) {
const compiler = anyCompiler;
compiler.hooks.done.tapPromise(this.name, this.onDone);
}
async handleErrors(error, compilation) {
compileError(compilation, `AWSLambdaAutoDeployPlugin: ${error}`);
throw error;
}
}
exports.default = AWSLambdaAutoDeployPlugin;
/**
 * Normalize the plugin `mappings` config into a map of entry name ->
 * `{ entry, fn }`, merging function name lists when the same entry appears
 * in multiple mappings.
 *
 * Fix: the non-array shorthand previously set `entry` to the array
 * `[DefaultEntryName]`, which only produced the right map key because
 * `String(["default"]) === "default"`, and left the stored `entry` value
 * array-typed while the array branch stores strings. It is now a plain
 * string in both branches; the resulting map keys are unchanged.
 *
 * @param it an array of `{ fn, entry }` mappings, or a bare function
 *   name / list of names treated as one mapping for the default entry
 * @returns {Record<string, {entry: string, fn: string[]}>}
 */
function mapToEntries(it) {
    const mappings = Array.isArray(it)
        ? it
        : [{ fn: it, entry: types_1.DefaultEntryName }];
    return mappings.reduce((map, mapping) => {
        const { fn, entry } = mapping;
        return {
            ...map,
            [entry]: {
                entry,
                // Merge with any functions already registered for this entry;
                // a bare string fn is promoted to a single-element list.
                fn: [
                    ...(map[entry]?.fn ?? []),
                    ...(typeof fn === "string" ? [fn] : fn)
                ]
            }
        };
    }, {});
}
//# sourceMappingURL=AWSLambdaAutoDeployPlugin.js.map