@invisit/webpack-aws-lambda-auto-deploy-plugin

Uploads compiled assets to s3 after build
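
The compiled plugin source (AWSAutoDeployPlugin.js) follows. As a rough orientation before reading it, here is a minimal, hypothetical webpack configuration sketch. The option names (mappings, aws.config, aws.storage, preDeployScript, verbose) are inferred from the constructor and deploy logic below; the entry, function, bucket, and path names are placeholders, not documented values.

// webpack.config.js (hypothetical usage sketch; names are placeholders)
const AWSAutoDeployPlugin = require("@invisit/webpack-aws-lambda-auto-deploy-plugin").default;

module.exports = {
  name: "api", // matched against the mapping "entry" via compilation.compiler.name
  entry: { api: "./src/api.js" },
  target: "node",
  output: { path: __dirname + "/dist", filename: "[name].js", libraryTarget: "commonjs2" },
  plugins: [
    new AWSAutoDeployPlugin({
      verbose: true,
      // one or more Lambda functions per webpack entry
      mappings: [{ entry: "api", fn: ["my-api-function"] }],
      aws: {
        // passed straight to the aws-sdk S3/Lambda clients
        config: { region: "us-east-1" },
        // with type "s3" the zip is uploaded first and updateFunctionCode points at S3Bucket/S3Key;
        // without it the zip buffer is sent directly as ZipFile
        storage: { type: "s3", bucket: "my-deploy-bucket", pathPrefix: "lambda-artifacts" }
      },
      // optional shell command run before each deploy; {{entry}}, {{timestamp}} and
      // {{deployCounter}} are substituted into the template
      preDeployScript: "echo deploying {{entry}} at {{timestamp}}"
    })
  ]
};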

"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); const Path = __importStar(require("path")); const lodash_1 = require("lodash"); const AWS = __importStar(require("aws-sdk")); const archiver_1 = __importDefault(require("archiver")); const Sh = __importStar(require("shelljs")); const helpers_1 = require("./helpers"); const types_1 = require("./types"); const prelude_ts_1 = require("@3fv/prelude-ts"); const Fs = __importStar(require("fs")); const deferred_1 = require("@3fv/deferred"); const events_1 = require("events"); const bluebird_1 = __importDefault(require("bluebird")); const p_queue_1 = __importDefault(require("p-queue")); const guard_1 = require("@3fv/guard"); const util_1 = require("util"); const child_process = __importStar(require("child_process")); const debug_1 = __importDefault(require("debug")); //const readFileAsync = Bluebird.promisify(Fs.readFile) const deployQueues = new Map(); const getDeployQueue = (entry) => prelude_ts_1.asOption(deployQueues.get(entry)).getOrCall(() => { const queue = new p_queue_1.default({ concurrency: 1 }); deployQueues.set(entry, queue); return queue; }); const execAsync = util_1.promisify(child_process.exec); const fileExistsAsync = util_1.promisify(Fs.exists); // (file: string) => Bluebird.fromNode(cb => ) const outputDir = prelude_ts_1.asOption(Sh.tempdir()) .tap(dir => { Sh.mkdir("-p", dir); }) .get(); let deployCounter = 0; const log = helpers_1.getLogger(); const debug = helpers_1.getDebug(); const compileError = (compilation, err) => { compilation.errors.push(err instanceof Error ? err : new Error(err)); }; class AWSAutoDeployPlugin { constructor(config, awsConfig = config.aws?.config ?? 
{}, entryMap = mapToEntries(config.mappings)) { this.config = config; this.awsConfig = awsConfig; this.entryMap = entryMap; this.events = new events_1.EventEmitter({ captureRejections: true }); this.pkg = require(Path.join(helpers_1.RootPluginDir, "package.json")); this.name = this.pkg.name; this.clients = { s3: undefined, lambda: undefined }; /** * Deploy the compilation to the configured * entry <-> lambda mappings * * @param {webpack.compilation.Compilation} compilation * @param {EntryMapping} entryMapping * @returns {Promise<void>} */ this.deploy = async ([compilation, { entry, fn: fns }]) => { deployCounter++; const { preDeployScript: preDeployScriptTemplate } = this.config; const entryOutputPath = compilation.outputOptions.path; const timestamp = helpers_1.getFileTimestamp(); // const lockfile = Path.join(entryOutputPath, `.webpack-lambda-autodeploy.lock`) // if (!Sh.test("-e", lockfile)) { // Sh.touch(lockfile) // } // // if (await Lockfile.check(lockfile)) { // log.warn(`lock file is locked ${lockfile}`) // return // } // // await Lockfile.lock(lockfile) // .then(async release => { try { const entryFiles = lodash_1.uniq(await prelude_ts_1.Future.do(async () => { const entryFiles = await Promise.all(Object.entries(compilation.assets).map(async ([name, out]) => { const file = prelude_ts_1.asOption(out.existsAt) .filter(helpers_1.isNotEmpty) .getOrCall(() => Path.join(entryOutputPath, name)); guard_1.assert(await fileExistsAsync(file), `${file} doesn't exist`); return file; })); log.debug(`Deploying for entries`, entryFiles); return entryFiles; }).toPromise()); let deployFailed = false; if (helpers_1.isStringAndNotEmpty(preDeployScriptTemplate)) { const preDeployScript = preDeployScriptTemplate .replaceAll("{{entry}}", entry) .replaceAll("{{timestamp}}", timestamp) .replaceAll("{{deployCounter}}", deployCounter.toString(10)); deployFailed = await bluebird_1.default.fromNode(cb => { Sh.exec(preDeployScript, { async: true }, (code, stdout, stderr) => { if (code !== 0) { log.error(`exec error (${code}): ${stderr}`); cb(undefined, true); } else { log.debug(`stdout: ${stdout}`); log.debug(`stderr: ${stderr}`); cb(undefined, false); } }); }).catch(err => { log.error(`Script failed to complete, skipping deployment`, err); return true; }); } if (deployFailed) { log.error(`Deploy failed in preDeployScript`); return; } log.info(`Deploying entry (${entry}) to functions ${fns.join(", ")}: `, entryFiles); try { const archiveFile = await this.archive(entry, entryOutputPath, entryFiles); const { config } = this, storageConfig = config.aws?.storage ?? { type: "lambda" }; const zipFileBuf = Fs.readFileSync(archiveFile, null); const archiveSize = zipFileBuf.length; debug(`Using bundle of size: ${archiveSize}bytes`); if (storageConfig.type === "s3") { const s3StorageConfig = storageConfig; const { keyTemplate = types_1.KeyTemplateDefault, bucket, pathPrefix = "", namePrefix = "" } = s3StorageConfig; const path = pathPrefix.replace(/^\//, "").replace(/\/$/, ""); const isValidPath = path.length > 0; const baseKey = keyTemplate .replaceAll("{{entry}}", entry) .replaceAll("{{timestamp}}", timestamp) .replaceAll("{{deployCounter}}", deployCounter.toString(10)); const key = `${isValidPath ? 
path + "/" : ""}${namePrefix}${baseKey}.zip`; debug(`Uploading s3://${bucket}/${key}`); await this.s3 .putObject({ Bucket: bucket, Key: key, ContentType: "application/zip", Body: zipFileBuf }) .promise(); const artifactUrl = `s3://${bucket}/${key}`; debug(`Uploaded ${artifactUrl}`); if (fns.length) { debug(`Updating lambda functions (${fns.join(",")}) code with artifact(${artifactUrl}`); await bluebird_1.default.mapSeries(fns, async (fn) => { const params = { FunctionName: fn, S3Bucket: bucket, S3Key: key }; log.trace(`Updating function with params: ${fn}`, params); const result = await this.lambda .updateFunctionCode(params) .promise(); log.trace(`Upload result`, result); }); } } else if (fns.length) { debug(`Updating lambda functions (${fns.join(",")}) code blob`); await bluebird_1.default.mapSeries(fns, async (fn) => { const params = { FunctionName: fn, ZipFile: zipFileBuf }; debug(`Updating fn with zip + params: ${fn}`, params); const result = await this.lambda .updateFunctionCode(params) .promise(); debug(`Updated fn with zip buff result`, result); }); } else { throw Error(`No functions defined and no s3 info provided, can not deploy artifacts for ${entry}`); } // // this.emit("deploy", { // phase: "complete", // storage: storageConfig, // archiveFile, // archiveSize, // functionNames: fns, // region: this.awsConfig.region, // timestamp: new Date() // // // } as AWSDeployEvent<Storage>) } catch (err) { log.error(`Failed to deploy archive`, err); throw err; } } catch (err) { log.error(`Autodeploy failed`, err); } // return release() // }) }; /** * Process done compilation event * * @param {webpack.Stats | webpack.compilation.MultiStats} statsOrMultiStats * @returns {Promise<void>} */ this.onDone = async (statsOrMultiStats) => { const { entryMap } = this; const allStats = helpers_1.isMultiStats(statsOrMultiStats) ? statsOrMultiStats.stats : [statsOrMultiStats]; if (allStats.some(it => it.hasErrors())) { log.warn(`Build contains errors, skipping deploy`); return; } const pendingDeployments = lodash_1.uniq(allStats .map(({ compilation }) => [ compilation, entryMap[compilation.compiler?.name] ?? 
entryMap[types_1.DefaultEntryName] // asOption() // .map(name => entryMap[name]) // .getOrCall(() => Object.values(entryMap)[0]) ]) .filter(([, entry]) => Boolean(entry))); try { pendingDeployments.forEach((args) => { const entry = args[1]?.entry; const [compilation] = args; if (!helpers_1.isNotEmpty(entry)) { log.warn(`No entry info provided in compilation`, args); return entry; } const deployQueue = getDeployQueue(entry); if (deployQueue.size > 2) { log.warn(`Existing deployments in progress: #${deployQueue.size} pending`); return; } deployQueue .add(async () => { try { await this.deploy(args); } catch (err) { compilation.errors.push(err); this.emit("error", err); } }) .catch(err => { log.error(`Failed deploy`, pendingDeployments, err); }); }); } catch (err) { log.error(`AutoDeploy failed`, err); //throw err } }; const { verbose } = lodash_1.defaults(config, { verbose: false }); if (verbose && !debug.enabled) { debug_1.default.enable(helpers_1.DebugAwsAutoDeployCategory); } } /** * Zip up the assets * * @param entry * @param entryOutputPath * @param entryFiles * @returns */ async archive(entry, entryOutputPath, entryFiles) { const deferred = new deferred_1.Deferred(), handleDone = (event, outputFile) => { log.info(`Done (${event})`, outputFile); if (!deferred.isSettled()) { deferred.resolve(outputFile); } }, handleError = (err) => { log.error(`An error has occurred for entry (${entry})`, err); if (!deferred.isSettled()) { deferred.reject(err); } else { log.warn(`Received another error, but this archive has already settled`, err); } }; let output; try { const outputFile = Path.join(outputDir, `${entry}-${deployCounter}-${helpers_1.getFileTimestamp()}.zip`); output = Fs.createWriteStream(outputFile); const archive = archiver_1.default("zip", {}); output.on("close", function () { log.info(`Bundle Complete (${outputFile}): ${archive.pointer()} bytes`); handleDone("close", outputFile); }); output.on("end", function () { log.trace("Data has been drained"); //handleDone("end", outputFile) }); archive.on("warning", function (err) { if (err.code === "ENOENT") { log.warn(`code: ${err.code}`); } else { handleError(err); } }); archive.on("error", handleError); archive.pipe(output); entryFiles.forEach(file => prelude_ts_1.asOption(file) .tap(file => log.info(`${file} -> ${outputFile}`)) .tap(file => archive.file(file, { name: Path.relative(entryOutputPath, file) }))); await archive.finalize(); //handleDone("finalize", outputFile) return deferred.promise; } catch (err) { handleError(err); throw err; } } // get namePrefix() { // return asOption(this.config) // .filter(config => config.) // } /** * Entries that have configured functions * * @returns {string[]} */ get entryNames() { return Object.keys(this.entryMap); } get s3() { return prelude_ts_1.asOption(this.clients.s3).getOrCall(() => (this.clients.s3 = new AWS.S3(this.awsConfig ?? {}))); } get lambda() { return prelude_ts_1.asOption(this.clients.lambda).getOrCall(() => (this.clients.lambda = new AWS.Lambda(this.awsConfig ?? 
{}))); } emit(event, data) { this.events.emit(event, data); return this; } on(event, listener) { this.events.on(event, listener); return this; } off(event, listener) { this.events.off(event, listener); return this; } apply(anyCompiler) { const compiler = anyCompiler; compiler.hooks.done.tapPromise(this.name, this.onDone); } async handleErrors(error, compilation) { compileError(compilation, `AWSLambdaAutoDeployPlugin: ${error}`); throw error; } } exports.default = AWSAutoDeployPlugin; function mergeEntries(map, mapping) { const { fn, entry } = mapping; return { ...map, [entry]: { entry, fn: [ ...(map[entry]?.fn ?? []), ...(typeof fn === "string" ? [fn] : Array.isArray(fn) ? fn : []) ] } }; } function mapToEntries(it) { return (Array.isArray(it) ? it : [{ fn: it, entry: [types_1.DefaultEntryName] }]).reduce(mergeEntries, {}); } //# sourceMappingURL=AWSAutoDeployPlugin.js.map
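
For reference, the mergeEntries/mapToEntries helpers at the end of the file normalize the mappings option into a per-entry map, concatenating function names when the same entry appears more than once. A small sketch with hypothetical entry and function names:

// Hypothetical input run through mapToEntries (i.e. reduce(mergeEntries, {})):
mapToEntries([
  { entry: "api", fn: "my-api-function" },
  { entry: "api", fn: ["my-api-canary"] },
  { entry: "worker", fn: "my-worker-function" }
]);
// => {
//   api:    { entry: "api",    fn: ["my-api-function", "my-api-canary"] },
//   worker: { entry: "worker", fn: ["my-worker-function"] }
// }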