/**
 * aws-lambda-upload
 * Package and upload an AWS lambda with its minimal dependencies.
 */
;
/**
 * Down-level async/await helper emitted by the TypeScript compiler.
 * Drives `generator` as a coroutine: each yielded value is wrapped in a
 * promise of type `P` (default: Promise) and its settlement resumes the
 * generator, so `yield expr` in the function bodies below behaves like
 * `await expr`. Compiler-generated; do not edit by hand.
 */
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with the settled value, or rethrow the rejection into it.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Either settle the outer promise (generator done) or chain the next yielded value.
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const AWS = require("aws-sdk");
const bluebird_1 = require("bluebird");
const childProcess = require("child_process");
const collect_js_deps_1 = require("collect-js-deps");
const commander = require("commander");
const crypto_1 = require("crypto");
const fse = require("fs-extra");
const path = require("path");
const tmp = require("tmp");
const yaml_cfn_1 = require("yaml-cfn");
// Ensure temp files/dirs created through `tmp` are removed on process exit.
tmp.setGracefulCleanup();
// S3 bucket used when the caller does not supply options.s3Bucket.
const defaultBucket = "aws-lambda-upload";
// Fallback logger used whenever options.logger is not given. Created with
// verbose=true, so library calls without an explicit logger emit debug output.
const dfltLogger = getLogger(true);
/**
 * Build a minimal logger with `info` and `debug` methods.
 * `info` always writes via console.info; `debug` writes via console.log only
 * when `verbose` is true and is a silent no-op otherwise.
 */
function getLogger(verbose) {
  const noop = () => {};
  return {
    info: (...args) => console.info(...args),
    debug: verbose ? (...args) => console.log(...args) : noop,
  };
}
/**
 * Run a command, returning a promise resolved on success. Similar to promisified
 * child_process.execFile(), but allows overriding options.stdio, and defaults them to 'inherit'
 * (to display the command's output).
 *
 * @param {string} command - Executable to run.
 * @param {string[]} args - Arguments passed to the executable.
 * @param {Object} [options] - Options forwarded to child_process.spawn(); stdio
 *    defaults to "inherit".
 * @returns {Promise<void>} Resolved on exit code 0; rejected with an Error naming
 *    the command on spawn failure, non-zero exit code, or death by signal.
 */
function spawn(command, args, options = {}) {
  return new Promise((resolve, reject) => {
    const child = childProcess.spawn(command, args, Object.assign({ stdio: "inherit" }, options));
    // "error" fires when the process could not be spawned at all (e.g. ENOENT).
    child.on("error", (err) => reject(err));
    child.on("exit", (code, signal) => {
      if (code === 0) {
        resolve();
      } else if (signal) {
        // When killed by a signal, `code` is null; report the signal rather
        // than a misleading "exit code null".
        reject(new Error(`Command ${command} was killed by signal ${signal}`));
      } else {
        reject(new Error(`Command ${command} failed with exit code ${code}`));
      }
    });
  });
}
exports.spawn = spawn;
/**
 * Walk topDir directory recursively, calling cb(path, stat) for each entry found.
 * If cb() returns a promise, it is awaited before proceeding.
 * Visits the tree in lexicographical order, and visits directories before their children.
 */
async function fsWalk(topDir, cb) {
  // Depth-first traversal via an explicit stack of [path, stat] pairs.
  const pending = [[topDir, await fse.stat(topDir)]];
  while (pending.length > 0) {
    const [currentPath, currentStat] = pending.pop();
    await cb(currentPath, currentStat);
    if (!currentStat.isDirectory()) {
      continue;
    }
    const names = (await fse.readdir(currentPath)).sort();
    const childPaths = names.map((name) => path.join(currentPath, name));
    const childStats = await Promise.all(childPaths.map((p) => fse.stat(p)));
    // Push children in reverse sorted order so pop() yields them sorted.
    for (let i = childPaths.length - 1; i >= 0; i--) {
      pending.push([childPaths[i], childStats[i]]);
    }
  }
}
exports.fsWalk = fsWalk;
/**
 * Collect and package code starting at the given path, into a temporary zip file.
 * Results are memoized in options.cache (when given), keyed by startPath, so the
 * same entry file is only packaged once per cache.
 * @param {string} startPath - Entry file to package together with its dependencies.
 * @param {Object} options - Supports logger, cache, browserifyArgs, tsconfig, nodePath.
 * @return {Promise<string>} Path to the zip file. It will be deleted on process exit.
 */
async function _makeTmpZipFile(startPath, options) {
  const logger = options.logger || dfltLogger;
  // Use memoized result if we are given a cache which has it.
  if (options.cache && options.cache.has(startPath)) {
    logger.debug(`Reusing cached zip file for ${startPath}`);
    return options.cache.get(startPath);
  }
  // Copy the caller's array: we may append tsify flags below, and mutating
  // options.browserifyArgs would make repeated calls accumulate them.
  const args = (options.browserifyArgs || []).slice();
  if (options.tsconfig) {
    args.push("-p", "[", require.resolve("tsify"), "-p", options.tsconfig, "]");
  }
  const prevNodePath = process.env.NODE_PATH;
  const stageDir = await bluebird_1.fromCallback((cb) => tmp.dir({ prefix: "aws-lambda-", unsafeCleanup: true }, cb));
  try {
    logger.debug(`Collecting files from ${startPath} in ${stageDir}`);
    if (options.nodePath !== undefined) {
      process.env.NODE_PATH = options.nodePath;
    }
    await collect_js_deps_1.main(["--outdir", stageDir, ...args, startPath]);
    // TODO Test what happens when startPath is absolute, and when dirname IS in fact "."
    if (path.dirname(startPath) !== ".") {
      // If startPath is not at top-level, create a helper top-level alias for it, since Lambdas
      // require the entry file to be top-level. (We can't solve this by moving files around as that
      // would break relative imports.)
      // Force startPath's extension to .js (e.g. if it's being compiled from TypeScript).
      const start = path.parse(startPath);
      const stubPath = path.join(stageDir, start.name + ".js");
      const requirePath = path.relative("", path.join(start.dir, start.name));
      await fse.writeFile(stubPath, `module.exports = require("./${requirePath}");\n`);
    }
    // Set all timestamps to a constant value (0) for all files, to produce consistent zip files
    // that are identical for identical data. Otherwise timestamps cause zip files to never match.
    await fsWalk(stageDir, (fpath, st) => fse.utimes(fpath, st.atime.getTime() / 1000, 0));
    // We use file() followed by remove(), rather than tmpName(), so that tmp module remembers to
    // clean it up on exit.
    const zipPath = await bluebird_1.fromCallback((cb) => tmp.file({ prefix: "aws-lambda-", postfix: ".zip" }, cb));
    // The destination zip must not already exist, or `zip` would update it in place.
    await fse.remove(zipPath);
    await spawn("zip", ["-q", "-r", "-X", zipPath, "."], { cwd: stageDir });
    if (options.cache) {
      options.cache.set(startPath, zipPath);
    }
    return zipPath;
  }
  finally {
    // Restore NODE_PATH exactly as we found it. Assigning `undefined` to a
    // process.env key would store the string "undefined", so delete instead.
    if (prevNodePath === undefined) {
      delete process.env.NODE_PATH;
    }
    else {
      process.env.NODE_PATH = prevNodePath;
    }
    // Best-effort cleanup (tmp's unsafeCleanup also removes it on exit); await
    // and log instead of leaving a floating promise / unhandled rejection.
    await fse.remove(stageDir).catch((err) => logger.debug(`Failed to remove ${stageDir}: ${err}`));
  }
}
/**
 * Collect and package lambda code starting at the entry file startPath, to a local zip file at
 * outputZipPath. Will overwrite the destination if it exists. Returns outputZipPath.
 */
async function packageZipLocal(startPath, outputZipPath, options = {}) {
  const log = options.logger || dfltLogger;
  log.info(`Packaging ${startPath} to local zip file ${outputZipPath}`);
  // Build the zip in a temp location, then copy it to the requested path.
  const zippedPath = await _makeTmpZipFile(startPath, options);
  await fse.copy(zippedPath, outputZipPath);
  log.info(`Created ${outputZipPath}`);
  return outputZipPath;
}
exports.packageZipLocal = packageZipLocal;
/**
 * Collect and package lambda code and upload it to S3.
 * Creates the destination bucket if missing, names the object after the MD5
 * checksum of the zip contents (mirroring `aws cloudformation package`), and
 * skips the upload when an object with that key already exists.
 * @param {string} startPath - Entry file of the lambda code to package.
 * @param {Object} [options] - Supports logger, region, s3Bucket, s3Prefix,
 *    s3EndpointUrl, plus the packaging options of _makeTmpZipFile.
 * @return Promise for the object {bucket, key} describing the location of the uploaded file.
 */
function packageZipS3(startPath, options = {}) {
return __awaiter(this, void 0, void 0, function* () {
const logger = options.logger || dfltLogger;
const s3Bucket = options.s3Bucket || defaultBucket;
const s3Prefix = options.s3Prefix || "";
logger.info(`Packaging ${startPath} for ${options.region} s3://${s3Bucket}/${s3Prefix}...`);
const s3 = new AWS.S3({
region: options.region,
endpoint: options.s3EndpointUrl,
// Fixes a bug when using a custom endpoint for localstack. For more info, see:
// https://github.com/localstack/localstack/issues/43
s3ForcePathStyle: true,
});
// Check S3 bucket, and create if needed.
try {
yield s3.headBucket({ Bucket: s3Bucket }).promise();
logger.debug(`Bucket s3://${s3Bucket} exists`);
}
catch (err) {
// "Forbidden": bucket exists but we can't access it; "NotFound": create it
// below; anything else is unexpected and rethrown.
if (err.code === "Forbidden") {
throw new Error(`Can't read s3://${s3Bucket}: grant AWS permissions or change --s3-bucket`);
}
if (err.code !== "NotFound") {
throw err;
}
logger.info(`Bucket s3://${s3Bucket} missing; creating`);
yield s3.createBucket({ Bucket: s3Bucket }).promise();
}
const tmpPath = yield _makeTmpZipFile(startPath, options);
const zipData = yield fse.readFile(tmpPath);
// Get S3 key using s3Prefix + md5 of contents (that's what `aws cloudformation package` does).
// hex goes into the object key; base64 is the ContentMD5 integrity header.
const checksumBuf = crypto_1.createHash("md5").update(zipData).digest();
const checksumHex = checksumBuf.toString("hex");
const checksumB64 = checksumBuf.toString("base64");
// Ensure a non-empty prefix ends with exactly one "/" separator.
const prefix = s3Prefix && !s3Prefix.endsWith("/") ? s3Prefix + "/" : (s3Prefix || "");
const key = `${prefix}${checksumHex}.zip`;
logger.debug(`Uploading zipped data to s3://${s3Bucket}/${key}`);
// Skip upload if the object exists (since md5 checksum in key implies the same content).
const { Contents } = yield s3.listObjectsV2({ Bucket: s3Bucket, MaxKeys: 1, Prefix: key }).promise();
const exists = (Contents && Contents.length > 0 && Contents[0].Key === key);
if (exists) {
logger.info(`s3://${s3Bucket}/${key} already exists, skipping upload`);
}
else {
// Do the upload to S3.
yield s3.upload({ Body: zipData, Bucket: s3Bucket, Key: key, ContentMD5: checksumB64 }).promise();
logger.info(`s3://${s3Bucket}/${key} uploaded`);
}
return { bucket: s3Bucket, key };
});
}
exports.packageZipS3 = packageZipS3;
/**
 * Collect lambda code, and use it to update AWS Lambda function of the given name.
 * @return {Object} Promise for the response as returned by AWS Lambda UpdateFunctionCode.
 * See http://docs.aws.amazon.com/lambda/latest/dg/API_UpdateFunctionCode.html
 */
async function updateLambda(startPath, lambdaName, options = {}) {
  const log = options.logger || dfltLogger;
  log.info(`Packaging ${startPath} to update lambda ${lambdaName}`);
  const lambdaClient = new AWS.Lambda({
    region: options.region,
    endpoint: options.lambdaEndpointUrl,
  });
  // Package the code locally, then push the zip bytes directly to Lambda.
  const zipPath = await _makeTmpZipFile(startPath, options);
  const zipContents = await fse.readFile(zipPath);
  const resp = await lambdaClient
    .updateFunctionCode({ FunctionName: lambdaName, ZipFile: zipContents })
    .promise();
  log.info(`Updated lambda ${lambdaName} version ${resp.Version} ` +
    `(${resp.CodeSize} bytes) at ${resp.LastModified}`);
  return resp;
}
exports.updateLambda = updateLambda;
/**
 * Parse a CloudFormation template, trying JSON first and falling back to YAML.
 * @param {string} text - Raw template contents.
 * @param {string} ext - Template file extension as returned by path.extname()
 *    (e.g. ".json"). When it names JSON, no YAML fallback is attempted.
 * @returns {Object} The parsed template.
 * @throws {Error} If the text cannot be parsed.
 */
function parseTemplate(text, ext) {
  // Normalize the extension: callers pass path.extname() output, which keeps
  // the leading dot (".json"), so a bare `ext === "json"` comparison would
  // never match and the JSON-specific error below would be unreachable.
  const isJson = String(ext || "").replace(/^\./, "").toLowerCase() === "json";
  try {
    return JSON.parse(text);
  }
  catch (e1) {
    if (isJson) {
      throw new Error(`Unable to parse JSON template: ${e1}`);
    }
    try {
      return yaml_cfn_1.yamlParse(text);
    }
    catch (e2) {
      throw new Error(`Unable to parse YAML template: ${e2}`);
    }
  }
}
/**
 * Parse the CloudFormation template at the given path (must be in JSON or YAML format), package
 * any code mentioned in it to S3, replace template references with S3 locations, and return the
 * adjusted template object.
 *
 * It is similar to `aws cloudformation package` command. It recognizes these template keys:
 *
 * - `Code` property in `Resource` with `Type: AWS::Lambda::Function`.
 * - `CodeUri` property in `Resource` with `Type: AWS::Serverless::Function`.
 *
 * When these contain a file path, it'll be interpreted as a JS entry file, packaged using
 * `packageZipS3()`, and replaced in the output template with appropriate S3 info. The path may be
 * relative to the directory containing templatePath.
 *
 * @param templatePath: Path to the JSON or YAML template file.
 * @return The template object, with certain entries replaced with S3 locations.
 */
function cloudformationPackage(templatePath, options = {}) {
return __awaiter(this, void 0, void 0, function* () {
const logger = options.logger || dfltLogger;
logger.info(`Processing template ${templatePath}`);
const templateText = yield fse.readFile(templatePath, "utf8");
const template = parseTemplate(templateText, path.extname(templatePath));
// Ensure a shared zip cache, so resources pointing at the same entry file are
// only packaged/uploaded once (see _makeTmpZipFile memoization). Copies the
// options object rather than mutating the caller's.
if (!options.cache) {
options = Object.assign({ cache: new Map() }, options);
}
// Helper function to process a single property in the template file.
function resolveCfnPath(key, resource, prop, converter) {
return __awaiter(this, void 0, void 0, function* () {
if (!resource.Properties) {
logger.debug(`Template resource ${key} has no properties; skipping`);
return;
}
const cfnValue = resource.Properties[prop];
// Leave non-string values and s3:// or http(s):// URIs untouched; only
// local file paths get packaged.
if (typeof cfnValue !== "string" || /^(s3|https?):\/\//.test(cfnValue)) {
logger.debug(`Template property ${prop} of ${key} is not a path; skipping`);
return;
}
// Paths are resolved relative to the directory containing the template.
const cfnPath = path.resolve(path.dirname(templatePath), cfnValue);
if (!(yield fse.pathExists(cfnPath))) {
logger.info(`Template property ${prop} of ${key}: ${cfnPath} does not exist; skipping`);
return;
}
const s3Info = yield packageZipS3(cfnPath, options);
// Rewrite the template property in place with the uploaded S3 location.
resource.Properties[prop] = converter(s3Info);
logger.info(`Template property ${prop} of ${key} updated`);
});
}
// Process resources sequentially; each resource type stores the S3 location
// in a different shape (object for Lambda, URI string for Serverless).
for (const key of Object.keys(template.Resources)) {
const res = template.Resources[key];
if (res.Type === "AWS::Lambda::Function") {
yield resolveCfnPath(key, res, "Code", (obj) => ({ S3Bucket: obj.bucket, S3Key: obj.key }));
}
else if (res.Type === "AWS::Serverless::Function") {
yield resolveCfnPath(key, res, "CodeUri", (obj) => `s3://${obj.bucket}/${obj.key}`);
}
}
return template;
});
}
exports.cloudformationPackage = cloudformationPackage;
/**
 * As cloudformationPackage, but writes the adjusted template to the given output file as JSON.
 * See cloudformationPackage() for other parameters.
 * @param outputPath: Path to the JSON output file. May be "-" for stdout.
 * @return Promise that's resolved when the template has been written.
 */
async function cloudformationPackageOutput(templatePath, outputPath, options = {}) {
  // Default `options` like the sibling entry points (packageZipLocal, etc.),
  // so calling without an options argument doesn't crash on the next line.
  const logger = options.logger || dfltLogger;
  const template = await cloudformationPackage(templatePath, options);
  const json = JSON.stringify(template, null, 2);
  logger.info(`Writing out processed template to ${outputPath}`);
  if (outputPath === "-") {
    // process.stdout.write is callback-based; wrap it so the returned promise
    // resolves only once the data has been flushed.
    return bluebird_1.fromCallback((cb) => process.stdout.write(json, "utf8", cb));
  }
  else {
    return fse.writeFile(outputPath, json);
  }
}
exports.cloudformationPackageOutput = cloudformationPackageOutput;
/**
 * Main entry point when used from the command line: parses CLI flags and
 * dispatches to zip/S3/lambda/CloudFormation packaging via dispatchWork().
 * @returns {Promise|undefined} Promise for the requested work, or undefined
 *    when usage help was printed instead.
 */
function main() {
  commander
    .description("Package node.js code for AWS lambda with its minimal dependencies.\n" +
      " Extra arguments given after -- will be passed to browserify")
    .usage("[options] <start-file> -- [browserify-options]")
    .option("-l, --lambda <name>", "Name of lambda function to update")
    .option("--zip <output-path>", "Save the packaged lambda code to a local zip file")
    .option("--s3", "Save the packaged lambda code to S3, and print the S3 URI to stdout")
    // Typo fix in help text: "outpus" -> "outputs".
    .option("--cfn <output-path>", "Interpret <start-path> as a CloudFormation template\n" +
      " (.json or .yml), and package lambda code mentioned there. Replaces code references\n" +
      " with S3 locations, and outputs adjusted template to <output-path> ('-' for stdout)")
    .option("-r, --region <string>", "AWS region to use, for --lambda or --s3 flags")
    .option("--s3-bucket <string>", "S3 bucket to which to upload zip files " +
      `(default ${defaultBucket})`, defaultBucket)
    .option("--s3-prefix <string>", "Prefix (folder) added to zip files uploaded to S3", "")
    .option("--s3-endpoint-url <string>", "Override S3 endpoint url", (v) => new AWS.Endpoint(v))
    .option("-v, --verbose", "Produce verbose output")
    .option("--tsconfig <path>", "If given, support TypeScript, and use the given\n" +
      " config file, or directory containing tsconfig.json");
  // parseOptions() separates positional args from unrecognized flags without
  // exiting on error, so we can print our own help below.
  const { args, unknown } = commander.parseOptions(process.argv.slice(2));
  const opts = commander.opts();
  // Require an entry file, no unknown flags, and at least one action flag.
  if (args.length === 0 || unknown.length > 0 || !(opts.lambda || opts.zip || opts.s3 || opts.cfn)) {
    commander.outputHelp();
    if (unknown.length > 0) {
      process.stdout.write(`\nUnknown option(s) ${unknown.join(", ")}\n`);
    }
    return;
  }
  const startFile = args[0];
  // Everything after the entry file is forwarded to browserify.
  const browserifyArgs = args.slice(1);
  const logger = getLogger(commander.verbose);
  // Shared cache so the same entry file is only packaged once per run.
  const cache = new Map();
  return dispatchWork(startFile, Object.assign(opts, { browserifyArgs, logger, cache }))
    .catch((err) => {
      logger.info("Error", err.message);
      process.exit(1);
    });
}
exports.main = main;
/**
 * Route the parsed command-line options to the requested packaging actions.
 * --cfn is handled alone; --zip, --s3 and --lambda may be combined and run
 * sequentially in that order.
 */
async function dispatchWork(startFile, options) {
  if (options.cfn) {
    // Interprets startFile differently (as a template rather than a JS entry
    // file), so it doesn't make sense to combine with other options.
    await cloudformationPackageOutput(startFile, options.cfn, options);
    return;
  }
  if (options.zip) {
    await packageZipLocal(startFile, options.zip, options);
  }
  if (options.s3) {
    await packageZipS3(startFile, options);
  }
  if (options.lambda) {
    await updateLambda(startFile, options.lambda, options);
  }
}