UNPKG

veendor

Version:

a tool for storing your npm dependencies in arbitrary storage

177 lines (176 loc) 7.1 kB
'use strict';
// veendor S3 backend: pushes/pulls node_modules bundles (tar archives keyed by
// dependency hash) to and from an Amazon S3 bucket.

Object.defineProperty(exports, "__esModule", { value: true });

const AWS = require("aws-sdk");
const path = require("path");
const tarWrapper = require("../commandWrappers/tarWrapper");
const errors = require("../errors");

/**
 * Validates and normalises backend options in place, then instantiates the
 * S3 client the other functions use (stored on `options.__s3`).
 *
 * Defaults: compression 'gzip', objectAcl 'public-read'. The S3 apiVersion is
 * always forced to '2006-03-01', overriding any user-supplied value.
 *
 * @throws errors.InvalidOptionsError on unknown compression or missing bucket.
 */
function validateOptions(options) {
    if (options.compression && !(options.compression in tarWrapper.compression)) {
        throw new errors.InvalidOptionsError(`Invalid compression: ${options.compression}`);
    }
    if (!options.compression) {
        options.compression = 'gzip';
    }
    if (!options.bucket) {
        throw new errors.InvalidOptionsError('`bucket` option must be provided');
    }
    if (!options.objectAcl) {
        options.objectAcl = 'public-read';
    }
    // Pin the API version even when the caller provides s3Options.
    const forcedS3Options = { apiVersion: '2006-03-01' };
    if (!options.s3Options) {
        options.s3Options = forcedS3Options;
    } else {
        Object.assign(options.s3Options, forcedS3Options);
    }
    options.__s3 = new AWS.S3(options.s3Options);
}
exports.validateOptions = validateOptions;

/**
 * Downloads the bundle `<hash>.tar<ext>` from the bucket and extracts it.
 *
 * Flow: headObject first (to learn ContentLength for the progress bar and to
 * fail fast with BundleNotFoundError on 404), then stream getObject through a
 * progress stream into the tar extractor.
 *
 * @returns Promise resolved when both the download and the extraction finish.
 * @throws errors.BundleNotFoundError when the object does not exist (404).
 * @throws BundleDownloadError on any other S3 failure.
 */
async function pull(hash, options, _cachedir, toolsProvider) {
    const s3 = options.__s3;
    const filename = `${hash}.tar${tarWrapper.compression[options.compression]}`;
    const s3Params = {
        Bucket: options.bucket,
        Key: filename,
    };
    const logger = toolsProvider.getLogger();

    let contentLength;
    try {
        logger.trace('[s3 pull] marking headObject request to S3');
        const meta = await s3.headObject(s3Params).promise();
        contentLength = meta.ContentLength;
    } catch (error) {
        if (error.statusCode === 404) {
            throw new errors.BundleNotFoundError();
        }
        throw new BundleDownloadError(error.stack);
    }

    logger.trace('[s3 pull] marking getObject request to S3');
    const downloadStream = s3.getObject(s3Params).createReadStream();
    const progressStream = toolsProvider.getProgressStream('pull', contentLength);
    const tarWrapperToken = {};

    // Extraction starts only once the first bytes are readable, so the tar
    // process is not spawned for a stream that errors out immediately.
    const extractPromise = new Promise((resolve, reject) => {
        downloadStream.once('readable', () => {
            logger.trace('[s3 pull] downloadStream is readable');
            downloadStream.pipe(progressStream);
            tarWrapper
                .extractArchiveFromStream(progressStream, options.compression, { controlToken: tarWrapperToken })
                .then(resolve, reject);
            progressStream.toggleVisibility(true);
        });
    });

    const downloadStreamPromise = new Promise((resolve, reject) => {
        // 'error', 'end' and 'close' can each fire; only the first one counts.
        let settled = false;

        downloadStream.once('error', (error) => {
            if (settled) {
                return;
            }
            settled = true;
            // Kill the extractor (if it started) before reporting the failure.
            if (tarWrapperToken.terminate) {
                tarWrapperToken.terminate();
            }
            progressStream.die();
            if (error.statusCode === 404) {
                return reject(new errors.BundleNotFoundError());
            }
            reject(new BundleDownloadError(error.message));
        });

        const finish = () => {
            if (settled) {
                return;
            }
            settled = true;
            progressStream.die();
            resolve();
        };

        downloadStream.once('end', () => {
            logger.trace('[s3 pull] downloadStream end');
            finish();
        });
        downloadStream.once('close', finish);
    });

    return Promise.all([downloadStreamPromise, extractPromise]);
}
exports.pull = pull;

/**
 * Archives the local node_modules directory and uploads it to the bucket as
 * `<hash>.tar<ext>`.
 *
 * Checks for an existing object first and refuses to overwrite it.
 *
 * @throws errors.BundleAlreadyExistsError when the object is already present.
 * @throws BundleUploadError on non-Veendor upload failures
 *         (message is "<statusCode>: <message>").
 */
async function push(hash, options, _cachedir, toolsProvider) {
    const filename = `${hash}.tar${tarWrapper.compression[options.compression]}`;
    const s3 = options.__s3;
    const controlToken = {};

    // A 404 from headObject means "free to upload"; anything else is fatal.
    let bundleExists = false;
    try {
        await s3.headObject({
            Bucket: options.bucket,
            Key: filename,
        }).promise();
        bundleExists = true;
    } catch (error) {
        if (error.statusCode !== 404) {
            throw error;
        }
    }
    if (bundleExists) {
        throw new errors.BundleAlreadyExistsError();
    }

    const progressStream = toolsProvider.getProgressStream('push');
    progressStream.toggleVisibility(true);

    const { stream: archiveStream, promise: archivePromise } = tarWrapper.createStreamArchive(
        [path.resolve(process.cwd(), 'node_modules')],
        options.compression,
        { controlToken }
    );
    archiveStream.pipe(progressStream);

    const uploadPromise = s3.upload({
        Bucket: options.bucket,
        Key: filename,
        ACL: options.objectAcl,
        Body: progressStream,
    }).promise();

    try {
        await Promise.all([archivePromise, uploadPromise]);
    } catch (error) {
        if (error instanceof errors.VeendorError) {
            throw error;
        }
        throw new BundleUploadError(`${error.statusCode}: ${error.message}`);
    } finally {
        // Always stop the tar child process and the progress bar, pass or fail.
        if (controlToken.terminate !== undefined) {
            controlToken.terminate();
        }
        progressStream.die();
    }
}
exports.push = push;

// Raised when S3 returns a non-404 failure while fetching a bundle.
class BundleDownloadError extends errors.VeendorError {
}
exports.BundleDownloadError = BundleDownloadError;

// Raised when the upload of a freshly built bundle fails.
class BundleUploadError extends errors.VeendorError {
}
exports.BundleUploadError = BundleUploadError;