UNPKG

box-node-sdk

Version:

Official SDK for Box Platform APIs

307 lines 12.2 kB
"use strict";
/**
 * @fileoverview Upload manager for large file uploads
 */
// TypeScript ES5 down-level helper: prototype-chain class inheritance.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        if (typeof b !== "function" && b !== null)
            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
// TypeScript helper: normalize CommonJS modules to an ES-module-like default.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
var bluebird_1 = require("bluebird");
// -----------------------------------------------------------------------------
// Requirements
// -----------------------------------------------------------------------------
var events_1 = require("events");
var stream_1 = require("stream");
var crypto_1 = __importDefault(require("crypto"));
// -----------------------------------------------------------------------------
// Private
// -----------------------------------------------------------------------------
var DEFAULT_OPTIONS = Object.freeze({
    parallelism: 4,
    retryInterval: 1000,
});
/**
 * Chunk of a file to be uploaded, which handles trying to upload itself until
 * it succeeds.
 * @private
 */
var Chunk = /** @class */ (function (_super) {
    __extends(Chunk, _super);
    /**
     * Create a Chunk, representing a part of a file being uploaded
     * @param {BoxClient} client The Box SDK client
     * @param {string} sessionID The ID of the upload session the chunk belongs to
     * @param {Buffer|string} chunk The chunk that was uploaded
     * @param {int} offset The byte offset within the file where this chunk begins
     * @param {int} totalSize The total size of the file this chunk belongs to
     * @param {Object} options The options from the ChunkedUploader
     * @param {int} options.retryInterval The number of ms to wait before retrying a chunk upload
     */
    function Chunk(client, sessionID, chunk, offset, totalSize, options) {
        var _this = _super.call(this) || this;
        _this.client = client;
        _this.sessionID = sessionID;
        _this.chunk = chunk;
        _this.length = chunk.length;
        _this.offset = offset;
        _this.totalSize = totalSize;
        _this.options = options;
        _this.data = null;
        _this.retry = null;
        _this.canceled = false;
        return _this;
    }
    /**
     * Get the final object representation of this chunk for the API
     * @returns {UploadPart} The chunk object
     */
    Chunk.prototype.getData = function () {
        return this.data.part;
    };
    /**
     * Upload a chunk to the API
     * @returns {void}
     * @emits Chunk#uploaded
     * @emits Chunk#error
     */
    Chunk.prototype.upload = function () {
        var _this = this;
        this.client.files.uploadPart(this.sessionID, this.chunk, this.offset, this.totalSize, function (err /* FIXME */, data /* FIXME */) {
            if (_this.canceled) {
                // Upload was canceled while in flight; drop the buffer and stop.
                _this.chunk = null;
                return;
            }
            if (err) {
                // handle the error or retry
                if (err.statusCode) {
                    // an API error, probably not retryable!
                    _this.emit('error', err);
                }
                else {
                    // maybe a network error, retry
                    _this.retry = setTimeout(function () { return _this.upload(); }, _this.options.retryInterval);
                }
                return;
            }
            // Record the chunk data for commit, and try to free up the chunk buffer
            _this.data = data;
            _this.chunk = null;
            _this.emit('uploaded', data);
        });
    };
    /**
     * Cancel trying to upload a chunk, preventing it from retrying and clearing
     * the associated buffer
     * @returns {void}
     */
    Chunk.prototype.cancel = function () {
        clearTimeout(this.retry); // number or NodeJS.Timeout
        this.chunk = null;
        this.canceled = true;
    };
    return Chunk;
}(events_1.EventEmitter));
// -----------------------------------------------------------------------------
// Public
// -----------------------------------------------------------------------------
/** Manager for uploading a file in chunks */
var ChunkedUploader = /** @class */ (function (_super) {
    __extends(ChunkedUploader, _super);
    /**
     * Create an upload manager
     * @param {BoxClient} client The client to use to upload the file
     * @param {Object} uploadSessionInfo The upload session info to use for chunked upload
     * @param {ReadableStream|Buffer|string} file The file to upload
     * @param {int} size The size of the file to be uploaded
     * @param {Object} [options] Optional parameters
     * @param {int} [options.retryInterval=1000] The number of ms to wait before retrying operations
     * @param {int} [options.parallelism=4] The number of concurrent chunks to upload
     * @param {Object} [options.fileAttributes] Attributes to set on the file during commit
     */
    function ChunkedUploader(client, uploadSessionInfo, file, size, options) {
        var _this = _super.call(this) || this;
        _this._client = client;
        _this._sessionID = uploadSessionInfo.id;
        _this._partSize = uploadSessionInfo.part_size;
        _this._uploadSessionInfo = uploadSessionInfo;
        if (file instanceof stream_1.Readable) {
            // Pause the stream so we can read specific chunks from it
            _this._stream = file.pause();
            _this._streamBuffer = [];
        }
        else if (file instanceof Buffer || typeof file === 'string') {
            _this._file = file;
        }
        else {
            throw new TypeError('file must be a Stream, Buffer, or string!');
        }
        _this._size = size;
        _this._options = Object.assign({}, DEFAULT_OPTIONS, options);
        _this._isStarted = false;
        _this._numChunksInFlight = 0;
        _this._chunks = [];
        _this._position = 0;
        _this._fileHash = crypto_1.default.createHash('sha1');
        return _this;
    }
    /**
     * Start an upload
     * @returns {Promise<Object>} A promise resolving to the uploaded file
     */
    ChunkedUploader.prototype.start = function () {
        var _this = this;
        // Starting is idempotent: return the in-progress promise on re-entry.
        if (this._isStarted) {
            return this._promise;
        }
        // Create the initial chunks
        for (var i = 0; i < this._options.parallelism; i++) {
            this._getNextChunk(function (chunk /* FIXME */) {
                return chunk ? _this._uploadChunk(chunk) : _this._commit();
            });
        }
        this._isStarted = true;
        /* eslint-disable promise/avoid-new */
        this._promise = new bluebird_1.Promise(function (resolve, reject) {
            _this._resolve = resolve;
            _this._reject = reject;
        });
        /* eslint-enable promise/avoid-new */
        return this._promise;
    };
    /**
     * Abort a running upload, which cancels all currently uploading chunks,
     * attempts to free up held memory, and aborts the upload session. This
     * cannot be undone or resumed.
     * @returns {Promise} A promise resolving when the upload is aborted
     * @emits ChunkedUploader#aborted
     * @emits ChunkedUploader#abortFailed
     */
    ChunkedUploader.prototype.abort = function () {
        var _this = this;
        this._chunks.forEach(function (chunk) { return chunk.removeAllListeners().cancel(); });
        this._chunks = [];
        this._file = null;
        this._stream = null;
        return (this._client.files
            .abortUploadSession(this._sessionID)
            /* eslint-disable promise/always-return */
            .then(function () {
                _this.emit('aborted');
            })
            /* eslint-enable promise/always-return */
            .catch(function (err /* FIXME */) {
                _this.emit('abortFailed', err);
                throw err;
            }));
    };
    /**
     * Get the next chunk of the file to be uploaded
     * @param {Function} callback Called with the next chunk of the file to be uploaded
     * @returns {void}
     * @private
     */
    ChunkedUploader.prototype._getNextChunk = function (callback) {
        var _this = this;
        if (this._position >= this._size) {
            // Entire file has been chunked out already.
            callback(null);
            return;
        }
        var buf;
        if (this._file) {
            // Buffer/string case, just get the slice we need
            buf = this._file.slice(this._position, this._position + this._partSize);
        }
        else if (this._streamBuffer.length > 0) {
            // Use a part previously buffered from the stream.
            buf = this._streamBuffer.shift();
        }
        else {
            // Stream case, need to read
            buf = this._stream.read(this._partSize);
            if (!buf) {
                // stream needs to read more, retry later
                setImmediate(function () { return _this._getNextChunk(callback); });
                return;
            }
            else if (buf.length > this._partSize) {
                // stream is done reading and had extra data, buffer the remainder of the file
                for (var i = 0; i < buf.length; i += this._partSize) {
                    this._streamBuffer.push(buf.slice(i, i + this._partSize));
                }
                buf = this._streamBuffer.shift();
            }
        }
        // Fold this part into the running SHA-1 used at commit time.
        this._fileHash.update(buf);
        var chunk = new Chunk(this._client, this._sessionID, buf, this._position, this._size, this._options);
        this._position += buf.length;
        callback(chunk);
    };
    /**
     * Upload a chunk
     * @param {Chunk} chunk The chunk to upload
     * @returns {void}
     * @emits ChunkedUploader#chunkError
     * @emits ChunkedUploader#chunkUploaded
     */
    ChunkedUploader.prototype._uploadChunk = function (chunk /* FIXME */) {
        var _this = this;
        this._numChunksInFlight += 1;
        chunk.on('error', function (err /* FIXME */) { return _this.emit('chunkError', err); });
        chunk.on('uploaded', function (data /* FIXME */) {
            _this._numChunksInFlight -= 1;
            _this.emit('chunkUploaded', data);
            // Keep the pipeline full: grab the next part, or commit when drained.
            _this._getNextChunk(function (nextChunk /* FIXME */) {
                return nextChunk ? _this._uploadChunk(nextChunk) : _this._commit();
            });
        });
        chunk.upload();
        this._chunks.push(chunk);
    };
    /**
     * Commit the upload, finalizing it
     * @returns {void}
     * @emits ChunkedUploader#uploadComplete
     * @emits ChunkedUploader#error
     */
    ChunkedUploader.prototype._commit = function () {
        var _this = this;
        // Only commit once, and only after every in-flight chunk has landed.
        if (!this._isStarted || this._numChunksInFlight > 0) {
            return;
        }
        var hash = this._fileHash.digest('base64');
        this._isStarted = false;
        var options = Object.assign({
            parts: this._chunks.map(function (c) { return c.getData(); }),
        }, this._options.fileAttributes);
        this._client.files.commitUploadSession(this._sessionID, hash, options, function (err /* FIXME */, file /* FIXME */) {
            // It's not clear what the SDK can do here, so we just return the error and session info
            // so users can retry if they wish
            if (err) {
                _this.emit('error', {
                    uploadSession: _this._uploadSessionInfo,
                    error: err,
                });
                _this._reject(err);
                return;
            }
            _this.emit('uploadComplete', file);
            _this._resolve(file);
        });
    };
    return ChunkedUploader;
}(events_1.EventEmitter));
module.exports = ChunkedUploader;
//# sourceMappingURL=chunked-uploader.js.map