box-node-sdk
Official SDK for Box Platform APIs
1,120 lines (1,119 loc) • 51.9 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChunkedUploadsManager = exports.CreateFileUploadSessionCommitHeaders = exports.CreateFileUploadSessionCommitByUrlHeaders = exports.GetFileUploadSessionPartsHeaders = exports.GetFileUploadSessionPartsByUrlHeaders = exports.DeleteFileUploadSessionByIdHeaders = exports.DeleteFileUploadSessionByUrlHeaders = exports.UploadFilePartHeaders = exports.UploadFilePartByUrlHeaders = exports.GetFileUploadSessionByIdHeaders = exports.GetFileUploadSessionByUrlHeaders = exports.CreateFileUploadSessionForExistingFileHeaders = exports.CreateFileUploadSessionHeaders = exports.CreateFileUploadSessionCommitOptionals = exports.CreateFileUploadSessionCommitByUrlOptionals = exports.GetFileUploadSessionPartsOptionals = exports.GetFileUploadSessionPartsByUrlOptionals = exports.DeleteFileUploadSessionByIdOptionals = exports.DeleteFileUploadSessionByUrlOptionals = exports.UploadFilePartOptionals = exports.UploadFilePartByUrlOptionals = exports.GetFileUploadSessionByIdOptionals = exports.GetFileUploadSessionByUrlOptionals = exports.CreateFileUploadSessionForExistingFileOptionals = exports.CreateFileUploadSessionOptionals = void 0;
exports.serializeCreateFileUploadSessionRequestBody = serializeCreateFileUploadSessionRequestBody;
exports.deserializeCreateFileUploadSessionRequestBody = deserializeCreateFileUploadSessionRequestBody;
exports.serializeCreateFileUploadSessionForExistingFileRequestBody = serializeCreateFileUploadSessionForExistingFileRequestBody;
exports.deserializeCreateFileUploadSessionForExistingFileRequestBody = deserializeCreateFileUploadSessionForExistingFileRequestBody;
exports.serializeCreateFileUploadSessionCommitByUrlRequestBody = serializeCreateFileUploadSessionCommitByUrlRequestBody;
exports.deserializeCreateFileUploadSessionCommitByUrlRequestBody = deserializeCreateFileUploadSessionCommitByUrlRequestBody;
exports.serializeCreateFileUploadSessionCommitRequestBody = serializeCreateFileUploadSessionCommitRequestBody;
exports.deserializeCreateFileUploadSessionCommitRequestBody = deserializeCreateFileUploadSessionCommitRequestBody;
const uploadSession_1 = require("../schemas/uploadSession");
const uploadedPart_1 = require("../schemas/uploadedPart");
const uploadParts_1 = require("../schemas/uploadParts");
const files_1 = require("../schemas/files");
const uploadPart_1 = require("../schemas/uploadPart");
const uploadPart_2 = require("../schemas/uploadPart");
const errors_1 = require("../box/errors");
const network_1 = require("../networking/network");
const fetchOptions_1 = require("../networking/fetchOptions");
const utils_1 = require("../internal/utils");
const utils_2 = require("../internal/utils");
const utils_3 = require("../internal/utils");
const utils_4 = require("../internal/utils");
const utils_5 = require("../internal/utils");
const utils_6 = require("../internal/utils");
const utils_7 = require("../internal/utils");
const utils_8 = require("../internal/utils");
const utils_9 = require("../internal/utils");
const json_1 = require("../serialization/json");
const json_2 = require("../serialization/json");
const json_3 = require("../serialization/json");
const json_4 = require("../serialization/json");
class CreateFileUploadSessionOptionals {
headers = new CreateFileUploadSessionHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.CreateFileUploadSessionOptionals = CreateFileUploadSessionOptionals;
class CreateFileUploadSessionForExistingFileOptionals {
headers = new CreateFileUploadSessionForExistingFileHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.CreateFileUploadSessionForExistingFileOptionals = CreateFileUploadSessionForExistingFileOptionals;
class GetFileUploadSessionByUrlOptionals {
headers = new GetFileUploadSessionByUrlHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.GetFileUploadSessionByUrlOptionals = GetFileUploadSessionByUrlOptionals;
class GetFileUploadSessionByIdOptionals {
headers = new GetFileUploadSessionByIdHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.GetFileUploadSessionByIdOptionals = GetFileUploadSessionByIdOptionals;
class UploadFilePartByUrlOptionals {
cancellationToken = void 0;
constructor(fields) {
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.UploadFilePartByUrlOptionals = UploadFilePartByUrlOptionals;
class UploadFilePartOptionals {
cancellationToken = void 0;
constructor(fields) {
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.UploadFilePartOptionals = UploadFilePartOptionals;
class DeleteFileUploadSessionByUrlOptionals {
headers = new DeleteFileUploadSessionByUrlHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.DeleteFileUploadSessionByUrlOptionals = DeleteFileUploadSessionByUrlOptionals;
class DeleteFileUploadSessionByIdOptionals {
headers = new DeleteFileUploadSessionByIdHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.DeleteFileUploadSessionByIdOptionals = DeleteFileUploadSessionByIdOptionals;
class GetFileUploadSessionPartsByUrlOptionals {
queryParams = {};
headers = new GetFileUploadSessionPartsByUrlHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.queryParams !== undefined) {
this.queryParams = fields.queryParams;
}
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.GetFileUploadSessionPartsByUrlOptionals = GetFileUploadSessionPartsByUrlOptionals;
class GetFileUploadSessionPartsOptionals {
queryParams = {};
headers = new GetFileUploadSessionPartsHeaders({});
cancellationToken = void 0;
constructor(fields) {
if (fields.queryParams !== undefined) {
this.queryParams = fields.queryParams;
}
if (fields.headers !== undefined) {
this.headers = fields.headers;
}
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.GetFileUploadSessionPartsOptionals = GetFileUploadSessionPartsOptionals;
class CreateFileUploadSessionCommitByUrlOptionals {
cancellationToken = void 0;
constructor(fields) {
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.CreateFileUploadSessionCommitByUrlOptionals = CreateFileUploadSessionCommitByUrlOptionals;
class CreateFileUploadSessionCommitOptionals {
cancellationToken = void 0;
constructor(fields) {
if (fields.cancellationToken !== undefined) {
this.cancellationToken = fields.cancellationToken;
}
}
}
exports.CreateFileUploadSessionCommitOptionals = CreateFileUploadSessionCommitOptionals;
class CreateFileUploadSessionHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.CreateFileUploadSessionHeaders = CreateFileUploadSessionHeaders;
class CreateFileUploadSessionForExistingFileHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.CreateFileUploadSessionForExistingFileHeaders = CreateFileUploadSessionForExistingFileHeaders;
class GetFileUploadSessionByUrlHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.GetFileUploadSessionByUrlHeaders = GetFileUploadSessionByUrlHeaders;
class GetFileUploadSessionByIdHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.GetFileUploadSessionByIdHeaders = GetFileUploadSessionByIdHeaders;
class UploadFilePartByUrlHeaders {
/**
* The [RFC3230][1] message digest of the chunk uploaded.
*
* Only SHA1 is supported. The SHA1 digest must be base64
encoded. The format of this header is
`sha=BASE64_ENCODED_DIGEST`.
*
* To get the value for the `SHA` digest, use the
* openSSL command to encode the file part:
* `openssl sha1 -binary <FILE_PART_NAME> | base64`.
*
* [1]: https://tools.ietf.org/html/rfc3230 */
digest;
/**
* The byte range of the chunk.
*
* Must not overlap with the range of a part already
uploaded to this session. Each part must be exactly
the part size specified when the upload session was
created; only the last part of the file can be smaller.
*
* When providing the value for `content-range`, remember that:
*
* * The lower bound of each part's byte range
must be a multiple of the part size.
* The upper bound must be one less than a multiple of the part size
(or the end of the file for the last part). */
contentRange;
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.digest !== undefined) {
this.digest = fields.digest;
}
if (fields.contentRange !== undefined) {
this.contentRange = fields.contentRange;
}
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.UploadFilePartByUrlHeaders = UploadFilePartByUrlHeaders;
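// Illustrative sketch, not part of the generated module: one way a caller might compute
// the `digest` and `contentRange` values documented above, using Node's built-in
// `node:crypto` module. The helper name and its arguments are hypothetical; it mirrors
// what `ChunkedUploadsManager.uploadBigFile` does internally for each chunk.
/*
const crypto = require('node:crypto');

function buildPartHeaders(chunkBuffer, offset, totalFileSize) {
  // SHA1 of the chunk, base64 encoded, in the `sha=` format Box expects.
  const sha1 = crypto.createHash('sha1').update(chunkBuffer).digest('base64');
  return {
    digest: 'sha='.concat(sha1),
    // Inclusive byte range of this chunk within the whole file.
    contentRange: 'bytes '.concat(
      String(offset), '-', String(offset + chunkBuffer.length - 1), '/', String(totalFileSize)),
  };
}
*/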
class UploadFilePartHeaders {
/**
* The [RFC3230][1] message digest of the chunk uploaded.
*
* Only SHA1 is supported. The SHA1 digest must be base64
encoded. The format of this header is
`sha=BASE64_ENCODED_DIGEST`.
*
* To get the value for the `SHA` digest, use the
* openSSL command to encode the file part:
* `openssl sha1 -binary <FILE_PART_NAME> | base64`.
*
* [1]: https://tools.ietf.org/html/rfc3230 */
digest;
/**
* The byte range of the chunk.
*
* Must not overlap with the range of a part already
uploaded to this session. Each part must be exactly
the part size specified when the upload session was
created; only the last part of the file can be smaller.
*
* When providing the value for `content-range`, remember that:
*
* * The lower bound of each part's byte range
must be a multiple of the part size.
* The upper bound must be one less than a multiple of the part size
(or the end of the file for the last part). */
contentRange;
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.digest !== undefined) {
this.digest = fields.digest;
}
if (fields.contentRange !== undefined) {
this.contentRange = fields.contentRange;
}
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.UploadFilePartHeaders = UploadFilePartHeaders;
class DeleteFileUploadSessionByUrlHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.DeleteFileUploadSessionByUrlHeaders = DeleteFileUploadSessionByUrlHeaders;
class DeleteFileUploadSessionByIdHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.DeleteFileUploadSessionByIdHeaders = DeleteFileUploadSessionByIdHeaders;
class GetFileUploadSessionPartsByUrlHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.GetFileUploadSessionPartsByUrlHeaders = GetFileUploadSessionPartsByUrlHeaders;
class GetFileUploadSessionPartsHeaders {
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.GetFileUploadSessionPartsHeaders = GetFileUploadSessionPartsHeaders;
class CreateFileUploadSessionCommitByUrlHeaders {
/**
* The [RFC3230][1] message digest of the whole file.
*
* Only SHA1 is supported. The SHA1 digest must be Base64
encoded. The format of this header is
`sha=BASE64_ENCODED_DIGEST`.
*
* [1]: https://tools.ietf.org/html/rfc3230 */
digest;
/**
* Ensures this item hasn't recently changed before
* making changes.
*
* Pass in the item's last observed `etag` value
* into this header and the endpoint will fail
* with a `412 Precondition Failed` if it
* has changed since. */
ifMatch;
/**
* Ensures an item is only returned if it has changed.
*
* Pass in the item's last observed `etag` value
* into this header and the endpoint will fail
* with a `304 Not Modified` if the item has not
* changed since. */
ifNoneMatch;
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.digest !== undefined) {
this.digest = fields.digest;
}
if (fields.ifMatch !== undefined) {
this.ifMatch = fields.ifMatch;
}
if (fields.ifNoneMatch !== undefined) {
this.ifNoneMatch = fields.ifNoneMatch;
}
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.CreateFileUploadSessionCommitByUrlHeaders = CreateFileUploadSessionCommitByUrlHeaders;
class CreateFileUploadSessionCommitHeaders {
/**
* The [RFC3230][1] message digest of the whole file.
*
* Only SHA1 is supported. The SHA1 digest must be Base64
encoded. The format of this header is
`sha=BASE64_ENCODED_DIGEST`.
*
* [1]: https://tools.ietf.org/html/rfc3230 */
digest;
/**
* Ensures this item hasn't recently changed before
* making changes.
*
* Pass in the item's last observed `etag` value
* into this header and the endpoint will fail
* with a `412 Precondition Failed` if it
* has changed since. */
ifMatch;
/**
* Ensures an item is only returned if it has changed.
*
* Pass in the item's last observed `etag` value
* into this header and the endpoint will fail
* with a `304 Not Modified` if the item has not
* changed since. */
ifNoneMatch;
/**
* Extra headers that will be included in the HTTP request. */
extraHeaders = {};
constructor(fields) {
if (fields.digest !== undefined) {
this.digest = fields.digest;
}
if (fields.ifMatch !== undefined) {
this.ifMatch = fields.ifMatch;
}
if (fields.ifNoneMatch !== undefined) {
this.ifNoneMatch = fields.ifNoneMatch;
}
if (fields.extraHeaders !== undefined) {
this.extraHeaders = fields.extraHeaders;
}
}
}
exports.CreateFileUploadSessionCommitHeaders = CreateFileUploadSessionCommitHeaders;
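// Illustrative sketch, not part of the generated module: the commit `digest` header is
// the SHA1 of the whole file (not of a single part), base64 encoded in the same `sha=`
// format. The helper below is hypothetical; `uploadBigFile` computes the same value
// incrementally through its running file hash.
/*
const crypto = require('node:crypto');

function wholeFileDigest(fileBuffer) {
  return 'sha='.concat(crypto.createHash('sha1').update(fileBuffer).digest('base64'));
}
*/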
class ChunkedUploadsManager {
auth;
networkSession = new network_1.NetworkSession({});
constructor(fields) {
if (fields.auth !== undefined) {
this.auth = fields.auth;
}
if (fields.networkSession !== undefined) {
this.networkSession = fields.networkSession;
}
}
/**
* Creates an upload session for a new file.
* @param {CreateFileUploadSessionRequestBody} requestBody Request body of createFileUploadSession method
* @param {CreateFileUploadSessionOptionalsInput} optionalsInput
* @returns {Promise<UploadSession>}
*/
async createFileUploadSession(requestBody, optionalsInput = {}) {
const optionals = new CreateFileUploadSessionOptionals({
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: ''.concat(this.networkSession.baseUrls.uploadUrl, '/2.0/files/upload_sessions'),
method: 'POST',
headers: headersMap,
data: serializeCreateFileUploadSessionRequestBody(requestBody),
contentType: 'application/json',
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadSession_1.deserializeUploadSession)(response.data),
rawData: response.data,
};
}
/**
* Creates an upload session for an existing file.
* @param {string} fileId The unique identifier that represents a file.
The ID for any file can be determined
by visiting a file in the web application
and copying the ID from the URL. For example,
for the URL `https://*.app.box.com/files/123`
the `file_id` is `123`.
Example: "12345"
* @param {CreateFileUploadSessionForExistingFileRequestBody} requestBody Request body of createFileUploadSessionForExistingFile method
* @param {CreateFileUploadSessionForExistingFileOptionalsInput} optionalsInput
* @returns {Promise<UploadSession>}
*/
async createFileUploadSessionForExistingFile(fileId, requestBody, optionalsInput = {}) {
const optionals = new CreateFileUploadSessionForExistingFileOptionals({
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: ''.concat(this.networkSession.baseUrls.uploadUrl, '/2.0/files/', (0, utils_2.toString)(fileId), '/upload_sessions'),
method: 'POST',
headers: headersMap,
data: serializeCreateFileUploadSessionForExistingFileRequestBody(requestBody),
contentType: 'application/json',
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadSession_1.deserializeUploadSession)(response.data),
rawData: response.data,
};
}
/**
* Prefer this method, called with the URLs returned when creating a new upload session, over the `getFileUploadSessionById` method.
* It routes requests to the Box data center closest to you and can significantly improve upload speed.
* Return information about an upload session.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions) endpoint.
* @param {string} url URL of getFileUploadSessionById method
* @param {GetFileUploadSessionByUrlOptionalsInput} optionalsInput
* @returns {Promise<UploadSession>}
*/
async getFileUploadSessionByUrl(url, optionalsInput = {}) {
const optionals = new GetFileUploadSessionByUrlOptionals({
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: url,
method: 'GET',
headers: headersMap,
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadSession_1.deserializeUploadSession)(response.data),
rawData: response.data,
};
}
/**
* Return information about an upload session.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions) endpoint.
* @param {string} uploadSessionId The ID of the upload session.
Example: "D5E3F7A"
* @param {GetFileUploadSessionByIdOptionalsInput} optionalsInput
* @returns {Promise<UploadSession>}
*/
async getFileUploadSessionById(uploadSessionId, optionalsInput = {}) {
const optionals = new GetFileUploadSessionByIdOptionals({
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: ''.concat(this.networkSession.baseUrls.uploadUrl, '/2.0/files/upload_sessions/', (0, utils_2.toString)(uploadSessionId)),
method: 'GET',
headers: headersMap,
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadSession_1.deserializeUploadSession)(response.data),
rawData: response.data,
};
}
/**
* Prefer this method, called with the URLs returned when creating a new upload session, over the `uploadFilePart` method.
* It uploads your content to the Box data center closest to you and can significantly improve upload speed.
* Uploads a chunk of a file for an upload session.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} url URL of uploadFilePart method
* @param {ByteStream} requestBody Request body of uploadFilePart method
* @param {UploadFilePartByUrlHeadersInput} headersInput Headers of uploadFilePart method
* @param {UploadFilePartByUrlOptionalsInput} optionalsInput
* @returns {Promise<UploadedPart>}
*/
async uploadFilePartByUrl(url, requestBody, headersInput, optionalsInput = {}) {
const headers = new UploadFilePartByUrlHeaders({
digest: headersInput.digest,
contentRange: headersInput.contentRange,
extraHeaders: headersInput.extraHeaders,
});
const optionals = new UploadFilePartByUrlOptionals({
cancellationToken: optionalsInput.cancellationToken,
});
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({
...{
['digest']: (0, utils_2.toString)(headers.digest),
['content-range']: (0, utils_2.toString)(headers.contentRange),
},
...headers.extraHeaders,
});
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: url,
method: 'PUT',
headers: headersMap,
fileStream: requestBody,
contentType: 'application/octet-stream',
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadedPart_1.deserializeUploadedPart)(response.data),
rawData: response.data,
};
}
/**
* Uploads a chunk of a file for an upload session.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} uploadSessionId The ID of the upload session.
Example: "D5E3F7A"
* @param {ByteStream} requestBody Request body of uploadFilePart method
* @param {UploadFilePartHeadersInput} headersInput Headers of uploadFilePart method
* @param {UploadFilePartOptionalsInput} optionalsInput
* @returns {Promise<UploadedPart>}
*/
async uploadFilePart(uploadSessionId, requestBody, headersInput, optionalsInput = {}) {
const headers = new UploadFilePartHeaders({
digest: headersInput.digest,
contentRange: headersInput.contentRange,
extraHeaders: headersInput.extraHeaders,
});
const optionals = new UploadFilePartOptionals({
cancellationToken: optionalsInput.cancellationToken,
});
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({
...{
['digest']: (0, utils_2.toString)(headers.digest),
['content-range']: (0, utils_2.toString)(headers.contentRange),
},
...headers.extraHeaders,
});
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: ''.concat(this.networkSession.baseUrls.uploadUrl, '/2.0/files/upload_sessions/', (0, utils_2.toString)(uploadSessionId)),
method: 'PUT',
headers: headersMap,
fileStream: requestBody,
contentType: 'application/octet-stream',
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadedPart_1.deserializeUploadedPart)(response.data),
rawData: response.data,
};
}
/**
* Prefer this method, called with the URLs returned when creating a new upload session, over the `deleteFileUploadSessionById` method.
* It routes requests to the Box data center closest to you and can significantly improve upload speed.
* Abort an upload session and discard all data uploaded.
*
* This cannot be reversed.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} url URL of deleteFileUploadSessionById method
* @param {DeleteFileUploadSessionByUrlOptionalsInput} optionalsInput
* @returns {Promise<undefined>}
*/
async deleteFileUploadSessionByUrl(url, optionalsInput = {}) {
const optionals = new DeleteFileUploadSessionByUrlOptionals({
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: url,
method: 'DELETE',
headers: headersMap,
responseFormat: 'no_content',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return void 0;
}
/**
* Abort an upload session and discard all data uploaded.
*
* This cannot be reversed.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} uploadSessionId The ID of the upload session.
Example: "D5E3F7A"
* @param {DeleteFileUploadSessionByIdOptionalsInput} optionalsInput
* @returns {Promise<undefined>}
*/
async deleteFileUploadSessionById(uploadSessionId, optionalsInput = {}) {
const optionals = new DeleteFileUploadSessionByIdOptionals({
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: ''.concat(this.networkSession.baseUrls.uploadUrl, '/2.0/files/upload_sessions/', (0, utils_2.toString)(uploadSessionId)),
method: 'DELETE',
headers: headersMap,
responseFormat: 'no_content',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return void 0;
}
/**
* Prefer this method, called with the URLs returned when creating a new upload session, over the `getFileUploadSessionParts` method.
* It routes requests to the Box data center closest to you and can significantly improve upload speed.
* Return a list of the chunks uploaded to the upload session so far.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} url URL of getFileUploadSessionParts method
* @param {GetFileUploadSessionPartsByUrlOptionalsInput} optionalsInput
* @returns {Promise<UploadParts>}
*/
async getFileUploadSessionPartsByUrl(url, optionalsInput = {}) {
const optionals = new GetFileUploadSessionPartsByUrlOptionals({
queryParams: optionalsInput.queryParams,
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const queryParams = optionals.queryParams;
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const queryParamsMap = (0, utils_1.prepareParams)({
['offset']: (0, utils_2.toString)(queryParams.offset),
['limit']: (0, utils_2.toString)(queryParams.limit),
});
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: url,
method: 'GET',
params: queryParamsMap,
headers: headersMap,
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadParts_1.deserializeUploadParts)(response.data),
rawData: response.data,
};
}
/**
* Return a list of the chunks uploaded to the upload session so far.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} uploadSessionId The ID of the upload session.
Example: "D5E3F7A"
* @param {GetFileUploadSessionPartsOptionalsInput} optionalsInput
* @returns {Promise<UploadParts>}
*/
async getFileUploadSessionParts(uploadSessionId, optionalsInput = {}) {
const optionals = new GetFileUploadSessionPartsOptionals({
queryParams: optionalsInput.queryParams,
headers: optionalsInput.headers,
cancellationToken: optionalsInput.cancellationToken,
});
const queryParams = optionals.queryParams;
const headers = optionals.headers;
const cancellationToken = optionals.cancellationToken;
const queryParamsMap = (0, utils_1.prepareParams)({
['offset']: (0, utils_2.toString)(queryParams.offset),
['limit']: (0, utils_2.toString)(queryParams.limit),
});
const headersMap = (0, utils_1.prepareParams)({ ...{}, ...headers.extraHeaders });
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: ''.concat(this.networkSession.baseUrls.uploadUrl, '/2.0/files/upload_sessions/', (0, utils_2.toString)(uploadSessionId), '/parts'),
method: 'GET',
params: queryParamsMap,
headers: headersMap,
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
return {
...(0, uploadParts_1.deserializeUploadParts)(response.data),
rawData: response.data,
};
}
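// Illustrative sketch, not part of the generated module: paging through the uploaded
// parts with the optional `offset`/`limit` query params handled above. The helper, and
// its assumption that the deserialized UploadParts exposes an `entries` array, are
// hypothetical.
/*
async function listAllParts(manager, uploadSessionId) {
  const allParts = [];
  const limit = 100;
  let offset = 0;
  while (true) {
    const page = await manager.getFileUploadSessionParts(uploadSessionId, {
      queryParams: { offset: offset, limit: limit },
    });
    const entries = page.entries ?? [];
    allParts.push(...entries);
    if (entries.length < limit) {
      return allParts;
    }
    offset += limit;
  }
}
*/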
/**
* Prefer this method, called with the URLs returned when creating a new upload session, over the `createFileUploadSessionCommit` method.
* It routes requests to the Box data center closest to you and can significantly improve upload speed.
* Close an upload session and create a file from the uploaded chunks.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} url URL of createFileUploadSessionCommit method
* @param {CreateFileUploadSessionCommitByUrlRequestBody} requestBody Request body of createFileUploadSessionCommit method
* @param {CreateFileUploadSessionCommitByUrlHeadersInput} headersInput Headers of createFileUploadSessionCommit method
* @param {CreateFileUploadSessionCommitByUrlOptionalsInput} optionalsInput
* @returns {Promise<undefined | Files>}
*/
async createFileUploadSessionCommitByUrl(url, requestBody, headersInput, optionalsInput = {}) {
const headers = new CreateFileUploadSessionCommitByUrlHeaders({
digest: headersInput.digest,
ifMatch: headersInput.ifMatch,
ifNoneMatch: headersInput.ifNoneMatch,
extraHeaders: headersInput.extraHeaders,
});
const optionals = new CreateFileUploadSessionCommitByUrlOptionals({
cancellationToken: optionalsInput.cancellationToken,
});
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({
...{
['digest']: (0, utils_2.toString)(headers.digest),
['if-match']: (0, utils_2.toString)(headers.ifMatch),
['if-none-match']: (0, utils_2.toString)(headers.ifNoneMatch),
},
...headers.extraHeaders,
});
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: url,
method: 'POST',
headers: headersMap,
data: serializeCreateFileUploadSessionCommitRequestBody(requestBody),
contentType: 'application/json',
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
if ((0, utils_2.toString)(response.status) == '202') {
return void 0;
}
return {
...(0, files_1.deserializeFiles)(response.data),
rawData: response.data,
};
}
/**
* Close an upload session and create a file from the uploaded chunks.
*
* The actual endpoint URL is returned by the [`Create upload session`](https://developer.box.com/reference/post-files-upload-sessions)
* and [`Get upload session`](https://developer.box.com/reference/get-files-upload-sessions-id) endpoints.
* @param {string} uploadSessionId The ID of the upload session.
Example: "D5E3F7A"
* @param {CreateFileUploadSessionCommitRequestBody} requestBody Request body of createFileUploadSessionCommit method
* @param {CreateFileUploadSessionCommitHeadersInput} headersInput Headers of createFileUploadSessionCommit method
* @param {CreateFileUploadSessionCommitOptionalsInput} optionalsInput
* @returns {Promise<undefined | Files>}
*/
async createFileUploadSessionCommit(uploadSessionId, requestBody, headersInput, optionalsInput = {}) {
const headers = new CreateFileUploadSessionCommitHeaders({
digest: headersInput.digest,
ifMatch: headersInput.ifMatch,
ifNoneMatch: headersInput.ifNoneMatch,
extraHeaders: headersInput.extraHeaders,
});
const optionals = new CreateFileUploadSessionCommitOptionals({
cancellationToken: optionalsInput.cancellationToken,
});
const cancellationToken = optionals.cancellationToken;
const headersMap = (0, utils_1.prepareParams)({
...{
['digest']: (0, utils_2.toString)(headers.digest),
['if-match']: (0, utils_2.toString)(headers.ifMatch),
['if-none-match']: (0, utils_2.toString)(headers.ifNoneMatch),
},
...headers.extraHeaders,
});
const response = await this.networkSession.networkClient.fetch(new fetchOptions_1.FetchOptions({
url: ''.concat(this.networkSession.baseUrls.uploadUrl, '/2.0/files/upload_sessions/', (0, utils_2.toString)(uploadSessionId), '/commit'),
method: 'POST',
headers: headersMap,
data: serializeCreateFileUploadSessionCommitRequestBody(requestBody),
contentType: 'application/json',
responseFormat: 'json',
auth: this.auth,
networkSession: this.networkSession,
cancellationToken: cancellationToken,
}));
if ((0, utils_2.toString)(response.status) == '202') {
return void 0;
}
return {
...(0, files_1.deserializeFiles)(response.data),
rawData: response.data,
};
}
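// Illustrative sketch, not part of the generated module: createFileUploadSessionCommit
// resolves to undefined when Box answers 202 (the uploaded parts are still being
// processed), so callers typically retry. The fixed one-second delay and retry count
// below are assumptions; the response's Retry-After header is not surfaced here.
/*
async function commitWithRetry(manager, uploadSessionId, parts, digest) {
  for (let attempt = 0; attempt < 5; attempt++) {
    const files = await manager.createFileUploadSessionCommit(
      uploadSessionId,
      { parts: parts },
      { digest: digest },
    );
    if (files !== undefined) {
      return files.entries[0];
    }
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
  throw new Error('Upload session commit did not complete after retries');
}
*/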
/**
* @param {PartAccumulator} acc
* @param {ByteStream} chunk
* @returns {Promise<PartAccumulator>}
*/
async reducer(acc, chunk) {
const lastIndex = acc.lastIndex;
const parts = acc.parts;
const chunkBuffer = await (0, utils_6.readByteStream)(chunk);
const hash = new utils_8.Hash({ algorithm: 'sha1' });
await hash.updateHash(chunkBuffer);
const sha1 = await hash.digestHash('base64');
const digest = ''.concat('sha=', sha1);
const chunkSize = (0, utils_9.bufferLength)(chunkBuffer);
const bytesStart = lastIndex + 1;
const bytesEnd = lastIndex + chunkSize;
const contentRange = ''.concat('bytes ', (0, utils_2.toString)(bytesStart), '-', (0, utils_2.toString)(bytesEnd), '/', (0, utils_2.toString)(acc.fileSize));
const uploadedPart = await this.uploadFilePartByUrl(acc.uploadPartUrl, (0, utils_3.generateByteStreamFromBuffer)(chunkBuffer), {
digest: digest,
contentRange: contentRange,
});
const part = uploadedPart.part;
const partSha1 = (0, utils_4.hexToBase64)(part.sha1);
if (!(partSha1 == sha1)) {
throw new Error('Assertion failed');
}
if (!(part.size == chunkSize)) {
throw new Error('Assertion failed');
}
if (!(part.offset == bytesStart)) {
throw new Error('Assertion failed');
}
await acc.fileHash.updateHash(chunkBuffer);
return {
lastIndex: bytesEnd,
parts: parts.concat([part]),
fileSize: acc.fileSize,
uploadPartUrl: acc.uploadPartUrl,
fileHash: acc.fileHash,
};
}
/**
* Starts the process of uploading a big file in chunks. Resolves to a FileFull object representing the uploaded file.
* @param {ByteStream} file The stream of the file to upload.
* @param {string} fileName The name of the file, which will be used for storage in Box.
* @param {number} fileSize The total size of the file for the chunked upload in bytes.
* @param {string} parentFolderId The ID of the folder where the file should be uploaded.
* @param {CancellationToken} cancellationToken Token used for request cancellation.
* @returns {Promise<FileFull>}
*/
async uploadBigFile(file, fileName, fileSize, parentFolderId, cancellationToken) {
const uploadSession = await this.createFileUploadSession({
fileName: fileName,
fileSize: fileSize,
folderId: parentFolderId,
}, {
headers: new CreateFileUploadSessionHeaders({}),
cancellationToken: cancellationToken,
});
const uploadPartUrl = uploadSession.sessionEndpoints.uploadPart;
const commitUrl = uploadSession.sessionEndpoints.commit;
const listPartsUrl = uploadSession.sessionEndpoints.listParts;
const partSize = uploadSession.partSize;
const totalParts = uploadSession.totalParts;
if (!(partSize * totalParts >= fileSize)) {
throw new Error('Assertion failed');
}
if (!(uploadSession.numPartsProcessed == 0)) {
throw new Error('Assertion failed');
}
const fileHash = new utils_8.Hash({ algorithm: 'sha1' });
const chunksIterator = (0, utils_5.iterateChunks)(file, partSize, fileSize);
const results = await (0, utils_7.reduceIterator)(chunksIterator, this.reducer.bind(this), {
lastIndex: -1,
parts: [],
fileSize: fileSize,
uploadPartUrl: uploadPartUrl,
fileHash: fileHash,
});
const parts = results.parts;
const processedSessionParts = await this.getFileUploadSessionPartsByUrl(listPartsUrl, {
queryParams: {},
headers: new GetFileUploadSessionPartsByUrlHeaders({}),
cancellationToken: cancellationToken,
});
if (!(processedSessionParts.totalCount == totalParts)) {
throw new Error('Assertion failed');
}
const sha1 = await fileHash.digestHash('base64');
const digest = ''.concat('sha=', sha1);
const committedSession = await this.createFileUploadSessionCommitByUrl(commitUrl, {
parts: parts,
}, {
digest: digest,
}, {
cancellationToken: cancellationToken,
});
return committedSession.entries[0];
}
}
exports.ChunkedUploadsManager = ChunkedUploadsManager;
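// Illustrative sketch, not part of the generated module: driving uploadBigFile with a
// Node file stream. Constructing the manager directly and the shape of the `auth`
// object are assumptions; in typical SDK usage the manager is reached through the
// client object rather than instantiated by hand.
/*
const fs = require('node:fs');

async function uploadLargeFile(auth, pathOnDisk, fileName, parentFolderId) {
  const manager = new ChunkedUploadsManager({ auth: auth });
  const fileSize = fs.statSync(pathOnDisk).size;
  const fileStream = fs.createReadStream(pathOnDisk);
  // Creates the session, uploads each part, verifies the part list, and commits;
  // resolves to the committed file entry. No cancellation token in this sketch.
  return manager.uploadBigFile(fileStream, fileName, fileSize, parentFolderId, undefined);
}
*/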
function serializeCreateFileUploadSessionRequestBody(val) {
return {
['folder_id']: val.folderId,
['file_size']: val.fileSize,
['file_name']: val.fileName,
};
}
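// Illustrative sketch, not part of the generated module: the serializer above maps the
// camelCase request body onto the snake_case wire format (values here are hypothetical).
/*
serializeCreateFileUploadSessionRequestBody({
  folderId: '0',
  fileSize: 104857600,
  fileName: 'video.mp4',
});
// => { folder_id: '0', file_size: 104857600, file_name: 'video.mp4' }
*/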
function deserializeCreateFileUploadSessionRequestBody(val) {
if (!(0, json_4.sdIsMap)(val)) {
throw new errors_1.BoxSdkError({
message: 'Expecting a map for "CreateFileUploadSessionRequestBody"',
});
}
if (val.folder_id == void 0) {
throw new errors_1.BoxSdkError({
message: 'Expecting "folder_id" of type "CreateFileUploadSessionRequestBody" to be defined',
});
}
if (!(0, json_2.sdIsString)(val.folder_id)) {
throw new errors_1.BoxSdkError({
message: 'Expecting string for "folder_id" of type "CreateFileUploadSessionRequestBody"',
});
}
const folderId = val.folder_id;
if (val.file_size == void 0) {
throw new errors_1.BoxSdkError({
message: 'Expecting "file_size" of type "CreateFileUploadSessionRequestBody" to be defined',
});
}
if (!(0, json_1.sdIsNumber)(val.file_size)) {
throw new errors_1.BoxSdkError({
message: 'Expecting number for "file_size" of type "CreateFileUploadSessionRequestBody"',
});
}
const fileSize = val.file_size;
if (val.file_name == void 0) {
throw new errors_1.BoxSdkError({
message: 'Expecting "file_name" of type "CreateFileUploadSessionRequestBody" to be defined',
});
}
if (!(0, json_2.sdIsString)(val.file_name)) {
throw new errors_1.BoxSdkError({
message: 'Expecting string for "file_name" of type "CreateFileUploadSessionRequestBody"',
});
}
const fileName = val.file_name;
return {
folderId: folderId,
fileSize: fileSize,
fileName: fileName,
};
}
function serializeCreateFileUploadSessionForExistingFileRequestBody(val) {
return { ['file_size']: val.fileSize, ['file_name']: val.fileName };
}
function deserializeCreateFileUploadSessionForExistingFileRequestBody(val) {
if (!(0, json_4.sdIsMap)(val)) {
throw new errors_1.BoxSdkError({
message: 'Expecting a map for "CreateFileUploadSessionForExistingFileRequestBody"',
});
}
if (val.file_size == void 0) {
throw new errors_1.BoxSdkError({
message: 'Expecting "file_size" of type "CreateFileUploadSessionForExistingFileRequestBody" to be defined',
});
}
if (!(0, json_1.sdIsNumber)(val.file_size)) {
throw new errors_1.BoxSdkError({
message: 'Expecting number for "file_size" of type "CreateFileUploadSessionForExistingFileRequestBody"',
});
}
const fileSize = val.file_size;
if (!(val.file_name == void 0) && !(0, json_2.sdIsString)(val.file_name)) {
throw new errors_1.BoxSdkError({
message: 'Expecting string for "file_name" of type "CreateFileUploadSessionForExistingFileRequestBody"',
});
}
const fileName = val.file_name == void 0 ? void 0 : val.file_name;
return {
fileSize: fileSize,
fileName: fileName,
};
}
function serializeCreateFileUploadSessionCommitByUrlRequestBody(val) {
return {
['parts']: val.parts.map(function (item) {
return (0, uploadPart_1.serializeUploadPart)(item);
}),
};
}
function deserializeCreateFileUploadSessionCommitByUrlRequestBody(val) {
if (!(0