// Package: @aws-amplify/storage — Storage category of aws-amplify
// (npm registry viewer metadata: 91 lines (89 loc) • 3.22 kB • JavaScript)
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Mark this CommonJS module as transpiled-ESM so interop helpers use its named exports.
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declare the export key so it exists even during circular-require resolution.
exports.uploadData = void 0;
const utils_1 = require("../../utils");
const assertValidationError_1 = require("../../../../errors/utils/assertValidationError");
const validation_1 = require("../../../../errors/types/validation");
const constants_1 = require("../../utils/constants");
const byteLength_1 = require("./byteLength");
const putObjectJob_1 = require("./putObjectJob");
const multipart_1 = require("./multipart");
/**
 * Uploads data to the given S3 object. Payloads at or below the single-part
 * threshold (5MB by default) go through one PUT request; larger payloads — or
 * payloads whose size cannot be determined up front — use multipart upload.
 *
 * Limitations:
 * * Maximum object size is 5TB.
 * * Maximum object size when the size is unknown before upload is 50GB.
 *
 * @param input - The UploadDataInput object.
 * @returns A cancelable and resumable task; the upload outcome is exposed as a
 * promise on its `result` property.
 * @throws service: {@link S3Exception} - thrown when checking for existence of the object
 * @throws validation: {@link StorageValidationErrorCode } - Validation errors.
 *
 * @example
 * ```ts
 * // Upload a file to s3 bucket
 * await uploadData({ key, data: file, options: {
 *   onProgress, // Optional progress callback.
 * } }).result;
 * ```
 * @example
 * ```ts
 * // Cancel a task
 * const uploadTask = uploadData({ key, data: file });
 * //...
 * uploadTask.cancel();
 * try {
 *   await uploadTask.result;
 * } catch (error) {
 *   if(isCancelError(error)) {
 *     // Handle error thrown by task cancelation.
 *   }
 * }
 *```
 *
 * @example
 * ```ts
 * // Pause and resume a task
 * const uploadTask = uploadData({ key, data: file });
 * //...
 * uploadTask.pause();
 * //...
 * uploadTask.resume();
 * //...
 * await uploadTask.result;
 * ```
 */
const uploadData = (input) => {
    const { createUploadTask } = utils_1;
    const totalLength = (0, byteLength_1.byteLength)(input.data);
    // Reject payloads above the hard object-size cap before doing any work.
    // An undefined length is allowed here; the multipart path handles it.
    (0, assertValidationError_1.assertValidationError)(totalLength === undefined || totalLength <= constants_1.MAX_OBJECT_SIZE, validation_1.StorageValidationErrorCode.ObjectIsTooLarge);
    const fitsSinglePut = totalLength !== undefined &&
        totalLength > 0 &&
        totalLength <= constants_1.DEFAULT_PART_SIZE;
    if (!fitsSinglePut) {
        // Size unknown, zero, or above the single-PUT threshold: multipart upload,
        // which additionally supports pause/resume.
        const { multipartUploadJob, onPause, onResume, onCancel } = (0, multipart_1.getMultipartUploadHandlers)(input, totalLength);
        return createUploadTask({
            isMultipartUpload: true,
            job: multipartUploadJob,
            onCancel: (message) => onCancel(message),
            onPause,
            onResume,
        });
    }
    // Small payload with a known size: a single PUT, canceled via AbortSignal.
    const abortController = new AbortController();
    return createUploadTask({
        isMultipartUpload: false,
        job: (0, putObjectJob_1.putObjectJob)(input, abortController.signal, totalLength),
        onCancel: (message) => abortController.abort(message),
    });
};
exports.uploadData = uploadData;
//# sourceMappingURL=index.js.map