amplify-s3-chunk-upload
A custom storage upload plugin for AWS Amplify. Instead of reading the entire file into memory, it reads and uploads the file chunk by chunk.
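
A minimal usage sketch, assuming an Amplify v4/v5-style app (the `Credentials` export and the exact `Storage.put` options may vary by Amplify version):

import { Storage } from '@aws-amplify/storage';
import { Credentials } from '@aws-amplify/core';
import { StorageChunkUpload } from 'amplify-s3-chunk-upload';

// Register the plugin as an additional Storage provider.
Storage.addPluggable(new StorageChunkUpload({}, Credentials));

async function upload(file) {
    // Route the upload through this provider by name (see getProviderName below).
    return Storage.put('my-file.bin', file, { provider: 'StorageChunkUpload' });
}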
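// Async/await helper emitted by the TypeScript compiler for down-level targets.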
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.StorageChunkUpload = void 0;
const core_1 = require("@aws-amplify/core");
const storage_1 = require("@aws-amplify/storage");
const events = require("events");
const StorageChunkManagedUpload_1 = require("./StorageChunkManagedUpload");
const logger = new core_1.Logger('StorageChunkUpload');
const AMPLIFY_SYMBOL = (typeof Symbol !== 'undefined' && typeof Symbol.for === 'function'
    ? Symbol.for('amplify_default')
    : '@@amplify_default');
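// Publish upload lifecycle events on the Amplify Hub 'storage' channel when tracking is enabled.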
const dispatchStorageEvent = (track, event, attrs, metrics, message) => {
    if (track) {
        const data = { attrs };
        if (metrics) {
            data['metrics'] = metrics;
        }
        core_1.Hub.dispatch('storage', {
            event,
            data,
            message,
        }, 'Storage', AMPLIFY_SYMBOL);
    }
};
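/**
 * A pluggable Storage provider that extends Amplify's AWSS3Provider and
 * delegates uploads to StorageChunkManagedUpload, so the file is read
 * chunk by chunk rather than loaded into memory all at once.
 */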
class StorageChunkUpload extends storage_1.AWSS3Provider {
    constructor(config, credentials) {
        super(config);
        this.credentials = credentials;
        this._storageConfig = config ? config : {};
        logger.debug('Storage Options', this._storageConfig);
    }
    /**
     * Configure the Storage category with AWS configuration
     * @param {Object} config - Configuration of the Storage
     * @return {Object} - Current configuration
     */
    configure(config) {
        logger.debug('configure Storage', config);
        if (!config)
            return this._storageConfig;
        const amplifyConfig = core_1.Parser.parseMobilehubConfig(config);
        this._storageConfig = Object.assign({}, this._storageConfig, amplifyConfig.Storage['AWSS3']);
        if (!this._storageConfig.bucket) {
            logger.debug('Do not have bucket yet');
        }
        return this._storageConfig;
    }
    /**
     * Upload an object to S3, reading the body chunk by chunk instead of
     * buffering it fully in memory
     * @param {string} key - Key of the object
     * @param {Object} object - Object body to upload
     * @param {Object} [config] - Per-call configuration overrides
     * @return {Promise} - Resolves to { key } on success
     */
    put(key, object, config) {
        return __awaiter(this, void 0, void 0, function* () {
            const credentialsOK = yield this._ensureCredentials();
            if (!credentialsOK) {
                return Promise.reject('No credentials');
            }
            const opt = Object.assign({}, this._storageConfig, config);
            const { bucket, track, progressCallback } = opt;
            const { contentType, contentDisposition, cacheControl, expires, metadata, tagging, acl } = opt;
            const { serverSideEncryption, SSECustomerAlgorithm, SSECustomerKey, SSECustomerKeyMD5, SSEKMSKeyId } = opt;
            const type = contentType ? contentType : 'binary/octet-stream';
            const prefix = this._getPrefix(opt);
            const final_key = prefix + key;
            logger.debug('put ' + key + ' to ' + final_key);
            const params = {
                Bucket: bucket,
                Key: final_key,
                Body: object,
                ContentType: type,
            };
            if (cacheControl) {
                params.CacheControl = cacheControl;
            }
            if (contentDisposition) {
                params.ContentDisposition = contentDisposition;
            }
            if (expires) {
                params.Expires = expires;
            }
            if (metadata) {
                params.Metadata = metadata;
            }
            if (tagging) {
                params.Tagging = tagging;
            }
            if (serverSideEncryption) {
                params.ServerSideEncryption = serverSideEncryption;
                if (SSECustomerAlgorithm) {
                    params.SSECustomerAlgorithm = SSECustomerAlgorithm;
                }
                if (SSECustomerKey) {
                    params.SSECustomerKey = SSECustomerKey;
                }
                if (SSECustomerKeyMD5) {
                    params.SSECustomerKeyMD5 = SSECustomerKeyMD5;
                }
                if (SSEKMSKeyId) {
                    params.SSEKMSKeyId = SSEKMSKeyId;
                }
            }
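            // Apply the ACL before handing params to the chunked uploader, then
            // delegate the transfer to StorageChunkManagedUpload, which reads the
            // Body in parts and reports progress via 'sendProgress' events.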
            if (acl) {
                params.ACL = acl;
            }
            const emitter = new events.EventEmitter();
            const uploader = new StorageChunkManagedUpload_1.StorageChunkManagedUpload(params, this.credentials, opt, emitter);
            try {
                emitter.on('sendProgress', (progress) => {
                    if (progressCallback) {
                        if (typeof progressCallback === 'function') {
                            progressCallback(progress);
                        }
                        else {
                            logger.warn('progressCallback should be a function, not a ' + typeof progressCallback);
                        }
                    }
                });
                const response = yield uploader.upload();
                logger.debug('upload result', response);
                dispatchStorageEvent(track, 'upload', { method: 'put', result: 'success' }, null, `Upload success for ${key}`);
                return {
                    key,
                };
            }
            catch (error) {
                logger.warn('error uploading', error);
                dispatchStorageEvent(track, 'upload', { method: 'put', result: 'failed' }, null, `Error uploading ${key}`);
                throw error;
            }
        });
    }
    // return the category of this plugin ('Storage')
    getCategory() {
        return StorageChunkUpload.category;
    }
    // return the name of this provider
    getProviderName() {
        return StorageChunkUpload.providerName;
    }
    /**
     * Build the S3 key prefix ('public/', 'protected/<identityId>/' or
     * 'private/<identityId>/') for the requested access level
     * @private
     */
    _getPrefix(config) {
        const { credentials, level } = config;
        const customPrefix = config.customPrefix || {};
        const identityId = config.identityId || credentials.identityId;
        const privatePath = (customPrefix.private !== undefined ? customPrefix.private : 'private/') + identityId + '/';
        const protectedPath = (customPrefix.protected !== undefined ? customPrefix.protected : 'protected/') + identityId + '/';
        const publicPath = customPrefix.public !== undefined ? customPrefix.public : 'public/';
        switch (level) {
            case 'private':
                return privatePath;
            case 'protected':
                return protectedPath;
            default:
                return publicPath;
        }
    }
    /**
     * Resolve the current credentials and cache them on the storage config
     * @private
     */
    _ensureCredentials() {
        return this.credentials
            .get()
            .then((credentials) => {
                if (!credentials)
                    return false;
                // shear strips the resolved credentials down to the fields Storage needs
                const cred = this.credentials.shear(credentials);
                logger.debug('set credentials for storage', cred);
                this._storageConfig.credentials = cred;
                return true;
            })
            .catch((error) => {
                logger.warn('ensure credentials error', error);
                return false;
            });
    }
}
exports.StorageChunkUpload = StorageChunkUpload;
// category and provider name
StorageChunkUpload.category = 'Storage';
StorageChunkUpload.providerName = 'StorageChunkUpload';