// @aws-amplify/storage — Storage category of aws-amplify (AWSS3 provider implementation).
/*
* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
import {
ConsoleLogger as Logger,
Hub,
Credentials,
Parser
} from '@aws-amplify/core';
import * as S3 from 'aws-sdk/clients/s3';
import { StorageOptions, StorageProvider } from '../types';
// Module-scoped logger for this provider.
const logger = new Logger('AWSS3Provider');

// Prefer the shared well-known symbol when the runtime supports Symbol.for;
// otherwise fall back to the sentinel string used by older environments.
const symbolSupported = typeof Symbol !== 'undefined' && typeof Symbol.for === 'function';
const AMPLIFY_SYMBOL = (symbolSupported
    ? Symbol.for('amplify_default')
    : '@@amplify_default') as Symbol;
/**
 * Emit a Storage event on the Amplify Hub, but only when tracking is enabled.
 *
 * @param track   - when false, the event is silently dropped.
 * @param event   - Hub event name (e.g. 'download', 'upload', 'list').
 * @param attrs   - event attributes (method/result pairs).
 * @param metrics - optional numeric metrics (e.g. fileSize), or null.
 * @param message - human-readable description of the event.
 */
const dispatchStorageEvent = (track: boolean, event: string, attrs: any, metrics: any, message: string) => {
    if (!track) {
        return;
    }
    const payload = {
        event,
        data: { attrs, metrics },
        message
    };
    Hub.dispatch('storage', payload, 'Storage', AMPLIFY_SYMBOL);
};
/**
* Provide storage methods to use AWS S3
*/
/**
 * Provide storage methods to use AWS S3
 *
 * Every public operation follows the same pattern: resolve credentials,
 * merge the per-call config over the instance config, compute the
 * level-specific key prefix, then call the corresponding aws-sdk S3 API.
 * Hub events are emitted via dispatchStorageEvent when `track` is set.
 *
 * NOTE: when credentials cannot be resolved, operations reject with the
 * plain string 'No credentials' (not an Error instance).
 */
export default class AWSS3Provider implements StorageProvider{
    // Identifiers used when this plugin is registered with the Storage category.
    static CATEGORY = 'Storage';
    static PROVIDER_NAME = 'AWSS3';
    /**
     * @private
     * Merged storage configuration (bucket, region, level, credentials, ...).
     * Updated by configure() and refreshed with credentials by _ensureCredentials().
     */
    private _config;
    /**
     * Initialize Storage with AWS configurations
     * @param {Object} config - Configuration object for storage
     */
    constructor(config?: StorageOptions) {
        this._config = config ? config: {};
        logger.debug('Storage Options', this._config);
    }
    /**
     * get the category of the plugin
     */
    public getCategory(): string {
        return AWSS3Provider.CATEGORY;
    }
    /**
     * get provider name of the plugin
     */
    getProviderName(): string {
        return AWSS3Provider.PROVIDER_NAME;
    }
    /**
     * Configure Storage part with aws configuration
     * @param {Object} config - Configuration of the Storage
     * @return {Object} - Current configuration
     */
    public configure(config?): object {
        logger.debug('configure Storage', config);
        // Called with no argument, this acts as a getter for the current config.
        if (!config) return this._config;
        // Normalize possible Mobile Hub-style config before merging.
        const amplifyConfig = Parser.parseMobilehubConfig(config);
        this._config = Object.assign({}, this._config, amplifyConfig.Storage);
        if (!this._config.bucket) { logger.debug('Do not have bucket yet'); }
        return this._config;
    }
    /**
     * Get a presigned URL of the file or the object data when download:true
     *
     * @param {String} key - key of the object
     * @param {Object} [config] - { level : private|protected|public, download: true|false }
     * @return - A promise resolves to Amazon S3 presigned URL on success
     */
    public async get(key: string, config?): Promise<String|Object> {
        const credentialsOK = await this._ensureCredentials();
        if (!credentialsOK) { return Promise.reject('No credentials'); }
        // Per-call config overrides the instance config for this request only.
        const opt = Object.assign({}, this._config, config);
        const { bucket, region, credentials, level, download, track, expires } = opt;
        const prefix = this._prefix(opt);
        const final_key = prefix + key;
        const s3 = this._createS3(opt);
        logger.debug('get ' + key + ' from ' + final_key);
        const params: any = {
            Bucket: bucket,
            Key: final_key
        };
        if (download === true) {
            // download:true -> fetch the object body instead of signing a URL.
            return new Promise<any>((res, rej) => {
                s3.getObject(params, (err, data) => {
                    if (err) {
                        dispatchStorageEvent(
                            track,
                            'download',
                            {
                                method: 'get',
                                result: 'failed'
                            },
                            null,
                            `Download failed with ${err.message}`
                        );
                        rej(err);
                    } else {
                        dispatchStorageEvent(
                            track,
                            'download',
                            { method: 'get', result: 'success' },
                            // Body length read via index access — Body's concrete
                            // type (Buffer/typed array/string) varies by platform.
                            { fileSize: Number(data.Body['length']) },
                            `Download success for ${key}`
                        );
                        res(data);
                    }
                });
            });
        }
        // Presigned-URL path; optional custom expiration forwarded to S3.
        if (expires) { params.Expires = expires; }
        return new Promise<string>((res, rej) => {
            try {
                const url = s3.getSignedUrl('getObject', params);
                dispatchStorageEvent(
                    track,
                    'getSignedUrl',
                    { method: 'get', result: 'success' },
                    null,
                    `Signed URL: ${url}`
                );
                res(url);
            } catch (e) {
                logger.warn('get signed url error', e);
                dispatchStorageEvent(
                    track,
                    'getSignedUrl',
                    { method: 'get', result: 'failed' },
                    null,
                    `Could not get a signed URL for ${key}`
                );
                rej(e);
            }
        });
    }
    /**
     * Put a file in S3 bucket specified to configure method
     * @param {String} key - key of the object
     * @param {Object} object - File to be put in Amazon S3 bucket
     * @param {Object} [config] - { level : private|protected|public, contentType: MIME Types,
     * progressCallback: function }
     * @return - promise resolves to object on success
     */
    public async put(key: string, object, config?): Promise<Object> {
        const credentialsOK = await this._ensureCredentials();
        if (!credentialsOK) { return Promise.reject('No credentials'); }
        const opt = Object.assign({}, this._config, config);
        const { bucket, region, credentials, level, track, progressCallback } = opt;
        const { contentType, contentDisposition, cacheControl, expires, metadata, tagging } = opt;
        const { serverSideEncryption, SSECustomerAlgorithm, SSECustomerKey, SSECustomerKeyMD5, SSEKMSKeyId } = opt;
        // Default MIME type when the caller does not specify one.
        const type = contentType ? contentType : 'binary/octet-stream';
        const prefix = this._prefix(opt);
        const final_key = prefix + key;
        const s3 = this._createS3(opt);
        logger.debug('put ' + key + ' to ' + final_key);
        const params: any = {
            Bucket: bucket,
            Key: final_key,
            Body: object,
            ContentType: type
        };
        // Optional S3 object attributes; only set when provided.
        if (cacheControl) { params.CacheControl = cacheControl; }
        if (contentDisposition) { params.ContentDisposition = contentDisposition; }
        if (expires) { params.Expires = expires; }
        if (metadata) { params.Metadata = metadata; }
        if (tagging) { params.Tagging = tagging; }
        // Server-side encryption options are only forwarded when
        // serverSideEncryption itself is enabled.
        if (serverSideEncryption) {
            params.ServerSideEncryption = serverSideEncryption;
            if (SSECustomerAlgorithm) { params.SSECustomerAlgorithm = SSECustomerAlgorithm; }
            if (SSECustomerKey) { params.SSECustomerKey = SSECustomerKey; }
            if (SSECustomerKeyMD5) { params.SSECustomerKeyMD5 = SSECustomerKeyMD5; }
            if (SSEKMSKeyId) { params.SSEKMSKeyId = SSEKMSKeyId; }
        }
        try {
            // s3.upload (managed upload) surfaces progress events; the
            // caller's progressCallback is invoked for each one.
            const upload = s3
                .upload(params)
                .on('httpUploadProgress', progress => {
                    if (progressCallback) {
                        if (typeof progressCallback === 'function') {
                            progressCallback(progress);
                        } else {
                            logger.warn('progressCallback should be a function, not a ' + typeof progressCallback);
                        }
                    }
                });
            const data = await upload.promise();
            logger.debug('upload result', data);
            dispatchStorageEvent(
                track,
                'upload',
                { method: 'put', result: 'success' },
                null,
                `Upload success for ${key}`
            );
            // Strip the level/identity prefix so the caller gets back the
            // same key they supplied.
            return {
                key: data.Key.substr(prefix.length)
            };
        } catch (e) {
            logger.warn("error uploading", e);
            dispatchStorageEvent(
                track,
                'upload',
                { method: 'put', result: 'failed' },
                null,
                `Error uploading ${key}`
            );
            throw e;
        }
    }
    /**
     * Remove the object for specified key
     * @param {String} key - key of the object
     * @param {Object} [config] - { level : private|protected|public }
     * @return - Promise resolves upon successful removal of the object
     */
    public async remove(key: string, config?): Promise<any> {
        const credentialsOK = await this._ensureCredentials();
        if (!credentialsOK) { return Promise.reject('No credentials'); }
        const opt = Object.assign({}, this._config, config, );
        const { bucket, region, credentials, level, track } = opt;
        const prefix = this._prefix(opt);
        const final_key = prefix + key;
        const s3 = this._createS3(opt);
        logger.debug('remove ' + key + ' from ' + final_key);
        const params = {
            Bucket: bucket,
            Key: final_key
        };
        return new Promise<any>((res, rej) => {
            s3.deleteObject(params, (err, data) => {
                if (err) {
                    dispatchStorageEvent(
                        track,
                        'delete',
                        { method: 'remove', result: 'failed' },
                        null,
                        `Deletion of ${key} failed with ${err}`
                    );
                    rej(err);
                } else {
                    dispatchStorageEvent(
                        track,
                        'delete',
                        { method: 'remove', result: 'success' },
                        null,
                        `Deleted ${key} successfully`
                    );
                    res(data);
                }
            });
        });
    }
    /**
     * List bucket objects relative to the level and prefix specified
     * @param {String} path - the path that contains objects
     * @param {Object} [config] - { level : private|protected|public }
     * @return - Promise resolves to list of keys for all objects in path
     */
    public async list(path, config?): Promise<any> {
        const credentialsOK = await this._ensureCredentials();
        if (!credentialsOK) { return Promise.reject('No credentials'); }
        const opt = Object.assign({}, this._config, config);
        const { bucket, region, credentials, level, download, track } = opt;
        const prefix = this._prefix(opt);
        const final_path = prefix + path;
        const s3 = this._createS3(opt);
        logger.debug('list ' + path + ' from ' + final_path);
        const params = {
            Bucket: bucket,
            Prefix: final_path
        };
        return new Promise<any>((res, rej) => {
            // NOTE(review): a single listObjects call is made — results beyond
            // S3's per-request page are not fetched here; confirm callers are
            // aware there is no pagination.
            s3.listObjects(params, (err, data) => {
                if (err) {
                    logger.warn('list error', err);
                    dispatchStorageEvent(
                        track,
                        'list',
                        { method: 'list', result: 'failed' },
                        null,
                        `Listing items failed: ${err.message}`
                    );
                    rej(err);
                } else {
                    // Map raw S3 entries to the public shape, stripping the
                    // level/identity prefix from each key.
                    const list = data.Contents.map(item => {
                        return {
                            key: item.Key.substr(prefix.length),
                            eTag: item.ETag,
                            lastModified: item.LastModified,
                            size: item.Size
                        };
                    });
                    dispatchStorageEvent(
                        track,
                        'list',
                        { method: 'list', result: 'success' },
                        null,
                        `${list.length} items returned from list operation`
                    );
                    logger.debug('list', list);
                    res(list);
                }
            });
        });
    }
    /**
     * @private
     * Resolve credentials via the shared Credentials module and cache them on
     * this._config. Resolves to true on success and false when credentials
     * are missing or retrieval fails — it never rejects.
     */
    _ensureCredentials() {
        return Credentials.get()
            .then(credentials => {
                if (!credentials) return false;
                // NOTE(review): shear() appears to produce a trimmed copy of
                // the credentials object — confirm in @aws-amplify/core.
                const cred = Credentials.shear(credentials);
                logger.debug('set credentials for storage', cred);
                this._config.credentials = cred;
                return true;
            })
            .catch(err => {
                logger.warn('ensure credentials error', err);
                return false;
            });
    }
    /**
     * @private
     * Build the S3 key prefix for the requested access level.
     * Defaults are 'private/<identityId>/', 'protected/<identityId>/' and
     * 'public/'; each segment can be overridden via config.customPrefix.
     */
    private _prefix(config) {
        const { credentials, level } = config;
        const customPrefix = config.customPrefix || {};
        // config.identityId, when given, overrides the identityId taken
        // from the resolved credentials.
        const identityId = config.identityId || credentials.identityId;
        const privatePath = (customPrefix.private !== undefined ? customPrefix.private : 'private/') + identityId + '/';
        const protectedPath = (customPrefix.protected !== undefined ?
            customPrefix.protected : 'protected/') + identityId + '/';
        const publicPath = customPrefix.public !== undefined ? customPrefix.public : 'public/';
        switch (level) {
            case 'private':
                return privatePath;
            case 'protected':
                return protectedPath;
            default:
                // Any other (or missing) level falls back to the public prefix.
                return publicPath;
        }
    }
    /**
     * @private
     * Create an S3 client bound to the configured bucket, region and
     * credentials, signing requests with Signature Version 4.
     */
    private _createS3(config) {
        const { bucket, region, credentials } = config;
        return new S3({
            apiVersion: '2006-03-01',
            params: { Bucket: bucket },
            signatureVersion: 'v4',
            region,
            credentials
        });
    }
}