@tasolutions/express-core
All libs for express
const aws = require('aws-sdk');
const path = require('path');
const exifremove = require('exifremove');
const jo = require('jpeg-autorotate');
const stream = require('stream');
const request = require('request');
const archiver = require('archiver');
const { createPresignedPost } = require("@aws-sdk/s3-presigned-post");
const { S3Client } = require("@aws-sdk/client-s3");
const { awsInfo } = require('../config');
// Config AWS
const s3 = new aws.S3({
region: awsInfo.region,
accessKeyId: awsInfo.accessKeyId,
secretAccessKey: awsInfo.secretAccessKey,
signatureVersion: 'v4',
});
const validate = async (data) => {
const filetypes = /jpeg|jpg|png|pdf|doc|docx/;
data.name = data.name.replace(/\s/g, '-');
const extname = filetypes.test(path.extname(data.name).toLowerCase());
const mimetype = filetypes.test(data.mimetype);
if (!mimetype || !extname) {
return { status: false, message: 'Format not correct' };
}
let dataBinary = Buffer.from(data.data, 'binary');
const type = data.mimetype.split('/')[0];
if (type === 'image') {
await jo.rotate(dataBinary, { quality: 85 }).then(({ buffer }) => {
dataBinary = exifremove.remove(buffer);
}).catch((error) => {
if (error.code === jo.errors.correct_orientation) {
console.log('The orientation of this image is already correct!')
}
})
}
let key = Date.now().toString() + '-' + data.name;
if (data.folder_path) {
key = data.folder_path + '/' + Date.now().toString() + '-' + data.name;
}
const params = {
Bucket: awsInfo.s3Info.bucket, // pass your bucket name
Key: key,
Body: dataBinary,
ContentType: data.mimetype,
};
return { status: true, data: params };
};
const upload = async (params) => {
return s3.upload(params).promise();
};
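/*
 * Example usage (illustrative sketch): validate and upload a file coming from an
 * Express route, via the s3Service object exported at the bottom of this file.
 * The `req.files.document` shape ({ name, mimetype, data }) is an assumption
 * based on middleware such as express-fileupload; adapt it to your setup.
 *
 *   const checked = await s3Service.validate(req.files.document);
 *   if (!checked.status) return res.status(400).json({ message: checked.message });
 *   const uploaded = await s3Service.upload(checked.data);
 *   console.log(uploaded.Location); // public URL of the stored object
 */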
/**
* This function uploads a zip archive built from a buffer
* @param buffer
* @param name
* @param folder
*/
const uploadZip = async (buffer, name, folder) => {
const key = folder ? `${folder}/${name}` : `zip/${name}`;
let archive = archiver('zip');
archive.on('error', err => {
console.log('err', err);
throw new Error(err);
});
archive.append(buffer, { name: name });
archive.finalize();
const params = {
Bucket: awsInfo.s3Info.bucket, // pass your bucket name
Key: `${key}.zip`,
Body: archive,
ContentType: 'application/zip',
};
return s3.upload(params).promise();
}
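/*
 * Example (illustrative sketch): wrap a generated CSV buffer in a zip and store
 * it under reports/export.csv.zip. The folder and file names are placeholders.
 *
 *   const csvBuffer = Buffer.from('id,name\n1,Alice\n', 'utf8');
 *   const result = await s3Service.uploadZip(csvBuffer, 'export.csv', 'reports');
 *   console.log(result.Key); // reports/export.csv.zip
 */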
/**
* This function zips a list of files (in-memory buffers or remote URLs) and streams the archive to S3
* @param files
* @param destinationKey
*/
const createZipFile = async (files, destinationKey) => {
return new Promise((resolve, reject) => {
let zipStream = streamTo(destinationKey);
zipStream.on('close', resolve);
zipStream.on('end', resolve);
zipStream.on('error', reject);
let archive = archiver('zip');
archive.on('error', err => reject(err));
archive.pipe(zipStream);
for (let i = 0; i < files.length; i++) {
// data buffer
if (files[i].buffer)
archive.append(files[i].buffer, { name: files[i].name });
// data url
else if (files[i].file_url && files[i].file_name)
archive.append(request(files[i].file_url), { name: files[i].file_name });
}
archive.finalize();
}).catch(err => {
throw new Error(err);
});
};
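/*
 * Example (illustrative sketch): bundle one in-memory buffer and one remote file
 * into a single archive streamed straight to S3. The URL and key are placeholders.
 *
 *   await s3Service.createZipFile([
 *     { buffer: Buffer.from('hello'), name: 'hello.txt' },
 *     { file_url: 'https://example.com/report.pdf', file_name: 'report.pdf' },
 *   ], 'exports/bundle.zip');
 */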
/*
* This function creates a PassThrough stream whose contents are uploaded to s3 under the given key
*/
const streamTo = (key, ContentType = 'application/zip') => {
const passthrough = new stream.PassThrough();
s3.upload(
{
Bucket: awsInfo.s3Info.bucket,
Key: key,
Body: passthrough,
ContentType,
ServerSideEncryption: 'AES256'
},
(err) => {
if (err) console.error('[streamTo] upload error:', err);
}
);
return passthrough;
};
/**
* This function uploads a pdf file (defaults to the shell-plant folder)
* @param body
* @param name
* @param folder
* @returns {Promise<Object>}
*/
const uploadPdf = async (body, name, folder = '') => {
const key = folder ? folder + '/' + name : 'shell-plant' + '/' + name;
const params = {
Bucket: awsInfo.s3Info.bucket, // pass your bucket name
Key: key,
Body: body,
ContentType: 'application/pdf',
};
return s3.upload(params).promise();
};
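/*
 * Example (illustrative sketch): store a rendered PDF buffer. `pdfBuffer` is a
 * placeholder that could come from a library such as pdfkit or puppeteer, and
 * the folder name is arbitrary.
 *
 *   const result = await s3Service.uploadPdf(pdfBuffer, 'invoice-123.pdf', 'invoices');
 *   console.log(result.Location); // stored as invoices/invoice-123.pdf
 */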
/**
* This function uploads an xls file to the archive folder
* @param body
* @param name
* @used export check-in/out data - Time keeping service
* @returns {Promise<Object>}
*/
const uploadXls = async (body, name) => {
const key = 'archive' + '/' + name;
const params = {
Bucket: awsInfo.s3Info.bucket, // pass your bucket name
Key: key,
Body: body,
ContentType: 'application/vnd.ms-excel',
};
return s3.upload(params).promise();
};
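/*
 * Example (illustrative sketch): persist a timesheet export. `xlsBuffer` is
 * assumed to be a Buffer produced by a spreadsheet library such as exceljs.
 *
 *   await s3Service.uploadXls(xlsBuffer, 'check-in-out-2024-01.xls');
 *   // stored as archive/check-in-out-2024-01.xls
 */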
/**
* This function returns a PassThrough stream whose contents are uploaded to s3 under the given key
* @param {string} key
* @returns {stream.PassThrough}
*/
const uploadToStream = (key) => {
const pass = new stream.PassThrough();
const params = { Bucket: awsInfo.s3Info.bucket, Key: key, Body: pass };
s3.upload(params, function (err, data) {
if (err) console.log('uploadToStream Err', err);
});
return pass;
};
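/*
 * Example (illustrative sketch): pipe any readable stream into S3 without
 * buffering it in memory. The source file path is a placeholder.
 *
 *   const fs = require('fs');
 *   fs.createReadStream('/tmp/large-export.csv')
 *     .pipe(s3Service.uploadToStream('exports/large-export.csv'));
 */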
/**
* This function generates a signed S3 GET URL for a file
* @param path
* @param key
* @param expires
* @used on/off hire inspection - Plant service
* @returns {Promise<string|Object>}
*/
const generateSignedS3URL = async (path = 'signed-archive', key, expires = null) => {
const filetypes = /jpeg|jpg|png|pdf/;
// validate file type
if (!filetypes.test((key.split('.').pop()).toLowerCase())) return { status: false, message: 'Format not correct' };
const signedUrl = s3.getSignedUrl("getObject", {
Key: path + '/' + Date.now().toString() + '-' + key,
Bucket: awsInfo.s3Info.bucket,
Expires: expires || 900, // S3 default is 900 seconds (15 minutes)
});
return signedUrl;
};
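/*
 * Example (illustrative sketch): create a time-limited GET URL for a stored
 * document. Note that the generated key is prefixed with Date.now(), so an
 * object must exist under that exact key for the URL to resolve.
 *
 *   const url = await s3Service.generateSignedS3URL('signed-archive', 'report.pdf', 300);
 *   // url is valid for 5 minutes
 */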
/**
* This function creates a presigned S3 POST for direct client-side uploads
* @param {Object} body
* @returns {Promise<Object>} url and form fields for the POST
*/
const generateSignedS3Url = async (body) => {
const client = new S3Client({
region: awsInfo.region,
credentials: {
accessKeyId: awsInfo.accessKeyId,
secretAccessKey: awsInfo.secretAccessKey,
},
});
// file name from client
let path = 'uploads/tmp';
if (body.path) path = body.path + '/tmp';
const Key = path + '/' + Date.now().toString() + '-' + "${filename}";
// get condition from client
let Conditions = body.conditions && body.conditions.length ? body.conditions : [
{ acl: "public-read" },
{ bucket: awsInfo.s3Info.bucket },
];
// set expires time
const Expires = body.expires || 600;
// add starts-with folder
Conditions.push(["starts-with", "$key", path]);
return await createPresignedPost(client, {
Bucket: awsInfo.s3Info.bucket,
Key,
Conditions,
Fields: { acl: "public-read" },
Expires, // Seconds before the presigned post expires. 3600 by default.
});
};
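/*
 * Example (illustrative sketch): hand the presigned POST data to a browser
 * client, which then uploads the file directly to S3. The path and expiry below
 * are placeholders; the ${filename} placeholder in the key is replaced by S3
 * with the name of the uploaded file.
 *
 *   const { url, fields } = await s3Service.generateSignedS3Url({ path: 'uploads/user', expires: 300 });
 *   // Client side: build a multipart/form-data POST to `url` containing all
 *   // `fields` plus the file itself as the last form field.
 */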
/**
* This function moves uploaded files out of the tmp folder once a signed upload completes
* @param {key, id, module} body
* @returns {Promise<Object|null>} old and new folder paths
*/
const moveSignedCompleted = async (body) => {
if (!body.key || !body.module || !body.id) throw { message: 'Invalid input data' };
if (!body.key.includes('tmp')) return null;
// get folder move from key of signed data
const folderToMove = body.key.substring(0, body.key.lastIndexOf('/') + 1);
if (!folderToMove) throw { message: '== move error with old folder ==' };
// get folder destination: uploads + <module folder> + id (uploads/user/user_id)
const destinationFolder = body.key.substring(0, body.key.indexOf('tmp')) + body.module + '/' + body.id;
try {
// get list item from s3 with prefix (old folder)
const listObjectsResponse = await s3.listObjects({
Bucket: awsInfo.s3Info.bucket,
Prefix: folderToMove,
Delimiter: '/',
}).promise();
// folder data
const folderContentInfo = listObjectsResponse.Contents;
// folder prefix
const folderPrefix = listObjectsResponse.Prefix;
// move files and delete item in old folder
await Promise.all(
folderContentInfo.map(async (fileInfo) => {
// move folder
await s3.copyObject({
Bucket: awsInfo.s3Info.bucket,
CopySource: `${awsInfo.s3Info.bucket}/${fileInfo.Key}`, // old file Key
Key: `${destinationFolder}/${fileInfo.Key.replace(folderPrefix, '')}`, // new file Key
}).promise();
// delete data
await s3.deleteObject({
Bucket: awsInfo.s3Info.bucket,
Key: fileInfo.Key,
}).promise();
})
);
// TODO: call api replace url in database by module {id, folderToMove, destinationFolder, module}
return { oldFolder: folderToMove, newFolder: destinationFolder };
} catch (err) {
console.error('[moveSignedCompleted]: ', err); // error handling
throw { message: err.message };
}
};
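/*
 * Example (illustrative sketch): after the browser finishes a presigned upload,
 * move the object out of the tmp folder into its module folder. The key, module
 * name, and id are placeholders.
 *
 *   const moved = await s3Service.moveSignedCompleted({
 *     key: 'uploads/tmp/1700000000000-avatar.png',
 *     module: 'user',
 *     id: '42',
 *   });
 *   // moved = { oldFolder: 'uploads/tmp/', newFolder: 'uploads/user/42' }
 */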
const s3Service = {
validate,
upload,
createZipFile,
uploadPdf,
uploadXls,
uploadZip,
generateSignedS3URL,
generateSignedS3Url,
uploadToStream,
moveSignedCompleted
};
module.exports = s3Service;