// Package: @devino.solutions/upup — bundled CommonJS development build
// (node.cjs.development.js, 437 lines / 15.8 kB, JavaScript).
// NOTE(review): the lines above were npm-registry page text, not valid
// JavaScript; they are kept here as a comment so the file parses.
;
Object.defineProperty(exports, '__esModule', { value: true });
var clientS3 = require('@aws-sdk/client-s3');
var s3RequestPresigner = require('@aws-sdk/s3-request-presigner');
var crypto$1 = require('crypto');
var identity = require('@azure/identity');
var storageBlob = require('@azure/storage-blob');
// Unique ID creation requires a high quality random # generator. In the browser we therefore
// require the crypto API and do not support built-in fallback to lower quality random number
// generators (like Math.random()).
let getRandomValues;
const rnds8 = new Uint8Array(16);
function rng() {
// lazy load so that environments that need to polyfill have a chance to do so
if (!getRandomValues) {
// getRandomValues needs to be invoked in a context where "this" is a Crypto implementation.
getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto);
if (!getRandomValues) {
throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
}
}
return getRandomValues(rnds8);
}
/**
* Convert array of 16 byte values to UUID string format of the form:
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
*/
// Lookup table: byteToHex[b] is the two-character lowercase hex for byte b.
const byteToHex = [];
for (let i = 0; i < 256; ++i) {
    byteToHex.push((i + 0x100).toString(16).slice(1));
}
/**
 * Format 16 bytes starting at `offset` as a canonical UUID string
 * (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx). "Unsafe" because no validation
 * is performed on the input array or the resulting string.
 * @param {Uint8Array|number[]} arr - Source bytes.
 * @param {number} [offset=0] - Index of the first of the 16 bytes.
 * @returns {string} Lowercase hyphenated UUID string.
 */
function unsafeStringify(arr, offset = 0) {
    const hex = (i) => byteToHex[arr[offset + i]];
    const groups = [
        hex(0) + hex(1) + hex(2) + hex(3),
        hex(4) + hex(5),
        hex(6) + hex(7),
        hex(8) + hex(9),
        hex(10) + hex(11) + hex(12) + hex(13) + hex(14) + hex(15)
    ];
    return groups.join('-');
}
const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto);
var native = {
randomUUID
};
function v4(options, buf, offset) {
if (native.randomUUID && !buf && !options) {
return native.randomUUID();
}
options = options || {};
const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds[6] = rnds[6] & 0x0f | 0x40;
rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
if (buf) {
offset = offset || 0;
for (let i = 0; i < 16; ++i) {
buf[offset + i] = rnds[i];
}
return buf;
}
return unsafeStringify(rnds);
}
// Upload source adapters supported by the widget.
// (DROPBOX, UNSPLASH and BOX are planned but not yet enabled.)
var UploadAdapter;
(function (UploadAdapter) {
    const names = ['INTERNAL', 'GOOGLE_DRIVE', 'ONE_DRIVE', 'LINK', 'CAMERA'];
    for (const name of names) {
        UploadAdapter[name] = name;
    }
})(UploadAdapter || (UploadAdapter = {}));
// Storage providers supported by the presign helpers, keyed by display
// name, valued with the wire identifier. Pre-initialized so the export
// binding exists before the IIFE populates it.
exports.UpupProvider = void 0;
(function (UpupProvider) {
    const providers = {
        AWS: 'aws',
        Azure: 'azure',
        BackBlaze: 'backblaze',
        DigitalOcean: 'digitalocean'
    };
    for (const [name, value] of Object.entries(providers)) {
        UpupProvider[name] = value;
    }
})(exports.UpupProvider || (exports.UpupProvider = {}));
// Machine-readable error categories carried by UploadError instances.
var UploadErrorType;
(function (UploadErrorType) {
    const codes = [
        'PERMISSION_ERROR',
        'EXPIRED_URL',
        'FILE_VALIDATION_ERROR',
        'PRESIGNED_URL_ERROR',
        'SIGNED_URL_ERROR',
        'CORS_CONFIG_ERROR',
        'TEMPORARY_CREDENTIALS_ERROR',
        'UNKNOWN_UPLOAD_ERROR'
    ];
    for (const code of codes) {
        UploadErrorType[code] = code;
    }
})(UploadErrorType || (UploadErrorType = {}));
class UploadError extends Error {
constructor(message, type, retryable, status) {
if (type === void 0) {
type = UploadErrorType.UNKNOWN_UPLOAD_ERROR;
}
if (retryable === void 0) {
retryable = false;
}
super(message);
this.type = type;
this.retryable = retryable;
this.status = status;
this.DEFAULT_ERROR_STATUS_CODE = 500;
this.name = 'UploadError';
this.status = status || this.DEFAULT_ERROR_STATUS_CODE;
}
}
function checkFileType(accept, file) {
const fileType = file.type;
// Return false for invalid inputs
if (!accept || !fileType) return false;
// Validate fileType has proper MIME format (type/subtype)
const [type, subtype] = fileType.split('/');
if (!type || !subtype) return false;
const acceptedTypes = accept.split(',').map(t => t.trim());
const isValidType = acceptedTypes.includes('*') || acceptedTypes.some(type => {
if (type.includes('/*')) {
const [mainType] = type.split('/');
return fileType.startsWith(mainType);
}
return type.toLowerCase() === fileType.toLowerCase();
});
if (!isValidType) return false;
return isValidType;
}
// Default per-file size cap when the caller does not supply one.
const DEFAULT_MAX_FILE_SIZE = 10 * 1024 * 1024; // 10 MB
/**
 * Validate file metadata before an upload URL is generated.
 *
 * NOTE(review): the "missing param" check is a truthiness test, so a
 * zero-byte file (size === 0) is reported as a missing `size` param —
 * confirm this is intentional before changing it.
 *
 * @param {object} file - { name, type, size, accept?, maxFileSize? }.
 * @throws {UploadError} FILE_VALIDATION_ERROR with status 400 (missing
 *     params / disallowed type) or 413 (too large).
 */
function fileValidateParams(file) {
    const missing = ['name', 'type', 'size'].filter(key => !file[key]);
    if (missing.length > 0) {
        throw new UploadError(`Missing required file param: ${missing.join(', ')}`, UploadErrorType.FILE_VALIDATION_ERROR, false, 400);
    }
    const {
        type: fileType,
        accept = '*',
        size,
        maxFileSize = DEFAULT_MAX_FILE_SIZE
    } = file;
    // Reject MIME types outside the accept pattern list.
    if (!checkFileType(accept, file)) {
        throw new UploadError(`File type ${fileType} not allowed. Accepted types: ${accept}`, UploadErrorType.FILE_VALIDATION_ERROR, false, 400);
    }
    // Enforce the size cap.
    if (size > maxFileSize) {
        throw new UploadError(`File size: ${size} exceeds maximum limit of ${maxFileSize / (1024 * 1024)}MB`, UploadErrorType.FILE_VALIDATION_ERROR, false, 413);
    }
}
// Default lifetime for read-back (GET) signed URLs: three days, in seconds.
const DEFAULT_URL_VALID_TIME = 3600 * 24 * 3;
/**
 * Create a presigned GET URL for an existing S3 object.
 * @param {object} s3ClientConfig - AWS SDK S3Client configuration.
 * @param {string} Key - Object key within the bucket.
 * @param {string} Bucket - Bucket name.
 * @param {number} [expiresIn] - URL lifetime in seconds (default: 3 days).
 * @returns {Promise<string>} The signed URL.
 * @throws {UploadError} SIGNED_URL_ERROR when presigning fails.
 */
async function s3GenerateSignedUrl(s3ClientConfig, Key, Bucket, expiresIn = DEFAULT_URL_VALID_TIME) {
    try {
        const client = new clientS3.S3Client(s3ClientConfig);
        const command = new clientS3.GetObjectCommand({ Bucket, Key });
        // `return await` keeps presigner rejections inside this try/catch.
        return await s3RequestPresigner.getSignedUrl(client, command, { expiresIn });
    } catch (error) {
        throw new UploadError(error.message, UploadErrorType.SIGNED_URL_ERROR, false);
    }
}
// HMAC-SHA256 of `message` keyed by `key`; returns the raw digest Buffer.
function hmac(key, message) {
    const mac = crypto$1.createHmac('sha256', key);
    return mac.update(message).digest();
}
// Hex-encoded SHA-256 of `message` (used as the SigV4 payload hash).
function hash(message) {
    const digest = crypto$1.createHash('sha256');
    return digest.update(message).digest('hex');
}
/**
 * Derive the AWS Signature V4 signing key by chaining HMAC-SHA256:
 * secret → date → region → service → "aws4_request".
 * @returns {Buffer} The derived signing key.
 */
function getSignatureKey(key, dateStamp, region, service) {
    const steps = [dateStamp, region, service, 'aws4_request'];
    return steps.reduce((derived, part) => hmac(derived, part), `AWS4${key}`);
}
// Base64-encoded MD5 of `content`, as the S3 Content-MD5 header requires.
function calculateMD5(content) {
    const md5 = crypto$1.createHash('md5');
    md5.update(content);
    return md5.digest('base64');
}
/**
 * Resolve the request Host for a provider's S3-compatible endpoint.
 *
 * Fix: the BackBlaze branch used `endpoint.split('https://')[1]`, which
 * returned `undefined` for `http://` or scheme-less endpoints; any leading
 * scheme is now stripped generically (identical result for `https://`
 * endpoints, so existing configs are unaffected).
 *
 * @param {string} bucketName - Bucket name.
 * @param {string} provider - An UpupProvider value.
 * @param {{endpoint?: string, region?: string}} _ref - Provider config.
 * @returns {string} Host name, or '' for unknown providers (e.g. Azure).
 */
function getHost(bucketName, provider, _ref) {
    let {
        endpoint,
        region
    } = _ref;
    switch (provider) {
        case exports.UpupProvider.AWS:
            return `${bucketName}.s3.${region}.amazonaws.com`;
        case exports.UpupProvider.BackBlaze:
            // Strip any URL scheme to leave the bare host.
            return (endpoint || '').replace(/^[a-z][a-z0-9+.-]*:\/\//i, '');
        case exports.UpupProvider.DigitalOcean:
            return `${bucketName}.${region}.digitaloceanspaces.com`;
        default:
            return '';
    }
}
/**
 * Build AWS Signature Version 4 headers for the `PUT ?cors` request issued
 * by `s3UpdateCORS`.
 *
 * @param {string} corsConfig - The XML CORS document (the signed payload).
 * @param {string} bucketName - Target bucket.
 * @param {object} _ref2 - Client config: { region, credentials: { accessKeyId, secretAccessKey }, endpoint }.
 * @param {string} provider - An UpupProvider value; selects host, URI and query-string forms.
 * @returns {object} Headers: Content-Type, Content-MD5, Authorization, x-amz-content-sha256, x-amz-date, Host.
 */
function awsGenerateSignatureHeaders(corsConfig, bucketName, _ref2, provider) {
    let {
        region,
        credentials: {
            accessKeyId,
            secretAccessKey
        },
        endpoint
    } = _ref2;
    const service = 's3';
    const host = getHost(bucketName, provider, {
        endpoint,
        region
    });
    // Calculate Content-MD5 (base64) — required by S3 for CORS PUTs.
    const contentMD5 = calculateMD5(corsConfig);
    // Step 1: Create date strings — amzDate "YYYYMMDDTHHMMSSZ", dateStamp "YYYYMMDD".
    const date = new Date();
    const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '');
    const dateStamp = amzDate.slice(0, 8);
    // Step 2: Create canonical request. URI and query string differ per
    // provider (BackBlaze addresses the bucket in the path and uses
    // "cors=null"; Azure entries are placeholders — it is not signed here).
    const method = 'PUT';
    const canonicalUriMap = {
        [exports.UpupProvider.AWS]: '/',
        [exports.UpupProvider.BackBlaze]: `/${bucketName}/`,
        [exports.UpupProvider.DigitalOcean]: `/`,
        [exports.UpupProvider.Azure]: ``
    };
    const canonicalUri = canonicalUriMap[provider];
    const canonicalQueryStringMap = {
        [exports.UpupProvider.AWS]: 'cors=',
        [exports.UpupProvider.BackBlaze]: 'cors=null',
        [exports.UpupProvider.DigitalOcean]: 'cors=',
        [exports.UpupProvider.Azure]: ``
    };
    const canonicalQueryString = canonicalQueryStringMap[provider];
    // SHA-256 of the payload; appears both as a header and in the canonical request.
    const payloadHash = hash(corsConfig);
    // Headers must be listed lowercase, sorted, newline-terminated.
    const canonicalHeaders = `content-md5:${contentMD5}\n` + `content-type:application/xml\n` + `host:${host}\n` + `x-amz-content-sha256:${payloadHash}\n` + `x-amz-date:${amzDate}\n`;
    const signedHeaders = 'content-md5;content-type;host;x-amz-content-sha256;x-amz-date';
    const canonicalRequest = [method, canonicalUri, canonicalQueryString, canonicalHeaders, signedHeaders, payloadHash].join('\n');
    // Step 3: Create string to sign (algorithm, timestamp, scope, hashed canonical request).
    const algorithm = 'AWS4-HMAC-SHA256';
    const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`;
    const stringToSign = [algorithm, amzDate, credentialScope, hash(canonicalRequest)].join('\n');
    // Step 4: Calculate signature with the derived per-day signing key.
    const signingKey = getSignatureKey(secretAccessKey, dateStamp, region, service);
    const signature = crypto$1.createHmac('sha256', signingKey).update(stringToSign).digest('hex');
    // Step 5: Create authorization header.
    const authorizationHeader = `${algorithm} ` + `Credential=${accessKeyId}/${credentialScope}, ` + `SignedHeaders=${signedHeaders}, ` + `Signature=${signature}`;
    return {
        'Content-Type': 'application/xml',
        'Content-MD5': contentMD5,
        Authorization: authorizationHeader,
        'x-amz-content-sha256': payloadHash,
        'x-amz-date': amzDate,
        Host: host
    };
}
/**
 * Replace the bucket's CORS configuration so `origin` may PUT/GET/POST
 * directly to the bucket (done before handing out presigned upload URLs).
 *
 * @param {string} origin - The browser origin to allow.
 * @param {string} bucketName - Target bucket.
 * @param {object} config - S3 client config ({ region, endpoint, credentials }).
 * @param {string} provider - An UpupProvider value.
 * @returns {Promise<string>} The provider's raw response body.
 * @throws {UploadError} CORS_CONFIG_ERROR when the provider rejects the update.
 */
async function s3UpdateCORS(origin, bucketName, config, provider) {
    // Provider-specific URL for the CORS subresource. (Azure has no
    // S3-style CORS endpoint, hence the empty entry.)
    const urlMap = {
        [exports.UpupProvider.AWS]: `https://${bucketName}.s3.${config.region}.amazonaws.com/?cors`,
        [exports.UpupProvider.BackBlaze]: `${config.endpoint}/${bucketName}/?cors=null`,
        [exports.UpupProvider.DigitalOcean]: `https://${bucketName}.${config.region}.digitaloceanspaces.com/?cors`,
        [exports.UpupProvider.Azure]: ``
    };
    const url = urlMap[provider];
    // NOTE: this XML is both the request body and the signed payload — any
    // byte change alters the MD5/SHA-256 baked into the signature headers.
    const corsConfig = `<?xml version="1.0" encoding="UTF-8"?><CORSConfiguration>
<CORSRule>
<ID>Allow S3 Operations from my site: ${origin}</ID>
<AllowedOrigin>${origin}</AllowedOrigin>
<AllowedHeader>*</AllowedHeader>
<AllowedMethod>HEAD</AllowedMethod>
<AllowedMethod>PUT</AllowedMethod>
<AllowedMethod>GET</AllowedMethod>
<AllowedMethod>POST</AllowedMethod>
<ExposeHeader>ETag</ExposeHeader>
<MaxAgeSeconds>3600</MaxAgeSeconds>
</CORSRule>
</CORSConfiguration>`;
    // Sign the request with AWS SigV4 headers for this exact payload.
    const headers = awsGenerateSignatureHeaders(corsConfig, bucketName, config, provider);
    const response = await fetch(url, {
        method: 'PUT',
        body: corsConfig,
        headers
    });
    if (!response.ok) {
        const errorText = await response.text();
        throw new UploadError(errorText, UploadErrorType.CORS_CONFIG_ERROR, false, response.status);
    }
    const data = await response.text();
    return data;
}
// Default presigned-upload-URL lifetime, in seconds.
const DEFAULT_EXPIRES_IN = 3600;
/**
 * Normalize an arbitrary error (AWS XML-style `{ Code, Message }` object or
 * a plain Error) into `{ message, errorType }`.
 *
 * Fix: the previous version dereferenced `error.message` without a null
 * guard — `(error || {}).Message || error.message` — so a null/undefined
 * error threw a TypeError instead of being normalized.
 *
 * @param {any} error - The caught error (may be null/undefined).
 * @returns {{message: string|undefined, errorType: string}}
 */
function getUploadErrorParams(error) {
    const err = error || {};
    const message = err.Message || err.message;
    const errorType = err.Code || UploadErrorType.PRESIGNED_URL_ERROR;
    return {
        message,
        errorType
    };
}
/**
 * Produce a presigned PUT URL (plus a long-lived signed GET URL) for a new
 * S3-compatible object, after validating the file and configuring bucket
 * CORS for the requesting origin.
 *
 * @param {object} _ref - { fileParams, bucketName, s3ClientConfig,
 *     expiresIn?, origin, provider }.
 * @returns {Promise<{key: string, publicUrl: string, uploadUrl: string, expiresIn: number}>}
 * @throws {UploadError} Validation/CORS errors pass through unchanged; any
 *     other failure is wrapped with a normalized message and status 500.
 */
async function s3GeneratePresignedUrl(_ref) {
    let {
        fileParams,
        bucketName: Bucket,
        s3ClientConfig,
        expiresIn = DEFAULT_EXPIRES_IN,
        origin,
        provider
    } = _ref;
    const { name: fileName, type: ContentType, size: ContentLength } = fileParams;
    try {
        // Reject invalid files before doing any network work.
        fileValidateParams(fileParams);
        // Ensure the bucket accepts browser uploads from the caller's origin.
        await s3UpdateCORS(origin, Bucket, s3ClientConfig, provider);
        const client = new clientS3.S3Client(s3ClientConfig);
        // UUID prefix keeps concurrent uploads of identically named files distinct.
        const Key = `${v4()}-${fileName}`;
        const command = new clientS3.PutObjectCommand({
            Bucket,
            Key,
            ContentType,
            ContentLength
        });
        // Presign the PUT, pinning content-type/length into the signature.
        const uploadUrl = await s3RequestPresigner.getSignedUrl(client, command, {
            expiresIn,
            signableHeaders: new Set(['content-type', 'content-length'])
        });
        // Signed GET URL for reading the object back after upload.
        const publicUrl = await s3GenerateSignedUrl(s3ClientConfig, Key, Bucket);
        return {
            key: Key,
            publicUrl,
            uploadUrl,
            expiresIn
        };
    } catch (error) {
        if (error instanceof UploadError) throw error;
        const { message, errorType } = getUploadErrorParams(error);
        throw new UploadError(message, errorType, false, 500);
    }
}
/**
 * Fetch a user-delegation key from Azure Blob Storage, valid from now for
 * `expiresIn` seconds (default one hour).
 * @param {object} blobServiceClient - An authenticated BlobServiceClient.
 * @param {number} [expiresIn=3600] - Validity window in seconds.
 * @returns {Promise<object>} The user delegation key.
 * @throws {UploadError} TEMPORARY_CREDENTIALS_ERROR on failure.
 */
async function azureGetTemporaryCredentials(blobServiceClient, expiresIn = 3600) {
    try {
        // Delegation keys need explicit start/end times; convert seconds to minutes.
        const startsOn = new Date();
        const expiresOn = new Date(startsOn);
        expiresOn.setMinutes(startsOn.getMinutes() + expiresIn / 60);
        return await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
    } catch (error) {
        throw new UploadError(error.message, UploadErrorType.TEMPORARY_CREDENTIALS_ERROR, false, 500);
    }
}
/**
 * Generate a user-delegation SAS upload URL for a new Azure blob.
 *
 * @param {object} _ref - { fileParams, containerName, credentials
 *     ({ tenantId, clientId, clientSecret, storageAccount }), expiresIn? }.
 * @returns {Promise<{key: string, publicUrl: string, uploadUrl: string, expiresIn: number}>}
 * @throws {UploadError} Validation/credential errors pass through; anything
 *     else is wrapped as PRESIGNED_URL_ERROR with status 500.
 */
async function azureGenerateSasUrl(_ref) {
    let {
        fileParams,
        containerName,
        credentials,
        expiresIn = 3600
    } = _ref;
    try {
        // Reject invalid files before touching Azure.
        fileValidateParams(fileParams);
        // Authenticate against Azure AD with the app's client secret.
        const credential = new identity.ClientSecretCredential(credentials.tenantId, credentials.clientId, credentials.clientSecret);
        const accountUrl = `https://${credentials.storageAccount}.blob.core.windows.net`;
        const blobServiceClient = new storageBlob.BlobServiceClient(accountUrl, credential);
        // A user-delegation key signs the SAS without exposing the account key.
        const userDelegationKey = await azureGetTemporaryCredentials(blobServiceClient);
        const { name: fileName, type: contentType } = fileParams;
        // UUID prefix keeps concurrent uploads of identically named files distinct.
        const blobName = `${v4()}-${fileName}`;
        const blobClient = blobServiceClient.getContainerClient(containerName).getBlobClient(blobName);
        // read/add/create/write permissions, HTTPS-only, valid for `expiresIn` seconds.
        const sasParams = {
            containerName,
            blobName,
            permissions: storageBlob.BlobSASPermissions.parse('racw'),
            startsOn: new Date(),
            expiresOn: new Date(Date.now() + expiresIn * 1000),
            protocol: storageBlob.SASProtocol.Https,
            contentType
        };
        const sasToken = storageBlob.generateBlobSASQueryParameters(sasParams, userDelegationKey, credentials.storageAccount).toString();
        return {
            key: blobName,
            publicUrl: blobClient.url,
            uploadUrl: `${blobClient.url}?${sasToken}`,
            expiresIn
        };
    } catch (error) {
        if (error instanceof UploadError) throw error;
        throw new UploadError(error.message, UploadErrorType.PRESIGNED_URL_ERROR, false, 500);
    }
}
// Public API of this bundle: the Azure SAS-URL generator plus the
// S3-compatible presign helpers (AWS / BackBlaze / DigitalOcean).
exports.azureGenerateSasUrl = azureGenerateSasUrl;
exports.s3GeneratePresignedUrl = s3GeneratePresignedUrl;
exports.s3GenerateSignedUrl = s3GenerateSignedUrl;
//# sourceMappingURL=node.cjs.development.js.map