/**
 * @tonyism1/db-utils
 * MySQL utilities with automatic table and column creation, plus B2 Backblaze media storage.
 * (Package metadata: 432 lines / 367 loc, 13.5 kB, JavaScript)
 */
import B2 from 'backblaze-b2';
import sharp from 'sharp';
import crypto from 'crypto';
import path from 'path';
import fs from 'fs';
import { v4 as uuidv4 } from 'uuid';
import { toSnakeCase } from './utils.js';
/**
 * Creates a Backblaze B2 storage client bound to a single bucket, exposing
 * upload/download/delete/list helpers plus URL generation.
 *
 * @param {Object} config
 * @param {string} config.applicationKeyId - B2 application key ID (required).
 * @param {string} config.applicationKey - B2 application key (required).
 * @param {string} [config.bucketId] - Target bucket ID; either this or bucketName is required.
 * @param {string} [config.bucketName] - Bucket name, used to look the bucket up when bucketId is absent.
 * @param {string} config.publicUrl - Base URL used to build public file URLs (required).
 * @returns {Promise<Object>} The storage API object.
 * @throws {Error} If required config fields are missing.
 */
export async function createStorage(config) {
const {
applicationKeyId,
applicationKey,
bucketId,
bucketName,
publicUrl
} = config;
// Fail fast on missing credentials or bucket addressing info.
if (!applicationKeyId || !applicationKey) {
throw new Error('B2 applicationKeyId and applicationKey are required in config');
}
if (!bucketId && !bucketName) {
throw new Error('Either bucketId or bucketName must be provided in config');
}
if (!publicUrl) {
throw new Error('publicUrl is required in config for generating public URLs');
}
const b2 = new B2({
applicationKeyId,
applicationKey
});
// Cache authentication and bucket info
// (per-instance closure state shared by all returned methods).
let authData = null;
let bucketInfo = null;
// Authorizes against B2 once and memoizes the result.
// NOTE(review): B2 auth tokens eventually expire; a long-lived instance may
// need re-authorization — confirm against the backblaze-b2 client docs.
async function authenticate() {
if (!authData) {
authData = await b2.authorize();
}
return authData;
}
/**
 * Resolves and memoizes the target bucket's id/name/publicUrl.
 * Uses the configured bucketId directly when present; otherwise looks the
 * bucket up by name via the B2 API and throws if it cannot be found.
 */
async function ensureBucket() {
  if (bucketInfo) {
    return bucketInfo;
  }
  await authenticate();
  if (bucketId) {
    // Caller supplied the bucket ID directly; the name may be unknown here.
    bucketInfo = { bucketId, bucketName: bucketName || 'unknown', publicUrl };
  } else if (bucketName) {
    // Resolve the bucket by name from the account's bucket listing.
    const { data } = await b2.listBuckets();
    const match = data.buckets.find((bucket) => bucket.bucketName === bucketName);
    if (!match) {
      throw new Error(`Bucket '${bucketName}' not found`);
    }
    bucketInfo = {
      bucketId: match.bucketId,
      bucketName: match.bucketName,
      publicUrl
    };
  }
  return bucketInfo;
}
/**
 * Computes the SHA1 hash of data
 * @param {Buffer|string} data - Buffer or file path
 * @returns {Promise<string>} The SHA1 hash as hexadecimal
 */
function getSha1Hash(data) {
  // Buffers can be hashed synchronously; wrap the result for a uniform API.
  if (Buffer.isBuffer(data)) {
    return Promise.resolve(crypto.createHash('sha1').update(data).digest('hex'));
  }
  // Strings are treated as file paths and hashed via a read stream.
  if (typeof data === 'string') {
    return new Promise((resolve, reject) => {
      const hash = crypto.createHash('sha1');
      fs.createReadStream(data)
        .on('error', reject)
        .on('data', (chunk) => hash.update(chunk))
        .on('end', () => resolve(hash.digest('hex')));
    });
  }
  return Promise.reject(new Error('Invalid data type for SHA1 hash. Must be a string (file path) or Buffer.'));
}
/**
 * Maps a file's extension (case-insensitive) to a MIME content type.
 * Unknown or missing extensions fall back to 'application/octet-stream'.
 */
function getContentType(fileName) {
  const MIME_BY_EXTENSION = {
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.webp': 'image/webp',
    '.mp4': 'video/mp4',
    '.mov': 'video/quicktime',
    '.avi': 'video/x-msvideo',
    '.pdf': 'application/pdf',
    '.txt': 'text/plain',
    '.json': 'application/json'
  };
  const extension = path.extname(fileName).toLowerCase();
  return MIME_BY_EXTENSION[extension] ?? 'application/octet-stream';
}
/**
 * Convert image to WebP format if it's an image; other files pass through
 * untouched with their detected content type.
 * NOTE(review): .webp inputs are re-encoded at quality 85 — confirm this
 * normalization is intended rather than a pass-through.
 * @param {string|Buffer} data - File path or buffer
 * @param {string} originalFileName - Original filename
 * @returns {Promise<Object>} - Processed data, fileName, and contentType
 */
async function processImageForUpload(data, originalFileName) {
  // Extension-based sniff only; content is not inspected.
  const isImage = /\.(jpg|jpeg|png|gif|bmp|tiff|webp)$/i.test(originalFileName);
  if (!isImage) {
    return {
      data: data,
      fileName: originalFileName,
      contentType: getContentType(originalFileName)
    };
  }
  try {
    let imageBuffer;
    if (Buffer.isBuffer(data)) {
      imageBuffer = data;
    } else if (typeof data === 'string') {
      // Non-blocking read (was fs.readFileSync, which stalls the event loop
      // inside this async function). Read errors land in the catch below,
      // preserving the original fallback behavior.
      imageBuffer = await fs.promises.readFile(data);
    } else {
      throw new Error('Invalid data type for image processing');
    }
    // Convert to WebP with quality optimization
    const webpBuffer = await sharp(imageBuffer)
      .webp({ quality: 85 })
      .toBuffer();
    // Generate new filename with .webp extension, preserving any directory prefix
    const baseName = path.basename(originalFileName, path.extname(originalFileName));
    const dirName = path.dirname(originalFileName);
    const webpFileName = dirName === '.' ? `${baseName}.webp` : `${dirName}/${baseName}.webp`;
    console.log(`[Image Processing] Converted ${originalFileName} to WebP format`);
    return {
      data: webpBuffer,
      fileName: webpFileName,
      contentType: 'image/webp'
    };
  } catch (error) {
    console.error(`[Image Processing] Error converting ${originalFileName} to WebP:`, error);
    // Fallback to original if conversion fails
    return {
      data: data,
      fileName: originalFileName,
      contentType: getContentType(originalFileName)
    };
  }
}
return {
b2,
/**
 * Uploads a file (Buffer or local file path) to the B2 bucket.
 * Image files are converted to WebP first; the stored name is snake_cased.
 *
 * @param {string} fileName - Desired file name (may include an extension).
 * @param {Buffer|string} data - File contents, or a path to a local file.
 * @param {Object} [options]
 * @param {Object} [options.metadata] - Extra key/value pairs stored as B2 info headers (keys are snake_cased).
 * @param {string} [options.folder] - Optional folder prefix inside the bucket.
 * @param {boolean} [options.generateUniqueId] - Append a short random suffix to avoid name collisions.
 * @returns {Promise<Object>} fileId, fileName, originalFileName, fileUrl, contentType, contentLength, uploadTimestamp.
 * @throws {Error} If data is neither a Buffer nor a file-path string.
 */
async upload(fileName, data, options = {}) {
await ensureBucket();
const {
metadata = {},
folder = '',
generateUniqueId = false
} = options;
// Generate unique filename if requested
let finalFileName = fileName;
if (generateUniqueId) {
const ext = path.extname(fileName);
const baseName = path.basename(fileName, ext);
const uniqueId = uuidv4().substring(0, 8);
finalFileName = `${baseName}-${uniqueId}${ext}`;
}
// Construct full file path with optional folder
const fullFileName = folder ? `${folder}/${finalFileName}` : finalFileName;
const snakeFileName = toSnakeCase(fullFileName);
// Process image for WebP conversion if applicable
// (images become .webp; other files keep their name and detected content type)
const processed = await processImageForUpload(data, snakeFileName);
// Get upload URL for the bucket
const uploadUrlResponse = await b2.getUploadUrl({
bucketId: bucketInfo.bucketId
});
// Prepare file content and hash
let fileContent = processed.data;
let contentLength;
let sha1;
if (Buffer.isBuffer(fileContent)) {
contentLength = fileContent.length;
sha1 = await getSha1Hash(fileContent);
} else if (typeof fileContent === 'string') {
// File-path input: size and hash are computed from disk, then streamed up.
const stats = fs.statSync(fileContent);
contentLength = stats.size;
sha1 = await getSha1Hash(fileContent);
fileContent = fs.createReadStream(fileContent);
} else {
throw new Error('Invalid data type for upload. Must be a string (file path) or Buffer.');
}
// Convert metadata keys to snake_case for B2 headers
const b2Info = {};
for (const [key, value] of Object.entries(metadata)) {
b2Info[toSnakeCase(key)] = value;
}
console.log(`[B2 Upload] Initiating upload for file: ${processed.fileName} (Size: ${contentLength} bytes)`);
// Upload the file
const uploadResponse = await b2.uploadFile({
uploadUrl: uploadUrlResponse.data.uploadUrl,
uploadAuthToken: uploadUrlResponse.data.authorizationToken,
fileName: processed.fileName,
data: fileContent,
contentLength,
contentType: processed.contentType,
hash: sha1,
info: {
'Content-Type': processed.contentType,
...b2Info
}
});
// Construct public URL for open buckets
// NOTE(review): the file name is not URL-encoded here — verify toSnakeCase
// strips characters that would need escaping in a URL path.
const publicFileUrl = `${bucketInfo.publicUrl}/file/${bucketInfo.bucketName}/${processed.fileName}`;
console.log(`[B2 Upload] Successfully uploaded file to B2: ${publicFileUrl}`);
return {
fileId: uploadResponse.data.fileId,
fileName: uploadResponse.data.fileName,
originalFileName: fileName,
fileUrl: publicFileUrl,
contentType: uploadResponse.data.contentType,
contentLength: uploadResponse.data.contentLength,
uploadTimestamp: uploadResponse.data.uploadTimestamp
};
},
async download(fileName, options = {}) {
await ensureBucket();
const { folder = '' } = options;
const fullFileName = folder ? `${folder}/${fileName}` : fileName;
const snakeFileName = toSnakeCase(fullFileName);
const downloadResponse = await b2.downloadFileByName({
bucketName: bucketInfo.bucketName,
fileName: snakeFileName
});
return {
data: downloadResponse.data,
contentType: downloadResponse.headers['content-type'],
contentLength: downloadResponse.headers['content-length'],
metadata: this.extractMetadata(downloadResponse.headers)
};
},
async delete(fileName, options = {}) {
await ensureBucket();
const { folder = '', fileId = null } = options;
let targetFileId = fileId;
let targetFileName = fileName;
if (folder) {
targetFileName = `${folder}/${fileName}`;
}
targetFileName = toSnakeCase(targetFileName);
// If fileId not provided, get it from file info
if (!targetFileId) {
const fileInfo = await this.getFileInfo(targetFileName);
if (!fileInfo) {
throw new Error(`File '${targetFileName}' not found`);
}
targetFileId = fileInfo.fileId;
}
await b2.deleteFileVersion({
fileId: targetFileId,
fileName: targetFileName
});
console.log(`[B2 Delete] Successfully deleted file: ${targetFileName}`);
return { success: true, fileName: targetFileName, fileId: targetFileId };
},
async list(options = {}) {
await ensureBucket();
const {
folder = '',
maxFileCount = 100,
startFileName = null
} = options;
const prefix = folder ? `${toSnakeCase(folder)}/` : '';
const listResponse = await b2.listFileNames({
bucketId: bucketInfo.bucketId,
maxFileCount,
startFileName,
prefix
});
return listResponse.data.files.map(file => ({
fileId: file.fileId,
fileName: file.fileName,
size: file.size,
uploadTimestamp: file.uploadTimestamp,
action: file.action,
fileUrl: `${bucketInfo.publicUrl}/file/${bucketInfo.bucketName}/${file.fileName}`
}));
},
async getFileInfo(fileName) {
await ensureBucket();
try {
const listResponse = await b2.listFileNames({
bucketId: bucketInfo.bucketId,
maxFileCount: 1,
startFileName: fileName
});
const file = listResponse.data.files.find(f => f.fileName === fileName);
if (file) {
return {
...file,
fileUrl: `${bucketInfo.publicUrl}/file/${bucketInfo.bucketName}/${file.fileName}`
};
}
return null;
} catch (error) {
return null;
}
},
async getDownloadUrl(fileName, options = {}) {
await ensureBucket();
const { folder = '', validDurationInSeconds = 3600 } = options;
const fullFileName = folder ? `${folder}/${fileName}` : fileName;
const snakeFileName = toSnakeCase(fullFileName);
// For public buckets, return the direct public URL
const publicFileUrl = `${bucketInfo.publicUrl}/file/${bucketInfo.bucketName}/${snakeFileName}`;
// For signed URLs (if needed for private access)
if (validDurationInSeconds && validDurationInSeconds > 0) {
try {
const downloadAuth = await b2.getDownloadAuthorization({
bucketId: bucketInfo.bucketId,
fileNamePrefix: snakeFileName,
validDurationInSeconds
});
return `${publicFileUrl}?Authorization=${downloadAuth.data.authorizationToken}`;
} catch (error) {
console.warn('[B2 Signed URL] Could not generate signed URL, returning public URL');
return publicFileUrl;
}
}
return publicFileUrl;
},
async createFolder(folderName) {
// B2 doesn't have actual folders, but we can create a placeholder file
const folderPath = `${toSnakeCase(folderName)}/`;
return await this.upload('.bzEmpty', Buffer.from(''), {
folder: folderPath.slice(0, -1), // Remove trailing slash
metadata: {
contentType: 'application/x-bz-folder',
createdAt: new Date().toISOString()
}
});
},
extractMetadata(headers) {
const metadata = {};
for (const [key, value] of Object.entries(headers)) {
if (key.startsWith('x-bz-info-')) {
const metaKey = key.replace('x-bz-info-', '').replace(/_/g, '-');
metadata[metaKey] = value;
}
}
return metadata;
},
async getBucketInfo() {
await ensureBucket();
return bucketInfo;
},
async query(operation, params = {}) {
// Generic method for custom B2 operations
await authenticate();
return await b2[operation](params);
}
};
}