// @devicecloud.dev/dcd: better cloud Maestro testing
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.formatDurationSeconds = exports.writeJSONFile = exports.uploadBinary = exports.verifyAppZip = exports.compressFilesFromRelativePath = exports.compressFolderToBlob = exports.toBuffer = void 0;
const core_1 = require("@oclif/core");
const archiver = require("archiver");
const node_crypto_1 = require("node:crypto");
const node_fs_1 = require("node:fs");
const promises_1 = require("node:fs/promises");
const path = require("node:path");
const node_stream_1 = require("node:stream");
const StreamZip = require("node-stream-zip");
const api_gateway_1 = require("./gateways/api-gateway");
const supabase_gateway_1 = require("./gateways/supabase-gateway");
const metadata_extractor_service_1 = require("./services/metadata-extractor.service");
const styling_1 = require("./utils/styling");
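// MIME types for the upload formats handled by the CLI; extensions not listed here produce a Blob with no explicit type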
const mimeTypeLookupByExtension = {
apk: 'application/vnd.android.package-archive',
yaml: 'application/x-yaml',
zip: 'application/zip',
};
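/**
* Finalizes an archiver instance and collects its output into a single Buffer
* @param archive Archiver instance to finalize and drain
* @returns Promise resolving to a Buffer containing the complete archive
*/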
const toBuffer = async (archive) => {
const chunks = [];
const writable = new node_stream_1.Writable();
writable._write = (chunk, _, callback) => {
// save to array to concatenate later
chunks.push(chunk);
callback();
};
// pipe to writable
archive.pipe(writable);
await archive.finalize();
// once done, concatenate chunks
return Buffer.concat(chunks);
};
exports.toBuffer = toBuffer;
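/**
* Compresses a folder into an in-memory zip; the folder's last path segment becomes the root entry
* @param sourceDir Path to the folder to compress
* @returns Promise resolving to a Blob of type application/zip
*/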
const compressFolderToBlob = async (sourceDir) => {
const archive = archiver('zip', {
zlib: { level: 9 },
});
archive.on('error', (err) => {
throw err;
});
archive.directory(sourceDir, sourceDir.split('/').pop());
const buffer = await (0, exports.toBuffer)(archive);
return new Blob([buffer], { type: 'application/zip' });
};
exports.compressFolderToBlob = compressFolderToBlob;
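/**
* Compresses a list of files into an in-memory zip, stripping a common root from entry names
* @param basePath Directory that each file path is resolved against
* @param files Relative paths of the files to include
* @param commonRoot Path prefix removed from each file path to form its zip entry name
* @returns Promise resolving to a Buffer containing the zip archive
*/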
const compressFilesFromRelativePath = async (basePath, files, commonRoot) => {
const archive = archiver('zip', {
zlib: { level: 9 },
});
archive.on('error', (err) => {
throw err;
});
for (const file of files) {
archive.file(path.resolve(basePath, file), {
name: file.replace(commonRoot, ''),
});
}
const buffer = await (0, exports.toBuffer)(archive);
// await writeFile('./my-zip.zip', buffer);
return buffer;
};
exports.compressFilesFromRelativePath = compressFilesFromRelativePath;
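/**
* Verifies that a zip archive contains exactly one top-level entry and that it is a .app directory
* @param zipPath Path to the zip file to inspect
* @throws Error when the archive does not contain a single top-level .app entry
*/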
const verifyAppZip = async (zipPath) => {
// eslint-disable-next-line import/namespace, new-cap
const zip = new StreamZip.async({
file: zipPath,
storeEntries: true,
});
try {
const entries = await zip.entries();
// An entry is top level when its name has no second path segment (e.g. "MyApp.app/")
const topLevelEntries = Object.values(entries).filter((entry) => !entry.name.split('/')[1]);
if (topLevelEntries.length !== 1 ||
!topLevelEntries[0].name.endsWith('.app/')) {
throw new Error('Zip file must contain exactly one entry which is a .app, check the contents of the zip file');
}
}
finally {
// Always release the zip file handle, even when verification fails
await zip.close();
}
};
exports.verifyAppZip = verifyAppZip;
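/**
* Uploads an app binary, skipping the upload when a binary with the same SHA-256 already exists
* @param config Upload configuration: filePath, apiUrl, apiKey, plus optional ignoreShaCheck, log, and debug flags
* @returns Promise resolving to the binary ID (existing or newly created)
* @example
* // Illustrative call; the key and file path are placeholders
* const binaryId = await uploadBinary({
*   apiKey: process.env.DCD_API_KEY,
*   apiUrl: 'https://api.devicecloud.dev',
*   filePath: './app-release.apk',
* });
*/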
const uploadBinary = async (config) => {
const { filePath, apiUrl, apiKey, ignoreShaCheck = false, log = true, debug = false } = config;
if (log) {
core_1.ux.action.start(styling_1.colors.bold('Checking and uploading binary'), styling_1.colors.dim('Initializing'), {
stdout: true,
});
}
if (debug) {
console.log('[DEBUG] Binary upload started');
console.log(`[DEBUG] File path: ${filePath}`);
console.log(`[DEBUG] API URL: ${apiUrl}`);
console.log(`[DEBUG] Ignore SHA check: ${ignoreShaCheck}`);
}
const startTime = Date.now();
try {
// Prepare file for upload
const file = await prepareFileForUpload(filePath, debug, startTime);
// Calculate SHA hash
const sha = await calculateFileHash(file, debug, log);
// Check for existing upload with same SHA
if (!ignoreShaCheck && sha) {
const { exists, binaryId } = await checkExistingUpload(apiUrl, apiKey, sha, debug);
if (exists && binaryId) {
if (log) {
core_1.ux.info(styling_1.colors.dim('SHA hash matches existing binary with ID: ') + (0, styling_1.formatId)(binaryId) + styling_1.colors.dim(', skipping upload. Force upload with --ignore-sha-check'));
core_1.ux.action.stop(styling_1.colors.info('Skipping upload'));
}
return binaryId;
}
}
// Perform the upload
const uploadId = await performUpload({ apiKey, apiUrl, debug, file, filePath, sha, startTime });
if (log) {
core_1.ux.action.stop(styling_1.colors.success('\n✓ Binary uploaded with ID: ') + (0, styling_1.formatId)(uploadId));
}
return uploadId;
}
catch (error) {
if (log) {
core_1.ux.action.stop(styling_1.colors.error('✗ Failed'));
}
if (debug) {
console.error('[DEBUG] === BINARY UPLOAD FAILED ===');
console.error('[DEBUG] Binary upload failed:', error);
console.error(`[DEBUG] Error type: ${error instanceof Error ? error.name : typeof error}`);
console.error(`[DEBUG] Error message: ${error instanceof Error ? error.message : String(error)}`);
if (error instanceof Error && error.stack) {
console.error(`[DEBUG] Stack trace: ${error.stack}`);
}
console.error(`[DEBUG] Failed after ${Date.now() - startTime}ms`);
}
// Re-throw unchanged; NetworkError instances already carry detailed troubleshooting info
throw error;
}
};
exports.uploadBinary = uploadBinary;
/**
* Prepares a file for upload by reading or compressing it
* @param filePath Path to the file to upload
* @param debug Whether debug logging is enabled
* @param startTime Timestamp when upload started
* @returns Promise resolving to prepared File object
*/
async function prepareFileForUpload(filePath, debug, startTime) {
if (debug) {
console.log('[DEBUG] Preparing file for upload...');
}
let file;
if (filePath?.endsWith('.app')) {
if (debug) {
console.log('[DEBUG] Compressing .app folder to zip...');
}
// Validate that the .app directory exists before attempting to compress
// Without this check, archiver silently creates an empty 22-byte zip for non-existent paths
try {
await (0, promises_1.access)(filePath);
}
catch {
// Provide helpful error message for common quoting issues
const hasQuotes = filePath.includes("'") || filePath.includes('"');
const errorMessage = [
`App folder not found: ${filePath}`,
'',
hasQuotes
? 'Note: Your path contains quote characters. If the folder name has spaces, ensure quotes wrap the entire path:'
: 'Note: If your folder name contains spaces, wrap the entire path in quotes:',
hasQuotes
? ` ❌ Wrong: --app-file=./path/'My App Name.app' (quotes become literal characters)`
: ` Example: --app-file="./path/My App Name.app"`,
hasQuotes
? ` ✅ Right: --app-file="./path/My App Name.app" (quotes processed by shell)`
: '',
]
.filter(Boolean)
.join('\n');
throw new Error(errorMessage);
}
const zippedAppBlob = await (0, exports.compressFolderToBlob)(filePath);
file = new File([zippedAppBlob], filePath + '.zip');
if (debug) {
console.log(`[DEBUG] Compressed file size: ${(zippedAppBlob.size / 1024 / 1024).toFixed(2)} MB`);
}
}
else {
if (debug) {
console.log('[DEBUG] Reading binary file...');
}
const fileBuffer = await (0, promises_1.readFile)(filePath);
if (debug) {
console.log(`[DEBUG] File size: ${(fileBuffer.length / 1024 / 1024).toFixed(2)} MB`);
}
const binaryBlob = new Blob([new Uint8Array(fileBuffer)], {
type: mimeTypeLookupByExtension[filePath.split('.').pop()],
});
file = new File([binaryBlob], filePath);
}
if (debug) {
console.log(`[DEBUG] File preparation completed in ${Date.now() - startTime}ms`);
}
return file;
}
/**
* Calculates SHA-256 hash for a file
* @param file File to calculate hash for
* @param debug Whether debug logging is enabled
* @param log Whether to log warnings
* @returns Promise resolving to SHA-256 hash or undefined if failed
*/
async function calculateFileHash(file, debug, log) {
try {
if (debug) {
console.log('[DEBUG] Calculating SHA-256 hash...');
}
const hashStartTime = Date.now();
const sha = await getFileHashFromFile(file);
if (debug) {
console.log(`[DEBUG] SHA-256 hash: ${sha}`);
console.log(`[DEBUG] Hash calculation completed in ${Date.now() - hashStartTime}ms`);
}
return sha;
}
catch (error) {
if (log) {
console.warn('Warning: Failed to get file hash', error);
}
if (debug) {
console.error('[DEBUG] Hash calculation failed:', error);
}
return undefined;
}
}
/**
* Checks if an upload with the same SHA already exists
* @param apiUrl API base URL
* @param apiKey API authentication key
* @param sha SHA-256 hash to check
* @param debug Whether debug logging is enabled
* @returns Promise resolving to object with exists flag and optional binaryId
*/
async function checkExistingUpload(apiUrl, apiKey, sha, debug) {
try {
if (debug) {
console.log('[DEBUG] Checking for existing upload with matching SHA...');
console.log(`[DEBUG] Target endpoint: ${apiUrl}/uploads/checkForExistingUpload`);
}
const shaCheckStartTime = Date.now();
const { appBinaryId, exists } = await api_gateway_1.ApiGateway.checkForExistingUpload(apiUrl, apiKey, sha);
if (debug) {
console.log(`[DEBUG] SHA check completed in ${Date.now() - shaCheckStartTime}ms`);
console.log(`[DEBUG] Existing binary found: ${exists}`);
if (exists) {
console.log(`[DEBUG] Existing binary ID: ${appBinaryId}`);
}
}
return { binaryId: appBinaryId, exists };
}
catch (error) {
if (debug) {
console.error('[DEBUG] === SHA CHECK FAILED ===');
console.error('[DEBUG] Continuing with upload despite SHA check failure');
console.error(`[DEBUG] Error type: ${error instanceof Error ? error.name : typeof error}`);
console.error(`[DEBUG] Error message: ${error instanceof Error ? error.message : String(error)}`);
if (error instanceof Error && error.stack) {
console.error(`[DEBUG] Stack trace:\n${error.stack}`);
}
}
else if (error instanceof Error && error.name === 'NetworkError') {
// Even without debug, show a warning for network errors
console.warn('\nWarning: Failed to check for existing binary upload (network error).');
console.warn('Continuing with new upload...\n');
}
return { exists: false };
}
}
/**
* Uploads file to Supabase using resumable uploads
* @param env - Environment (dev or prod)
* @param tempPath - Temporary staging path for upload
* @param file - File to upload
* @param debug - Enable debug logging
* @returns Upload result with success status and any error
*/
async function uploadToSupabase(env, tempPath, file, debug) {
if (debug) {
console.log(`[DEBUG] Uploading to Supabase storage (${env}) using resumable uploads...`);
console.log(`[DEBUG] Staging path: ${tempPath}`);
console.log(`[DEBUG] File size: ${(file.size / 1024 / 1024).toFixed(2)} MB`);
}
try {
const uploadStartTime = Date.now();
await supabase_gateway_1.SupabaseGateway.uploadResumable(env, tempPath, file, debug);
if (debug) {
const uploadDuration = Date.now() - uploadStartTime;
const uploadDurationSeconds = uploadDuration / 1000;
const uploadSpeed = (file.size / 1024 / 1024) / uploadDurationSeconds;
console.log(`[DEBUG] Supabase resumable upload completed in ${uploadDurationSeconds.toFixed(2)}s (${uploadDuration}ms)`);
console.log(`[DEBUG] Average upload speed: ${uploadSpeed.toFixed(2)} MB/s`);
}
return { error: null, success: true };
}
catch (error) {
const uploadError = error instanceof Error ? error : new Error(String(error));
if (debug) {
console.error(`[DEBUG] === SUPABASE RESUMABLE UPLOAD FAILED ===`);
console.error(`[DEBUG] Error message: ${uploadError.message}`);
console.error(`[DEBUG] Error name: ${uploadError.name}`);
if (uploadError.stack) {
console.error(`[DEBUG] Error stack:\n${uploadError.stack}`);
}
console.error(`[DEBUG] Staging path: ${tempPath}`);
console.error(`[DEBUG] File size: ${file.size} bytes`);
console.log('[DEBUG] Will attempt Backblaze fallback if available...');
}
return { error: uploadError, success: false };
}
}
/**
* Handles Backblaze upload with appropriate strategy
* @param config - Configuration object for Backblaze upload
* @returns Upload result with success status and any error
*/
async function handleBackblazeUpload(config) {
const { b2, apiUrl, apiKey, finalPath, file, filePath, debug, supabaseSuccess } = config;
if (!b2) {
if (debug && !supabaseSuccess) {
console.log('[DEBUG] Backblaze not configured, cannot fallback');
}
return { error: null, success: false };
}
if (debug) {
console.log(supabaseSuccess ? '[DEBUG] Starting Backblaze backup upload...' : '[DEBUG] Starting Backblaze fallback upload...');
}
try {
const b2UploadStartTime = Date.now();
let backblazeSuccess = false;
if (b2.strategy === 'simple' && b2.simple) {
const simple = b2.simple;
backblazeSuccess = await uploadToBackblaze(simple.uploadUrl, simple.authorizationToken, `organizations/${finalPath}`, file, debug);
}
else if (b2.strategy === 'large' && b2.large) {
const large = b2.large;
backblazeSuccess = await uploadLargeFileToBackblaze({
apiKey,
apiUrl,
debug,
fileId: large.fileId,
fileName: `organizations/${finalPath}`,
fileObject: file,
filePath,
fileSize: file.size,
uploadPartUrls: large.uploadPartUrls,
});
}
if (debug) {
const duration = Date.now() - b2UploadStartTime;
const durationSeconds = duration / 1000;
console.log(backblazeSuccess
? `[DEBUG] Backblaze upload completed successfully in ${durationSeconds.toFixed(2)}s (${duration}ms)`
: `[DEBUG] Backblaze upload failed after ${durationSeconds.toFixed(2)}s (${duration}ms)`);
}
return { error: null, success: backblazeSuccess };
}
catch (error) {
const b2Error = error instanceof Error ? error : new Error(String(error));
if (debug) {
console.error(`[DEBUG] === UNEXPECTED BACKBLAZE UPLOAD ERROR ===`);
console.error(`[DEBUG] Error message: ${b2Error.message}`);
console.error(`[DEBUG] Error name: ${b2Error.name}`);
if (b2Error.stack) {
console.error(`[DEBUG] Error stack:\n${b2Error.stack}`);
}
console.error(`[DEBUG] Upload strategy: ${b2.strategy}`);
}
return { error: b2Error, success: false };
}
}
/**
* Requests upload URL and paths from API
* @param apiUrl - API base URL
* @param apiKey - API authentication key
* @param filePath - Path to the file being uploaded
* @param fileSize - Size of the file in bytes
* @param debug - Enable debug logging
* @returns Promise resolving to upload paths and configuration
*/
async function requestUploadPaths(apiUrl, apiKey, filePath, fileSize, debug) {
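// Only .apk is treated as Android; the other supported formats (.app, .zip) upload as iOS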
const platform = filePath?.endsWith('.apk') ? 'android' : 'ios';
if (debug) {
console.log('[DEBUG] Requesting upload URL...');
console.log(`[DEBUG] Target endpoint: ${apiUrl}/uploads/getBinaryUploadUrl`);
console.log(`[DEBUG] Platform: ${platform}`);
}
try {
const urlRequestStartTime = Date.now();
const { id, tempPath, finalPath, b2 } = await api_gateway_1.ApiGateway.getBinaryUploadUrl(apiUrl, apiKey, platform, fileSize);
if (debug) {
const hasStrategy = b2 && typeof b2 === 'object' && 'strategy' in b2;
console.log(`[DEBUG] Upload URL request completed in ${Date.now() - urlRequestStartTime}ms`);
console.log(`[DEBUG] Upload ID: ${id}`);
console.log(`[DEBUG] Temp path (TUS upload): ${tempPath}`);
console.log(`[DEBUG] Final path (after finalize): ${finalPath}`);
console.log(`[DEBUG] Backblaze upload URL provided: ${Boolean(b2)}`);
if (hasStrategy)
console.log(`[DEBUG] Backblaze strategy: ${b2.strategy}`);
}
if (!tempPath)
throw new Error('No upload path provided by API');
return { b2, finalPath, id, tempPath };
}
catch (error) {
if (debug) {
console.error('[DEBUG] === FAILED TO GET UPLOAD URL ===');
console.error(`[DEBUG] Error: ${error instanceof Error ? error.message : String(error)}`);
}
// Add context to the error
if (error instanceof Error) {
if (error.name === 'NetworkError') {
throw new Error(`Failed to request upload URL from API.\n\n${error.message}`);
}
throw new Error(`Failed to request upload URL: ${error.message}`);
}
throw error;
}
}
/**
* Extracts metadata from the binary file
* @param filePath - Path to the binary file
* @param debug - Enable debug logging
* @returns Promise resolving to extracted metadata containing appId and platform
*/
async function extractBinaryMetadata(filePath, debug) {
if (debug)
console.log('[DEBUG] Extracting app metadata...');
const metadataExtractor = new metadata_extractor_service_1.MetadataExtractorService();
const metadata = await metadataExtractor.extract(filePath);
if (!metadata) {
throw new Error(`Failed to extract metadata from ${filePath}. Supported formats: .apk, .app, .zip`);
}
if (debug)
console.log(`[DEBUG] Metadata extracted: ${JSON.stringify(metadata)}`);
return metadata;
}
/**
* Validates upload results and throws if all uploads failed
* @param supabaseSuccess - Whether Supabase upload succeeded
* @param backblazeSuccess - Whether Backblaze upload succeeded
* @param lastError - Last error encountered during uploads
* @param b2 - Backblaze configuration
* @param debug - Enable debug logging
* @returns void - throws error if all uploads failed
*/
function validateUploadResults(supabaseSuccess, backblazeSuccess, lastError, b2, debug) {
if (supabaseSuccess || backblazeSuccess) {
return;
}
if (debug) {
console.error(`[DEBUG] === ALL UPLOADS FAILED ===`);
console.error(`[DEBUG] Supabase upload: FAILED`);
console.error(`[DEBUG] Backblaze upload: ${b2 ? 'FAILED' : 'NOT CONFIGURED'}`);
if (lastError) {
console.error(`[DEBUG] Final error details:`);
console.error(`[DEBUG] - Message: ${lastError.message}`);
console.error(`[DEBUG] - Name: ${lastError.name}`);
console.error(`[DEBUG] - Stack: ${lastError.stack}`);
}
}
throw new Error(`All uploads failed. ${lastError ? `Last error: ${JSON.stringify({ message: lastError.message, name: lastError.name, stack: lastError.stack })}` : 'No upload targets available.'}`);
}
/**
* Performs the actual file upload
* @param config Configuration object for the upload
* @returns Promise resolving to upload ID
*/
async function performUpload(config) {
const { filePath, apiUrl, apiKey, file, sha, debug, startTime } = config;
// Request upload URL and paths
const { id, tempPath, finalPath, b2 } = await requestUploadPaths(apiUrl, apiKey, filePath, file.size, debug);
// Extract app metadata
const metadata = await extractBinaryMetadata(filePath, debug);
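// The production API URL maps to the prod storage environment; any other URL is treated as dev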
const env = apiUrl === 'https://api.devicecloud.dev' ? 'prod' : 'dev';
// Upload to Supabase
const supabaseResult = await uploadToSupabase(env, tempPath, file, debug);
let lastError = supabaseResult.error;
// Upload to Backblaze
const backblazeResult = await handleBackblazeUpload({
apiKey,
apiUrl,
b2,
debug,
file,
filePath,
finalPath,
supabaseSuccess: supabaseResult.success,
});
// If Supabase failed and Backblaze also errored, surface the Backblaze error as the final failure
if (!supabaseResult.success && backblazeResult.error) {
lastError = backblazeResult.error;
}
// Validate results
validateUploadResults(supabaseResult.success, backblazeResult.success, lastError, b2, debug);
// Log upload summary
if (debug) {
const hasWarning = !supabaseResult.success && backblazeResult.success;
console.log(`[DEBUG] Upload summary - Supabase: ${supabaseResult.success ? '✓' : '✗'}, Backblaze: ${backblazeResult.success ? '✓' : '✗'}`);
console.log('[DEBUG] Finalizing upload...');
console.log(`[DEBUG] Target endpoint: ${apiUrl}/uploads/finaliseUpload`);
console.log(`[DEBUG] Uploaded to staging path: ${tempPath}`);
console.log(`[DEBUG] API will move to final path: ${finalPath}`);
console.log(`[DEBUG] Supabase upload status: ${supabaseResult.success ? 'SUCCESS' : 'FAILED'}`);
console.log(`[DEBUG] Backblaze upload status: ${backblazeResult.success ? 'SUCCESS' : 'FAILED'}`);
if (hasWarning)
console.log('[DEBUG] ⚠ Warning: File only exists in Backblaze (Supabase failed)');
}
// Finalize upload
const finalizeStartTime = Date.now();
await api_gateway_1.ApiGateway.finaliseUpload({
apiKey,
backblazeSuccess: backblazeResult.success,
baseUrl: apiUrl,
id,
metadata,
path: tempPath,
sha,
supabaseSuccess: supabaseResult.success,
});
if (debug) {
console.log(`[DEBUG] Upload finalization completed in ${Date.now() - finalizeStartTime}ms`);
console.log(`[DEBUG] Total upload time: ${Date.now() - startTime}ms`);
}
return id;
}
/**
* Upload file to Backblaze using signed URL (simple upload for files < 100MB)
* @param uploadUrl - Backblaze upload URL
* @param authorizationToken - Authorization token for the upload
* @param fileName - Name/path of the file
* @param file - File to upload
* @param debug - Whether debug logging is enabled
* @returns Promise that resolves when upload completes or fails gracefully
*/
async function uploadToBackblaze(uploadUrl, authorizationToken, fileName, file, debug) {
try {
const arrayBuffer = await file.arrayBuffer();
// Calculate SHA1 hash for Backblaze (B2 requires SHA1, not SHA256)
const sha1 = (0, node_crypto_1.createHash)('sha1');
sha1.update(Buffer.from(arrayBuffer));
const sha1Hex = sha1.digest('hex');
// Detect if this is an S3 pre-signed URL (authorization token is empty)
const isS3PreSignedUrl = !authorizationToken;
if (debug) {
console.log(`[DEBUG] Uploading to Backblaze URL: ${uploadUrl}`);
console.log(`[DEBUG] Upload method: ${isS3PreSignedUrl ? 'S3 pre-signed URL (PUT)' : 'B2 native API (POST)'}`);
console.log(`[DEBUG] File name: ${fileName}`);
console.log(`[DEBUG] File SHA1: ${sha1Hex}`);
}
// Build headers based on upload method
const headers = {
'Content-Length': file.size.toString(),
'Content-Type': file.type || 'application/octet-stream',
'X-Bz-Content-Sha1': sha1Hex,
};
// S3 pre-signed URLs have auth embedded in URL, native B2 uses Authorization header
if (!isS3PreSignedUrl) {
headers.Authorization = authorizationToken;
headers['X-Bz-File-Name'] = encodeURIComponent(fileName);
}
const response = await fetch(uploadUrl, {
body: arrayBuffer,
headers,
method: isS3PreSignedUrl ? 'PUT' : 'POST',
});
if (!response.ok) {
const errorText = await response.text();
if (debug) {
console.error(`[DEBUG] Backblaze upload failed with status ${response.status}: ${errorText}`);
}
// Don't throw - we don't want Backblaze failures to block the primary upload
console.warn(`Warning: Backblaze upload failed with status ${response.status}`);
return false;
}
if (debug) {
console.log('[DEBUG] Backblaze upload successful');
}
return true;
}
catch (error) {
if (debug) {
console.error('[DEBUG] === BACKBLAZE UPLOAD EXCEPTION ===');
console.error('[DEBUG] Backblaze upload exception:', error);
console.error(`[DEBUG] Error type: ${error instanceof Error ? error.name : typeof error}`);
console.error(`[DEBUG] Error message: ${error instanceof Error ? error.message : String(error)}`);
if (error instanceof Error && error.stack) {
console.error(`[DEBUG] Stack trace:\n${error.stack}`);
}
}
// Provide more specific error messages for common network errors
if (error instanceof TypeError && error.message === 'fetch failed') {
if (debug) {
console.error('[DEBUG] Network error detected - could be DNS, connection, or SSL issue');
}
console.warn('Warning: Backblaze upload failed due to network error');
}
else {
// Don't throw - we don't want Backblaze failures to block the primary upload
console.warn(`Warning: Backblaze upload failed: ${error instanceof Error ? error.message : String(error)}`);
}
return false;
}
}
/**
* Helper function to read a chunk from a file stream
* @param filePath - Path to the file
* @param start - Start byte position
* @param end - End byte position (exclusive)
* @returns Promise resolving to Buffer containing the chunk
*/
async function readFileChunk(filePath, start, end) {
return new Promise((resolve, reject) => {
const chunks = [];
const stream = (0, node_fs_1.createReadStream)(filePath, { start, end: end - 1 }); // end is inclusive in createReadStream
stream.on('data', (chunk) => {
chunks.push(chunk);
});
stream.on('end', () => {
resolve(Buffer.concat(chunks));
});
stream.on('error', (error) => {
reject(error);
});
});
}
/**
* Helper function to read a chunk from a File/Blob object
* @param file - File or Blob object
* @param start - Start byte position
* @param end - End byte position (exclusive)
* @returns Promise resolving to Buffer containing the chunk
*/
async function readFileObjectChunk(file, start, end) {
const slice = file.slice(start, end);
const arrayBuffer = await slice.arrayBuffer();
return Buffer.from(arrayBuffer);
}
/**
* Upload large file to Backblaze using multi-part upload with streaming (for files >= 5MB)
* Uses file streaming to avoid loading entire file into memory, preventing OOM errors on large files
* @param config - Configuration object for the large file upload
* @returns Promise that resolves when upload completes or fails gracefully
*/
async function uploadLargeFileToBackblaze(config) {
const { apiUrl, apiKey, fileId, uploadPartUrls, filePath, fileSize, debug, fileObject } = config;
try {
const partSha1Array = [];
// Calculate part size (divide file evenly across all parts)
const partSize = Math.ceil(fileSize / uploadPartUrls.length);
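// e.g. a 500 MB file with 5 part URLs gives ceil(524288000 / 5) = 104857600 bytes (100 MB) per part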
if (debug) {
console.log(`[DEBUG] Uploading large file in ${uploadPartUrls.length} parts (streaming mode)`);
console.log(`[DEBUG] Part size: ${(partSize / 1024 / 1024).toFixed(2)} MB`);
console.log(`[DEBUG] Reading from: ${fileObject ? 'in-memory File object' : filePath}`);
}
// Upload each part using streaming to avoid loading entire file into memory
for (let i = 0; i < uploadPartUrls.length; i++) {
const partNumber = i + 1;
const start = i * partSize;
const end = Math.min(start + partSize, fileSize);
const partLength = end - start;
if (debug) {
console.log(`[DEBUG] Reading part ${partNumber}/${uploadPartUrls.length} bytes ${start}-${end}`);
}
// Read part from File object (for .app bundles) or from disk
const partBuffer = fileObject
? await readFileObjectChunk(fileObject, start, end)
: await readFileChunk(filePath, start, end);
// Calculate SHA1 for this part
const sha1 = (0, node_crypto_1.createHash)('sha1');
sha1.update(partBuffer);
const sha1Hex = sha1.digest('hex');
partSha1Array.push(sha1Hex);
if (debug) {
console.log(`[DEBUG] Uploading part ${partNumber}/${uploadPartUrls.length} (${(partLength / 1024 / 1024).toFixed(2)} MB, SHA1: ${sha1Hex})`);
}
try {
const response = await fetch(uploadPartUrls[i].uploadUrl, {
body: new Uint8Array(partBuffer),
headers: {
Authorization: uploadPartUrls[i].authorizationToken,
'Content-Length': partLength.toString(),
'X-Bz-Content-Sha1': sha1Hex,
'X-Bz-Part-Number': partNumber.toString(),
},
method: 'POST',
});
if (!response.ok) {
const errorText = await response.text();
if (debug) {
console.error(`[DEBUG] Part ${partNumber} upload failed with status ${response.status}: ${errorText}`);
}
throw new Error(`Part ${partNumber} upload failed with status ${response.status}`);
}
}
catch (error) {
if (error instanceof TypeError && error.message === 'fetch failed') {
if (debug) {
console.error(`[DEBUG] Network error uploading part ${partNumber} - could be DNS, connection, or SSL issue`);
}
throw new Error(`Part ${partNumber} upload failed due to network error`);
}
throw error;
}
if (debug) {
console.log(`[DEBUG] Part ${partNumber}/${uploadPartUrls.length} uploaded successfully`);
}
}
// Validate all parts were uploaded
if (partSha1Array.length !== uploadPartUrls.length) {
const errorMsg = `Part count mismatch: uploaded ${partSha1Array.length} parts but expected ${uploadPartUrls.length}`;
if (debug) {
console.error(`[DEBUG] ${errorMsg}`);
}
throw new Error(errorMsg);
}
// Finish the large file upload
if (debug) {
console.log('[DEBUG] Finishing large file upload...');
console.log(`[DEBUG] Finalizing ${partSha1Array.length} parts with fileId: ${fileId}`);
}
await api_gateway_1.ApiGateway.finishLargeFile(apiUrl, apiKey, fileId, partSha1Array);
if (debug) {
console.log('[DEBUG] Large file upload completed successfully');
}
return true;
}
catch (error) {
if (debug) {
console.error('[DEBUG] === BACKBLAZE LARGE FILE UPLOAD EXCEPTION ===');
console.error('[DEBUG] Large file upload exception:', error);
console.error(`[DEBUG] Error type: ${error instanceof Error ? error.name : typeof error}`);
console.error(`[DEBUG] Error message: ${error instanceof Error ? error.message : String(error)}`);
if (error instanceof Error && error.stack) {
console.error(`[DEBUG] Stack trace:\n${error.stack}`);
}
}
// Provide more specific error messages
if (error instanceof Error && error.message.includes('network error')) {
console.warn('Warning: Backblaze large file upload failed due to network error');
}
else {
// Don't throw - we don't want Backblaze failures to block the primary upload
console.warn(`Warning: Backblaze large file upload failed: ${error instanceof Error ? error.message : String(error)}`);
}
return false;
}
}
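/**
* Computes the SHA-256 hash of a File by streaming its contents chunk by chunk
* @param file File whose contents are hashed
* @returns Promise resolving to the hex-encoded SHA-256 digest
*/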
async function getFileHashFromFile(file) {
return new Promise((resolve, reject) => {
const hash = (0, node_crypto_1.createHash)('sha256');
const stream = file.stream();
const reader = stream.getReader();
const processChunks = async () => {
try {
let readerResult = await reader.read();
while (!readerResult.done) {
const { value } = readerResult;
hash.update(value);
readerResult = await reader.read();
}
resolve(hash.digest('hex'));
}
catch (error) {
reject(error);
}
};
processChunks();
});
}
/**
* Writes JSON data to a file with error handling
* @param filePath - Path to the output JSON file
* @param data - Data to be serialized to JSON
* @param logger - Logger object with log and warn methods
* @returns true if successful, false if an error occurred
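* @example
* // Illustrative usage; logger can be any object with log and warn methods (e.g. console)
* writeJSONFile('./results.json', { passed: true }, console);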
*/
const writeJSONFile = (filePath, data, logger) => {
try {
(0, node_fs_1.writeFileSync)(filePath, JSON.stringify(data, null, 2));
logger.log(styling_1.colors.dim('JSON output written to: ') + styling_1.colors.highlight(path.resolve(filePath)));
return true;
}
catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
const isPermissionError = errorMessage.includes('EACCES') || errorMessage.includes('EPERM');
const isNoSuchFileError = errorMessage.includes('ENOENT');
logger.warn(styling_1.colors.warning('⚠') + ' ' + styling_1.colors.error(`Failed to write JSON output to file: ${filePath}`));
if (isPermissionError) {
logger.warn(styling_1.colors.dim(' Permission denied - check file/directory write permissions'));
logger.warn(styling_1.colors.dim(' Try running with appropriate permissions or choose a different output location'));
}
else if (isNoSuchFileError) {
logger.warn(styling_1.colors.dim(' Directory does not exist - create the directory first or choose an existing path'));
}
logger.warn(styling_1.colors.dim(' Error details: ') + errorMessage);
return false;
}
};
exports.writeJSONFile = writeJSONFile;
/**
* Formats duration in seconds into a human readable string
* @param durationSeconds - Duration in seconds
* @returns Formatted duration string (e.g. "2m 30s" or "45s")
*/
const formatDurationSeconds = (durationSeconds) => {
const minutes = Math.floor(durationSeconds / 60);
const seconds = durationSeconds % 60;
if (minutes > 0) {
return `${minutes}m ${seconds}s`;
}
return `${durationSeconds}s`;
};
exports.formatDurationSeconds = formatDurationSeconds;