s3-cli-js

A TypeScript-based npm package that replaces the AWS CLI for S3 operations by using presigned URLs.

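The compiled module below exports a single function, syncCommand(client, source, destination, options). As a rough sketch of how it might be called, the snippet that follows uses only the signature and the option names visible in the code below (dryRun, delete, sizeOnly, exactTimestamps, concurrency); the import path and the createPresignedClient helper are placeholders, since the package's entry point and client constructor are not part of this file.

// Sketch only: the entry-point export and the client factory are assumptions;
// only this compiled sync module is shown on this page.
import { syncCommand } from "s3-cli-js";

// Placeholder for however the package constructs its presigned-URL client.
// syncCommand requires that it expose listObjects() and deleteObject(), plus
// whatever the concurrent upload/download helpers invoke.
declare function createPresignedClient(config: Record<string, unknown>): any;

async function main(): Promise<void> {
  const client = createPresignedClient({ /* endpoint, credentials, ... */ });

  // Option names mirror the fields read by the sync logic below.
  await syncCommand(client, "./build", "s3://my-bucket/site", {
    dryRun: false,          // print the sync plan without transferring
    delete: true,           // remove destination files missing from the source
    sizeOnly: false,        // keep MD5/ETag comparison enabled
    exactTimestamps: false, // otherwise a 1-second mtime tolerance applies
    concurrency: 5,         // parallel transfers (the default used below)
  });
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});

When dryRun is true, the function prints the planned uploads, downloads, deletes, and skips without executing them, which is an inexpensive way to preview a sync before running it for real. The compiled source of the sync module follows.
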
"use strict"; /** * Advanced sync command implementation */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.syncCommand = syncCommand; const fs = __importStar(require("fs")); const path = __importStar(require("path")); const crypto = __importStar(require("crypto")); const chalk_1 = __importDefault(require("chalk")); const progress_1 = require("../utils/progress"); const concurrent_1 = require("../utils/concurrent"); /** * Advanced sync command with timestamp comparison and proper sync logic */ async function syncCommand(client, source, destination, options = {}) { try { const sourceIsS3 = (0, progress_1.isS3Uri)(source); const destIsS3 = (0, progress_1.isS3Uri)(destination); if (sourceIsS3 && destIsS3) { throw new Error('S3 to S3 sync is not supported in this version'); } console.log(chalk_1.default.blue(`Starting sync: ${source} -> ${destination}`)); console.log(chalk_1.default.blue(`Options: ${JSON.stringify(options, null, 2)}`)); let result; if (sourceIsS3 && !destIsS3) { // Download from S3 to local result = await syncFromS3ToLocal(client, source, destination, options); } else if (!sourceIsS3 && destIsS3) { // Upload from local to S3 result = await syncFromLocalToS3(client, source, destination, options); } else { throw new Error('At least one of source or destination must be an S3 URI'); } // Print summary printSyncSummary(result, options.dryRun || false); } catch (error) { console.error(chalk_1.default.red(`Sync failed: ${error.message}`)); throw error; } } /** * Sync from local directory to S3 */ async function syncFromLocalToS3(client, localDir, s3Uri, options) { const { bucket, key: keyPrefix } = (0, progress_1.parseS3Uri)(s3Uri); // Get local files const localFiles = await getLocalFiles(localDir, options); console.log(chalk_1.default.blue(`Found ${localFiles.length} local files`)); // Get S3 objects const s3Objects = await getS3Objects(client, bucket, keyPrefix, options); console.log(chalk_1.default.blue(`Found ${s3Objects.length} S3 objects`)); // Compare and determine actions const actions = await compareLocalToS3(localFiles, s3Objects, localDir, bucket, keyPrefix, options); // Execute actions return await executeSyncActions(client, actions, 
options); } /** * Sync from S3 to local directory */ async function syncFromS3ToLocal(client, s3Uri, localDir, options) { const { bucket, key: keyPrefix } = (0, progress_1.parseS3Uri)(s3Uri); // Get S3 objects const s3Objects = await getS3Objects(client, bucket, keyPrefix, options); console.log(chalk_1.default.blue(`Found ${s3Objects.length} S3 objects`)); // Get local files const localFiles = await getLocalFiles(localDir, options); console.log(chalk_1.default.blue(`Found ${localFiles.length} local files`)); // Compare and determine actions const actions = await compareS3ToLocal(s3Objects, localFiles, bucket, keyPrefix, localDir, options); // Execute actions return await executeSyncActions(client, actions, options); } /** * Calculate MD5 hash of a file */ function calculateFileMD5(filePath) { const fileBuffer = fs.readFileSync(filePath); const hashSum = crypto.createHash('md5'); hashSum.update(fileBuffer); return hashSum.digest('hex'); } /** * Check if ETag is a valid MD5 hash (not multipart upload) */ function isValidMD5ETag(etag) { // Remove quotes if present const cleanETag = etag.replace(/"/g, ''); // MD5 ETags are 32 hex characters, multipart ETags contain a dash return /^[a-f0-9]{32}$/i.test(cleanETag) && !cleanETag.includes('-'); } /** * Get local files with metadata */ async function getLocalFiles(dir, options) { const files = []; if (!fs.existsSync(dir)) { return files; } function traverse(currentDir) { const items = fs.readdirSync(currentDir); for (const item of items) { const fullPath = path.join(currentDir, item); const stats = fs.statSync(fullPath); if (stats.isFile()) { const filename = path.basename(fullPath); if ((0, progress_1.shouldIncludeFile)(filename, options.include, options.exclude)) { // Calculate MD5 by default (unless disabled) let md5Hash; if (!options.sizeOnly && !options.force && !options.noChecksum) { try { md5Hash = calculateFileMD5(fullPath); } catch (error) { console.warn(chalk_1.default.yellow(`Warning: Could not calculate MD5 for ${fullPath}: ${error}`)); } } files.push({ path: fullPath, size: stats.size, lastModified: stats.mtime, md5: md5Hash, isDirectory: false }); } } else if (stats.isDirectory()) { traverse(fullPath); } } } traverse(dir); return files; } /** * Get S3 objects with metadata */ async function getS3Objects(client, bucket, keyPrefix, options) { const objects = []; let continuationToken; do { const result = await client.listObjects({ bucket, prefix: keyPrefix, continuationToken, }); for (const object of result.objects) { const filename = path.basename(object.key); if ((0, progress_1.shouldIncludeFile)(filename, options.include, options.exclude)) { objects.push({ path: object.key, size: object.size, lastModified: object.lastModified, etag: object.etag, isDirectory: false }); } } continuationToken = result.nextContinuationToken; } while (continuationToken); return objects; } /** * Compare local files to S3 objects and determine sync actions */ async function compareLocalToS3(localFiles, s3Objects, localDir, bucket, keyPrefix, options) { const actions = []; // Create maps for efficient lookup const s3Map = new Map(); s3Objects.forEach(obj => { const relativePath = obj.path.startsWith(keyPrefix) ? 
obj.path.slice(keyPrefix.length).replace(/^\//, '') : obj.path; s3Map.set(relativePath, obj); }); // Check each local file for (const localFile of localFiles) { const relativePath = path.relative(localDir, localFile.path).replace(/\\/g, '/'); const s3Object = s3Map.get(relativePath); const compareResult = compareFiles(localFile, s3Object, options, 'localToS3'); // Construct S3 key properly - handle empty keyPrefix and avoid double slashes let s3Key; if (!keyPrefix || keyPrefix === '') { s3Key = relativePath; } else if (keyPrefix.endsWith('/')) { s3Key = keyPrefix + relativePath; } else { s3Key = keyPrefix + '/' + relativePath; } // Clean up any double slashes and remove leading slash s3Key = s3Key.replace(/\/+/g, '/').replace(/^\//, ''); if (compareResult === 'upload') { actions.push({ type: 'upload', reason: s3Object ? 'File differs' : 'File not in destination', sourceFile: localFile, destFile: s3Object, localPath: localFile.path, remotePath: s3Key, bucket: bucket, key: s3Key }); } else if (compareResult === 'skip') { actions.push({ type: 'skip', reason: 'Files are identical', sourceFile: localFile, destFile: s3Object, localPath: localFile.path, remotePath: s3Key, bucket: bucket, key: s3Key }); } // Remove from map to track processed files s3Map.delete(relativePath); } // Handle files that exist in S3 but not locally (for --delete) if (options.delete) { for (const [relativePath, s3Object] of s3Map) { actions.push({ type: 'delete', reason: 'File not in source', destFile: s3Object, localPath: path.join(localDir, relativePath), remotePath: s3Object.path, bucket: bucket, key: s3Object.path }); } } return actions; } /** * Compare S3 objects to local files and determine sync actions */ async function compareS3ToLocal(s3Objects, localFiles, bucket, keyPrefix, localDir, options) { const actions = []; // Create maps for efficient lookup const localMap = new Map(); localFiles.forEach(file => { const relativePath = path.relative(localDir, file.path).replace(/\\/g, '/'); localMap.set(relativePath, file); }); // Check each S3 object for (const s3Object of s3Objects) { const relativePath = s3Object.path.startsWith(keyPrefix) ? s3Object.path.slice(keyPrefix.length).replace(/^\//, '') : s3Object.path; const localFile = localMap.get(relativePath); const compareResult = compareFiles(s3Object, localFile, options, 's3ToLocal'); const localPath = path.join(localDir, relativePath); if (compareResult === 'download') { actions.push({ type: 'download', reason: localFile ? 
'File differs' : 'File not in destination', sourceFile: s3Object, destFile: localFile, localPath: localPath, remotePath: s3Object.path, bucket: bucket, key: s3Object.path }); } else if (compareResult === 'skip') { actions.push({ type: 'skip', reason: 'Files are identical', sourceFile: s3Object, destFile: localFile, localPath: localPath, remotePath: s3Object.path, bucket: bucket, key: s3Object.path }); } // Remove from map to track processed files localMap.delete(relativePath); } // Handle files that exist locally but not in S3 (for --delete) if (options.delete) { for (const [relativePath, localFile] of localMap) { actions.push({ type: 'delete', reason: 'File not in source', destFile: localFile, localPath: localFile.path, remotePath: path.posix.join(keyPrefix, relativePath) }); } } return actions; } /** * Compare two files and determine sync action needed */ function compareFiles(sourceFile, destFile, options, syncDirection) { // If source doesn't exist, no action needed (unless deleting) if (!sourceFile) { return 'skip'; } // If destination doesn't exist, need to copy if (!destFile) { return syncDirection === 's3ToLocal' ? 'download' : 'upload'; } // If force option is set, always copy if (options.force) { return syncDirection === 's3ToLocal' ? 'download' : 'upload'; } // Size comparison if (sourceFile.size !== destFile.size) { return syncDirection === 's3ToLocal' ? 'download' : 'upload'; } // If size-only comparison is requested, files are identical if (options.sizeOnly) { return 'skip'; } // Checksum comparison (default behavior, unless disabled) if (!options.sizeOnly && !options.force && !options.noChecksum) { if (syncDirection === 's3ToLocal') { // S3 to local: sourceFile is S3, destFile is local // Compare S3 ETag with local MD5 if (sourceFile.etag && destFile.md5 && isValidMD5ETag(sourceFile.etag)) { const cleanETag = sourceFile.etag.replace(/"/g, ''); if (cleanETag.toLowerCase() !== destFile.md5.toLowerCase()) { return 'download'; } return 'skip'; // Checksums match } } else if (syncDirection === 'localToS3') { // Local to S3: sourceFile is local, destFile is S3 // Compare local MD5 with S3 ETag if (sourceFile.md5 && destFile.etag && isValidMD5ETag(destFile.etag)) { const cleanETag = destFile.etag.replace(/"/g, ''); if (sourceFile.md5.toLowerCase() !== cleanETag.toLowerCase()) { return 'upload'; } return 'skip'; // Checksums match } } // If checksum comparison was attempted but not possible (e.g., multipart upload), // fall back to timestamp comparison if ((sourceFile.etag && !isValidMD5ETag(sourceFile.etag)) || (destFile.etag && !isValidMD5ETag(destFile.etag))) { // Only show warning in verbose mode or when explicitly using checksum if (options.checksum) { console.warn(chalk_1.default.yellow(`Warning: Cannot use checksum comparison for multipart upload file. Falling back to timestamp comparison.`)); } } } // Timestamp comparison (default behavior or fallback) const sourceTime = sourceFile.lastModified.getTime(); const destTime = destFile.lastModified.getTime(); if (options.exactTimestamps) { // Exact timestamp match required if (sourceTime !== destTime) { return syncDirection === 's3ToLocal' ? 'download' : 'upload'; } } else { // Allow for small timestamp differences (1 second tolerance) const timeDiff = Math.abs(sourceTime - destTime); if (timeDiff > 1000) { // More than 1 second difference // Always sync if there's a significant time difference // The source is considered the authoritative version return syncDirection === 's3ToLocal' ? 
'download' : 'upload'; } } // Files are considered identical return 'skip'; } /** * Execute sync actions */ async function executeSyncActions(client, actions, options) { const result = { actions, uploaded: 0, downloaded: 0, deleted: 0, skipped: 0, totalBytes: 0, errors: [] }; if (actions.length === 0) { console.log(chalk_1.default.green('Everything is already in sync!')); return result; } // Separate actions by type const uploadActions = actions.filter(a => a.type === 'upload'); const downloadActions = actions.filter(a => a.type === 'download'); const deleteActions = actions.filter(a => a.type === 'delete'); const skipActions = actions.filter(a => a.type === 'skip'); console.log(chalk_1.default.blue(`\nSync plan:`)); console.log(chalk_1.default.blue(` Upload: ${uploadActions.length} files`)); console.log(chalk_1.default.blue(` Download: ${downloadActions.length} files`)); console.log(chalk_1.default.blue(` Delete: ${deleteActions.length} files`)); console.log(chalk_1.default.blue(` Skip: ${skipActions.length} files`)); if (options.dryRun) { console.log(chalk_1.default.yellow('\n[DRY RUN] Actions that would be performed:')); actions.forEach(action => { const symbol = getActionSymbol(action.type); console.log(chalk_1.default.yellow(`${symbol} ${action.type}: ${action.localPath} (${action.reason})`)); }); return result; } const concurrency = options.concurrency || 5; // Execute uploads if (uploadActions.length > 0) { console.log(chalk_1.default.blue(`\nUploading ${uploadActions.length} files...`)); await executeUploads(client, uploadActions, concurrency, result); } // Execute downloads if (downloadActions.length > 0) { console.log(chalk_1.default.blue(`\nDownloading ${downloadActions.length} files...`)); await executeDownloads(client, downloadActions, concurrency, result); } // Execute deletes if (deleteActions.length > 0) { console.log(chalk_1.default.blue(`\nDeleting ${deleteActions.length} files...`)); await executeDeletes(client, deleteActions, result); } result.skipped = skipActions.length; result.totalBytes = actions.reduce((sum, action) => { return sum + (action.sourceFile?.size || 0); }, 0); return result; } /** * Execute upload actions */ async function executeUploads(client, uploadActions, concurrency, result) { const uploadOperations = uploadActions.map(action => { return { type: 'upload', localPath: action.localPath, bucket: action.bucket, key: action.key }; }); try { const uploadResult = await (0, concurrent_1.executeConcurrentOperations)(client, uploadOperations, { concurrency, dryRun: false, onProgress: (progress) => { console.log(chalk_1.default.cyan(`Upload progress: ${progress.completed}/${progress.total}`)); } }); result.uploaded = uploadResult.completed.length; result.errors.push(...uploadResult.failed.map(f => ({ action: uploadActions.find(a => a.localPath === f.operation.localPath), error: f.error }))); } catch (error) { console.error(chalk_1.default.red(`Upload failed: ${error}`)); } } /** * Execute download actions */ async function executeDownloads(client, downloadActions, concurrency, result) { const downloadOperations = downloadActions.map(action => ({ type: 'download', localPath: action.localPath, bucket: action.bucket, key: action.key, size: action.sourceFile?.size || 0 })); try { const downloadResult = await (0, concurrent_1.executeConcurrentOperations)(client, downloadOperations, { concurrency, dryRun: false, onProgress: (progress) => { console.log(chalk_1.default.cyan(`Download progress: ${progress.completed}/${progress.total}`)); } }); result.downloaded = 
downloadResult.completed.length; result.errors.push(...downloadResult.failed.map(f => ({ action: downloadActions.find(a => a.localPath === f.operation.localPath), error: f.error }))); } catch (error) { console.error(chalk_1.default.red(`Download failed: ${error}`)); } } /** * Execute delete actions */ async function executeDeletes(client, deleteActions, result) { for (const action of deleteActions) { try { if (action.key && action.bucket) { // Delete S3 object await client.deleteObject(action.bucket, action.key); console.log(chalk_1.default.red(`✗ Deleted: s3://${action.bucket}/${action.key}`)); } else { // Delete local file if (fs.existsSync(action.localPath)) { fs.unlinkSync(action.localPath); console.log(chalk_1.default.red(`✗ Deleted: ${action.localPath}`)); } } result.deleted++; } catch (error) { console.error(chalk_1.default.red(`Failed to delete ${action.localPath}: ${error}`)); result.errors.push({ action, error: error }); } } } /** * Get symbol for action type */ function getActionSymbol(type) { switch (type) { case 'upload': return '↑'; case 'download': return '↓'; case 'delete': return '✗'; case 'skip': return '='; default: return '?'; } } /** * Print sync summary */ function printSyncSummary(result, isDryRun) { const prefix = isDryRun ? '[DRY RUN] ' : ''; console.log(chalk_1.default.green(`\n${prefix}Sync completed!`)); console.log(chalk_1.default.green(` Uploaded: ${result.uploaded} files`)); console.log(chalk_1.default.green(` Downloaded: ${result.downloaded} files`)); console.log(chalk_1.default.green(` Deleted: ${result.deleted} files`)); console.log(chalk_1.default.green(` Skipped: ${result.skipped} files`)); console.log(chalk_1.default.green(` Total data: ${(0, progress_1.formatBytes)(result.totalBytes)}`)); if (result.errors.length > 0) { console.log(chalk_1.default.red(` Errors: ${result.errors.length}`)); result.errors.forEach(({ action, error }) => { console.log(chalk_1.default.red(` ${action.localPath}: ${error.message}`)); }); } } //# sourceMappingURL=sync.js.map