s3-cli-js
Version:
A TypeScript-based npm package that replaces AWS CLI for S3 operations using presigned URLs
314 lines • 13.6 kB
JavaScript
;
/**
* Copy files/objects command implementation
*/
// ---- TypeScript (tsc) CommonJS interop helpers — compiler-generated boilerplate ----
// __createBinding(o, m, k, k2): re-export property `k` of module `m` on object `o`
// (optionally renamed to `k2`), using a live getter where descriptors are supported.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Use a live getter when re-exporting from a plain CommonJS module (no
    // __esModule marker) or when the source property could still change.
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    // Legacy engines without Object.create: plain (non-live) property copy.
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault(o, v): expose the CommonJS module object `v` as `o.default`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar(mod): emulate `import * as ns from 'mod'` for CommonJS modules —
// copies every own property except "default" onto a fresh namespace object and
// attaches the module itself as `default`. Real ES modules pass through unchanged.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// __importDefault(mod): emulate `import x from 'mod'` — wraps CommonJS exports
// in a `{ default: ... }` shape when the __esModule marker is absent.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.copyCommand = copyCommand;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const chalk_1 = __importDefault(require("chalk"));
const progress_1 = require("../utils/progress");
const concurrent_1 = require("../utils/concurrent");
/**
 * Entry point for the `cp` command: routes a copy request to an upload or a
 * download depending on which side is an s3:// URI.
 *
 * S3→S3 copies and local→local copies are rejected. Any error is printed in
 * red and terminates the process with exit code 1 (CLI behavior).
 *
 * @param client - S3 client used for transfers
 * @param source - Local path or s3:// URI to copy from
 * @param destination - Local path or s3:// URI to copy to
 * @param options - Copy options (recursive, include, exclude, concurrency, dryRun)
 */
async function copyCommand(client, source, destination, options = {}) {
    try {
        const fromS3 = (0, progress_1.isS3Uri)(source);
        const toS3 = (0, progress_1.isS3Uri)(destination);
        if (fromS3 && toS3) {
            // S3 to S3 copy (not implemented in this version)
            throw new Error('S3 to S3 copy is not supported in this version');
        }
        if (fromS3) {
            // Download from S3 to local
            await downloadFromS3(client, source, destination, options);
        }
        else if (toS3) {
            // Upload from local to S3
            await uploadToS3(client, source, destination, options);
        }
        else {
            // Neither side is S3 — nothing for this tool to do
            throw new Error('At least one of source or destination must be an S3 URI');
        }
    }
    catch (error) {
        console.error(chalk_1.default.red(`Error: ${error.message}`));
        process.exit(1);
    }
}
/**
 * Upload a local file or directory to S3.
 *
 * @param client - S3 client used for the uploads
 * @param source - Local file or directory path
 * @param destination - s3://bucket/key destination URI
 * @param options - { recursive, include, exclude, concurrency, dryRun }
 * @throws {Error} If the source does not exist, is a directory and
 *   options.recursive is not set, or is neither a regular file nor a directory.
 */
async function uploadToS3(client, source, destination, options) {
    const { bucket, key } = (0, progress_1.parseS3Uri)(destination);
    if (!fs.existsSync(source)) {
        throw new Error(`Source path does not exist: ${source}`);
    }
    const sourceStats = fs.statSync(source);
    if (sourceStats.isFile()) {
        // Single file upload
        await uploadSingleFile(client, source, bucket, key, options);
    }
    else if (sourceStats.isDirectory()) {
        if (!options.recursive) {
            throw new Error('Use --recursive flag to copy directories');
        }
        // Directory upload
        await uploadDirectory(client, source, bucket, key, options);
    }
    else {
        // Fix: previously fell through silently for FIFOs, sockets, devices, etc.
        throw new Error(`Source is not a regular file or directory: ${source}`);
    }
}
/**
 * Download a file or key prefix from S3 to the local filesystem.
 *
 * A key that is empty or ends with '/' is treated as a directory (prefix)
 * download and requires options.recursive; anything else is a single-object
 * download.
 *
 * @param client - S3 client used for the downloads
 * @param source - s3://bucket/key source URI
 * @param destination - Local path to write to
 * @param options - { recursive, include, exclude, concurrency, dryRun }
 * @throws {Error} If a prefix download is requested without --recursive.
 */
async function downloadFromS3(client, source, destination, options) {
    const { bucket, key } = (0, progress_1.parseS3Uri)(source);
    const isPrefix = key === '' || key.endsWith('/');
    if (!isPrefix) {
        // Single file download
        await downloadSingleFile(client, bucket, key, destination, options);
        return;
    }
    // Directory (prefix) download
    if (!options.recursive) {
        throw new Error('Use --recursive flag to copy directories');
    }
    await downloadDirectory(client, bucket, key, destination, options);
}
/**
 * Upload a single local file to S3 with a progress bar.
 *
 * Returns silently when the filename does not pass the include/exclude
 * filters. If `key` ends with '/', it is treated as a directory prefix and
 * the local filename is appended to form the final object key.
 *
 * @param client - S3 client used for the upload
 * @param filePath - Path of the local file to upload
 * @param bucket - Destination bucket name
 * @param key - Destination object key (or prefix when ending in '/')
 * @param options - { include, exclude, dryRun }
 */
async function uploadSingleFile(client, filePath, bucket, key, options) {
    const filename = path.basename(filePath);
    if (!(0, progress_1.shouldIncludeFile)(filename, options.include, options.exclude)) {
        return;
    }
    // If key ends with '/', treat it as a directory and append filename
    const finalKey = key.endsWith('/') ? (0, progress_1.joinS3Path)(key, filename) : key;
    if (options.dryRun) {
        console.log(chalk_1.default.blue(`[DRY RUN] upload: ${filePath} to s3://${bucket}/${finalKey}`));
        return;
    }
    const fileStats = fs.statSync(filePath);
    // Fix: the label previously printed the literal text "$(unknown)";
    // interpolate the actual filename instead.
    const progressBar = (0, progress_1.createProgressBar)(fileStats.size, `Uploading ${filename}`);
    const progressCallback = (0, progress_1.createProgressCallback)(progressBar);
    console.log(chalk_1.default.blue(`upload: ${filePath} to s3://${bucket}/${finalKey}`));
    await client.uploadFile(filePath, bucket, finalKey, progressCallback);
    console.log(chalk_1.default.green(`✓ Upload completed: ${(0, progress_1.formatBytes)(fileStats.size)}`));
}
/**
 * Download a single object from S3 to a local path with a progress bar.
 *
 * Returns silently when the object's basename does not pass the
 * include/exclude filters. If `destination` is an existing directory the
 * object's basename is appended to it; missing parent directories are
 * created.
 *
 * @param client - S3 client used for the download
 * @param bucket - Source bucket name
 * @param key - Key of the object to download
 * @param destination - Local file path, or an existing directory
 * @param options - { include, exclude, dryRun }
 * @throws {Error} If the object does not exist in the bucket.
 */
async function downloadSingleFile(client, bucket, key, destination, options) {
    const filename = path.basename(key);
    if (!(0, progress_1.shouldIncludeFile)(filename, options.include, options.exclude)) {
        return;
    }
    // Check if object exists
    const exists = await client.objectExists(bucket, key);
    if (!exists) {
        throw new Error(`Object does not exist: s3://${bucket}/${key}`);
    }
    // Determine final destination path
    let finalDestination = destination;
    if (fs.existsSync(destination) && fs.statSync(destination).isDirectory()) {
        finalDestination = path.join(destination, filename);
    }
    if (options.dryRun) {
        console.log(chalk_1.default.blue(`[DRY RUN] download: s3://${bucket}/${key} to ${finalDestination}`));
        return;
    }
    // Create directory if it doesn't exist
    const destDir = path.dirname(finalDestination);
    if (!fs.existsSync(destDir)) {
        fs.mkdirSync(destDir, { recursive: true });
    }
    // Get object info for progress bar (size falls back to 0 when not listed)
    const objects = await client.listObjects({ bucket, prefix: key });
    const object = objects.objects.find(obj => obj.key === key);
    const fileSize = object?.size || 0;
    // Fix: the label previously printed the literal text "$(unknown)";
    // interpolate the actual filename instead.
    const progressBar = (0, progress_1.createProgressBar)(fileSize, `Downloading ${filename}`);
    const progressCallback = (0, progress_1.createProgressCallback)(progressBar);
    console.log(chalk_1.default.blue(`download: s3://${bucket}/${key} to ${finalDestination}`));
    await client.downloadFile(bucket, key, finalDestination, progressCallback);
    console.log(chalk_1.default.green(`✓ Download completed: ${(0, progress_1.formatBytes)(fileSize)}`));
}
/**
 * Upload every file under `sourceDir` to s3://bucket/keyPrefix, preserving
 * the relative directory layout.
 *
 * Files are filtered by basename (not full path) against options.include /
 * options.exclude. With concurrency === 1 files are uploaded one at a time
 * via uploadSingleFile; otherwise a batch of operations is built and
 * executed concurrently, logging progress and a failure summary.
 *
 * @param client - S3 client used for the uploads
 * @param sourceDir - Local directory to walk recursively
 * @param bucket - Destination bucket name
 * @param keyPrefix - Key prefix the relative paths are joined onto
 * @param options - { include, exclude, concurrency, dryRun }
 */
async function uploadDirectory(client, sourceDir, bucket, keyPrefix, options) {
    const allFiles = getAllFiles(sourceDir);
    // Filter files based on include/exclude patterns (matched on basename only)
    const files = allFiles.filter(file => {
        const filename = path.basename(file);
        return (0, progress_1.shouldIncludeFile)(filename, options.include, options.exclude);
    });
    if (files.length === 0) {
        console.log(chalk_1.default.yellow('No files to upload'));
        return;
    }
    const concurrency = options.concurrency || 5; // Default to 5 concurrent uploads (|| also maps 0 to 5)
    if (concurrency === 1) {
        // Use sequential processing for single concurrency
        for (const file of files) {
            const relativePath = path.relative(sourceDir, file);
            // Normalize Windows separators so S3 keys always use '/'
            const s3Key = (0, progress_1.joinS3Path)(keyPrefix, relativePath.replace(/\\/g, '/'));
            await uploadSingleFile(client, file, bucket, s3Key, options);
        }
        return;
    }
    // Use concurrent processing
    console.log(chalk_1.default.blue(`Starting upload of ${files.length} files with concurrency: ${concurrency}`));
    const operations = (0, concurrent_1.createUploadOperations)(sourceDir, bucket, keyPrefix, files);
    let lastProgress = { completed: 0, total: 0, failed: 0 };
    const result = await (0, concurrent_1.executeConcurrentOperations)(client, operations, {
        concurrency,
        dryRun: options.dryRun,
        onProgress: (progress) => {
            // Only log progress updates periodically to avoid spam: log when the
            // completed/failed counts change, or unconditionally on completion.
            if (progress.completed !== lastProgress.completed ||
                progress.failed !== lastProgress.failed ||
                progress.completed === progress.total) {
                console.log(chalk_1.default.cyan(`Progress: ${progress.completed}/${progress.total} completed, ` +
                    `${progress.failed} failed, ${progress.inProgress} in progress ` +
                    `(${(0, progress_1.formatBytes)(progress.completedBytes)}/${(0, progress_1.formatBytes)(progress.totalBytes)})`));
                lastProgress = { ...progress };
            }
        }
    });
    // Summarize failures individually, then print an overall summary line.
    if (result.failed.length > 0) {
        console.log(chalk_1.default.red(`\n${result.failed.length} uploads failed:`));
        result.failed.forEach(({ operation, error }) => {
            console.log(chalk_1.default.red(` ✗ ${operation.localPath}: ${error.message}`));
        });
    }
    console.log(chalk_1.default.green(`\nUpload completed: ${result.completed.length} files, ` +
        `${result.failed.length} failed, ${(0, progress_1.formatBytes)(result.totalBytes)} total`));
}
/**
 * Download every object under s3://bucket/keyPrefix into `destination`,
 * preserving the key layout relative to the prefix.
 *
 * Objects are listed page by page (continuation tokens), filtered by
 * basename against options.include / options.exclude, then downloaded
 * either sequentially (concurrency === 1) or concurrently with progress
 * logging and a failure summary.
 *
 * @param client - S3 client used for listing and downloads
 * @param bucket - Source bucket name
 * @param keyPrefix - Key prefix to list under ('' lists the whole bucket)
 * @param destination - Local directory the files are written into
 * @param options - { include, exclude, concurrency, dryRun }
 */
async function downloadDirectory(client, bucket, keyPrefix, destination, options) {
    // First, collect all objects to download
    const allObjects = [];
    let continuationToken;
    console.log(chalk_1.default.blue('Listing objects to download...'));
    do {
        const result = await client.listObjects({
            bucket,
            prefix: keyPrefix,
            continuationToken,
        });
        // Filter objects based on include/exclude patterns (basename only)
        const filteredObjects = result.objects.filter(object => {
            const filename = path.basename(object.key);
            return (0, progress_1.shouldIncludeFile)(filename, options.include, options.exclude);
        });
        allObjects.push(...filteredObjects.map(obj => ({
            key: obj.key,
            size: obj.size
        })));
        continuationToken = result.nextContinuationToken;
    } while (continuationToken); // keep paging until no continuation token is returned
    if (allObjects.length === 0) {
        console.log(chalk_1.default.yellow('No files to download'));
        return;
    }
    const concurrency = options.concurrency || 5; // Default to 5 concurrent downloads (|| also maps 0 to 5)
    if (concurrency === 1) {
        // Use sequential processing for single concurrency
        for (const object of allObjects) {
            // Strip the prefix so the local layout mirrors the S3 layout beneath it
            const relativePath = object.key.startsWith(keyPrefix)
                ? object.key.slice(keyPrefix.length)
                : object.key;
            const localPath = path.join(destination, relativePath);
            await downloadSingleFile(client, bucket, object.key, localPath, options);
        }
        return;
    }
    // Use concurrent processing
    console.log(chalk_1.default.blue(`Starting download of ${allObjects.length} files with concurrency: ${concurrency}`));
    const operations = (0, concurrent_1.createDownloadOperations)(bucket, allObjects, keyPrefix, destination);
    let lastProgress = { completed: 0, total: 0, failed: 0 };
    const result = await (0, concurrent_1.executeConcurrentOperations)(client, operations, {
        concurrency,
        dryRun: options.dryRun,
        onProgress: (progress) => {
            // Only log progress updates periodically to avoid spam: log when the
            // completed/failed counts change, or unconditionally on completion.
            if (progress.completed !== lastProgress.completed ||
                progress.failed !== lastProgress.failed ||
                progress.completed === progress.total) {
                console.log(chalk_1.default.cyan(`Progress: ${progress.completed}/${progress.total} completed, ` +
                    `${progress.failed} failed, ${progress.inProgress} in progress ` +
                    `(${(0, progress_1.formatBytes)(progress.completedBytes)}/${(0, progress_1.formatBytes)(progress.totalBytes)})`));
                lastProgress = { ...progress };
            }
        }
    });
    // Summarize failures individually, then print an overall summary line.
    if (result.failed.length > 0) {
        console.log(chalk_1.default.red(`\n${result.failed.length} downloads failed:`));
        result.failed.forEach(({ operation, error }) => {
            console.log(chalk_1.default.red(` ✗ ${operation.key}: ${error.message}`));
        });
    }
    console.log(chalk_1.default.green(`\nDownload completed: ${result.completed.length} files, ` +
        `${result.failed.length} failed, ${(0, progress_1.formatBytes)(result.totalBytes)} total`));
}
/**
 * Recursively collect the paths of every regular file under `dir`.
 *
 * Traversal order matches a depth-first walk that recurses into each
 * subdirectory at the point it is encountered in readdir order.
 *
 * @param {string} dir - Root directory to walk.
 * @returns {string[]} Full paths of all files found.
 */
function getAllFiles(dir) {
    return fs.readdirSync(dir).flatMap((entry) => {
        const entryPath = path.join(dir, entry);
        const info = fs.statSync(entryPath);
        if (info.isFile()) {
            return [entryPath];
        }
        if (info.isDirectory()) {
            return getAllFiles(entryPath);
        }
        return []; // neither file nor directory (e.g. FIFO) — skip, as before
    });
}
//# sourceMappingURL=cp.js.map