s3-cli-js
A TypeScript-based npm package that replaces the AWS CLI for S3 operations, using presigned URLs
"use strict";
/**
 * CLI interface for S3 operations
 */
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const chalk_1 = __importDefault(require("chalk"));
const s3_client_1 = require("./s3-client");
const config_1 = require("./utils/config");
const ls_1 = require("./commands/ls");
const cp_1 = require("./commands/cp");
const rm_1 = require("./commands/rm");
const mb_1 = require("./commands/mb");
const rb_1 = require("./commands/rb");
const sync_1 = require("./commands/sync");
const program = new commander_1.Command();
// Global options
program
    .name('s3-cli')
    .description('TypeScript-based S3 CLI tool using presigned URLs')
    .version('1.0.0')
    .option('--region <region>', 'AWS region')
    .option('--profile <profile>', 'AWS profile to use')
    .option('--endpoint-url <url>', 'Custom S3 endpoint URL');
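// Illustrative invocation of the global options (the binary name "s3-cli" is taken
// from .name() above; the endpoint URL is just a placeholder for any S3-compatible service):
//   s3-cli --region eu-west-1 --endpoint-url http://localhost:9000 ls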
// List command
program
    .command('ls')
    .description('List S3 buckets and objects')
    .argument('[path]', 'S3 path (s3://bucket/key) or empty for buckets')
    .option('-r, --recursive', 'List recursively')
    .option('--human-readable', 'Display file sizes in human readable format')
    .option('--summarize', 'Display summary information')
    .action(async (path, options) => {
    const client = await createS3Client(program.opts());
    await (0, ls_1.listCommand)(client, path, options);
});
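// Illustrative usage of the flags registered above (examples, not package docs):
//   s3-cli ls
//   s3-cli ls s3://my-bucket/photos/ --recursive --human-readable --summarize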
// Copy command
program
    .command('cp')
    .description('Copy files between local filesystem and S3')
    .argument('<source>', 'Source path (local file/directory or S3 URI)')
    .argument('<destination>', 'Destination path (local file/directory or S3 URI)')
    .option('-r, --recursive', 'Copy recursively')
    .option('--dry-run', 'Show what would be copied without actually copying')
    .option('--exclude <patterns...>', 'Exclude files matching these patterns')
    .option('--include <patterns...>', 'Include only files matching these patterns')
    .option('--concurrency <number>', 'Number of concurrent operations (default: 5)', '5')
    .action(async (source, destination, options) => {
    const client = await createS3Client(program.opts());
    const copyOptions = {
        ...options,
        concurrency: parseInt(options.concurrency, 10)
    };
    await (0, cp_1.copyCommand)(client, source, destination, copyOptions);
});
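// Illustrative usage:
//   s3-cli cp ./photo.jpg s3://my-bucket/photos/photo.jpg
//   s3-cli cp s3://my-bucket/photos/ ./photos/ --recursive --exclude "*.tmp" --concurrency 10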
// Remove command
program
    .command('rm')
    .description('Remove S3 objects')
    .argument('<path>', 'S3 path to remove (s3://bucket/key)')
    .option('-r, --recursive', 'Remove recursively')
    .option('--dry-run', 'Show what would be removed without actually removing')
    .option('--exclude <patterns...>', 'Exclude files matching these patterns')
    .option('--include <patterns...>', 'Include only files matching these patterns')
    .action(async (path, options) => {
    const client = await createS3Client(program.opts());
    await (0, rm_1.removeCommand)(client, path, options);
});
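// Illustrative usage:
//   s3-cli rm s3://my-bucket/logs/ --recursive --dry-run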
// Make bucket command
program
    .command('mb')
    .description('Create a new S3 bucket')
    .argument('<bucket>', 'Bucket URI (s3://bucket-name)')
    .action(async (bucket, options) => {
    const client = await createS3Client(program.opts());
    await (0, mb_1.makeBucketCommand)(client, bucket);
});
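// Illustrative usage:
//   s3-cli mb s3://my-new-bucket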
// Remove bucket command
program
    .command('rb')
    .description('Remove an S3 bucket')
    .argument('<bucket>', 'Bucket URI (s3://bucket-name)')
    .option('--force', 'Force delete bucket and all its contents')
    .action(async (bucket, options) => {
    const client = await createS3Client(program.opts());
    await (0, rb_1.removeBucketCommand)(client, bucket, options);
});
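// Illustrative usage:
//   s3-cli rb s3://my-old-bucket --force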
// Move command (implemented as copy + delete)
program
    .command('mv')
    .description('Move files between local filesystem and S3')
    .argument('<source>', 'Source path (local file/directory or S3 URI)')
    .argument('<destination>', 'Destination path (local file/directory or S3 URI)')
    .option('-r, --recursive', 'Move recursively')
    .option('--dry-run', 'Show what would be moved without actually moving')
    .option('--exclude <patterns...>', 'Exclude files matching these patterns')
    .option('--include <patterns...>', 'Include only files matching these patterns')
    .option('--concurrency <number>', 'Number of concurrent operations (default: 5)', '5')
    .action(async (source, destination, options) => {
    const client = await createS3Client(program.opts());
    const copyOptions = {
        ...options,
        concurrency: parseInt(options.concurrency, 10)
    };
    // First copy
    await (0, cp_1.copyCommand)(client, source, destination, copyOptions);
    // Then remove source (only if copy was successful and not dry run)
    if (!options.dryRun) {
        console.log(chalk_1.default.blue('Removing source after successful copy...'));
        await (0, rm_1.removeCommand)(client, source, {
            recursive: options.recursive,
            exclude: options.exclude,
            include: options.include
        });
    }
});
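// Illustrative usage (mv copies first, then removes the source unless --dry-run is set):
//   s3-cli mv ./reports s3://my-bucket/reports/ --recursive --exclude "*.tmp"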
// Sync command (advanced implementation)
program
    .command('sync')
    .description('Synchronize directories between local filesystem and S3 with advanced features')
    .argument('<source>', 'Source directory (local directory or S3 URI)')
    .argument('<destination>', 'Destination directory (local directory or S3 URI)')
    .option('--delete', 'Delete files in destination that do not exist in source')
    .option('--dry-run', 'Show what would be synchronized without actually doing it')
    .option('--exclude <patterns...>', 'Exclude files matching these patterns')
    .option('--include <patterns...>', 'Include only files matching these patterns')
    .option('--concurrency <number>', 'Number of concurrent operations (default: 5)', '5')
    .option('--size-only', 'Compare files by size only (skip checksum and timestamp comparison)')
    .option('--exact-timestamps', 'Require exact timestamp matches (default allows 1s tolerance)')
    .option('--checksum', 'Enable verbose checksum warnings (checksums are used by default)')
    .option('--no-checksum', 'Disable checksum comparison, use timestamp only')
    .option('--force', 'Force overwrite all files regardless of checksums or timestamps')
    .action(async (source, destination, options) => {
    const client = await createS3Client(program.opts());
    const syncOptions = {
        source,
        destination,
        delete: options.delete,
        dryRun: options.dryRun,
        exclude: options.exclude,
        include: options.include,
        concurrency: parseInt(options.concurrency, 10),
        sizeOnly: options.sizeOnly,
        exactTimestamps: options.exactTimestamps,
        checksum: options.checksum, // For verbose warnings
        // Commander maps --no-checksum onto the paired checksum flag, so derive the
        // negated value here instead of reading a non-existent options.noChecksum
        noChecksum: options.checksum === false, // To disable checksum comparison
        force: options.force
    };
    await (0, sync_1.syncCommand)(client, source, destination, syncOptions);
});
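// Illustrative usage:
//   s3-cli sync ./public s3://my-bucket/site --delete --exclude "*.map"
//   s3-cli sync s3://my-bucket/site ./backup --size-only --dry-run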
/**
 * Create S3 client with configuration
 */
async function createS3Client(globalOptions) {
    try {
        const config = (0, config_1.loadAWSConfig)();
        // Override with command line options
        if (globalOptions.region) {
            config.region = globalOptions.region;
        }
        if (globalOptions.endpointUrl) {
            config.endpoint = globalOptions.endpointUrl;
        }
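        // Note: the global --profile option is parsed above but not forwarded here;
        // whether loadAWSConfig honours a profile on its own is not confirmed by this file.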
        (0, config_1.validateConfig)(config);
        return new s3_client_1.S3ClientWrapper(config);
    }
    catch (error) {
        console.error(chalk_1.default.red(`Configuration Error: ${error.message}`));
        console.error(chalk_1.default.yellow('\nPlease ensure AWS credentials are configured:'));
        console.error(chalk_1.default.yellow('1. Set environment variables: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_DEFAULT_REGION'));
        console.error(chalk_1.default.yellow('2. Or configure AWS credentials file: ~/.aws/credentials'));
        process.exit(1);
    }
}
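// Illustrative credential setup, mirroring the hints printed above; the exact
// variables read are defined by loadAWSConfig in ./utils/config:
//   export AWS_ACCESS_KEY_ID=AKIA...
//   export AWS_SECRET_ACCESS_KEY=...
//   export AWS_DEFAULT_REGION=us-east-1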
// Error handling
process.on('unhandledRejection', (error) => {
    console.error(chalk_1.default.red(`Unhandled error: ${error.message}`));
    process.exit(1);
});
process.on('uncaughtException', (error) => {
    console.error(chalk_1.default.red(`Uncaught exception: ${error.message}`));
    process.exit(1);
});
// Parse command line arguments
program.parse();
//# sourceMappingURL=cli.js.map