@fiftyten/db-toolkit
Version:
Complete database toolkit: connections, migration, and operations via AWS Session Manager
1,032 lines • 47.2 kB
JavaScript
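Usage sketch (illustrative only): the example below assumes the package entry point re-exports the PgMigrationManager class defined in this file, and that pg_dump, psql, and the AWS CLI with the Session Manager plugin are installed locally. The environment, target database, and table names are placeholders.

const { PgMigrationManager } = require('@fiftyten/db-toolkit'); // assumed re-export

async function main() {
    // The constructor defaults to 'us-west-1' when no region is given.
    const manager = new PgMigrationManager('us-west-1');

    // Dump one AWS-managed database and restore it into another in the same
    // environment. performPgMigration prompts for confirmation before running.
    await manager.performPgMigration({
        environment: 'staging',        // placeholder environment name
        sourceDatabase: 'indicator',
        targetDatabase: 'reporting',   // placeholder database name
        dataOnly: true,
        skipTables: ['audit_log']      // placeholder table name
    });

    // Compare per-table row counts between source and target afterwards.
    await manager.getMigrationStats({
        environment: 'staging',
        sourceDatabase: 'indicator',
        targetDatabase: 'reporting'
    });
}

main().catch((error) => {
    console.error(error);
    process.exit(1);
});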
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.PgMigrationManager = void 0;
const chalk_1 = __importDefault(require("chalk"));
const readline = __importStar(require("readline"));
const child_process_1 = require("child_process");
const client_ec2_1 = require("@aws-sdk/client-ec2");
const client_ssm_1 = require("@aws-sdk/client-ssm");
const cloudformation_manager_1 = require("./cloudformation-manager");
const migration_manager_1 = require("./migration-manager");
const database_connector_1 = require("./database-connector");
const mfa_auth_1 = require("./mfa-auth");
// Helper function to prompt for confirmation
function promptConfirmation(message) {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
return new Promise((resolve) => {
rl.question(`${message} (y/N): `, (answer) => {
rl.close();
resolve(answer.toLowerCase() === 'y' || answer.toLowerCase() === 'yes');
});
});
}
class PgMigrationManager {
constructor(region = 'us-west-1') {
this.mfaAuthenticated = false;
this.activeTunnels = [];
this.addedSecurityGroupRules = [];
this.region = region;
this.dbConnector = new database_connector_1.DatabaseConnector(region);
this.cfnManager = new cloudformation_manager_1.CloudFormationManager(region);
this.migrationManager = new migration_manager_1.MigrationManager(region);
this.mfaAuth = new mfa_auth_1.MfaAuthenticator(region);
this.ec2Client = new client_ec2_1.EC2Client({ region });
this.ssmClient = new client_ssm_1.SSMClient({ region });
}
/**
* Handle AWS API calls with automatic MFA authentication
*/
async callWithMfaRetry(operation) {
try {
return await operation();
}
catch (error) {
if (this.mfaAuth.isMfaRequired(error) && !this.mfaAuthenticated) {
console.log(chalk_1.default.yellow('⚠️ MFA authentication required for AWS access'));
const credentials = await this.mfaAuth.authenticateWithMfa();
this.mfaAuth.applyCredentials(credentials);
const clientConfig = {
region: this.region,
credentials: {
accessKeyId: credentials.accessKeyId,
secretAccessKey: credentials.secretAccessKey,
sessionToken: credentials.sessionToken
}
};
this.ec2Client = new client_ec2_1.EC2Client(clientConfig);
this.ssmClient = new client_ssm_1.SSMClient(clientConfig);
this.mfaAuthenticated = true;
return await operation();
}
throw error;
}
}
/**
* Discover database connection info using existing proven patterns
*/
async discoverDatabase(config) {
console.log(chalk_1.default.blue(`🔍 Discovering ${config.type} database connection...`));
if (config.type === 'aws-managed') {
// Try to discover from SSM parameter first
const dbInfo = await this.discoverFromSSMParameter(config.environment, config.database);
if (dbInfo) {
// Found in SSM - could be legacy (direct password) or CDK (secret ARN)
return dbInfo;
}
// Fallback: Use existing target database discovery (for databases only in CloudFormation outputs)
const targetDatabases = await this.migrationManager.discoverTargetDatabases(config.environment);
const targetDb = targetDatabases.find(db => db.name === config.database);
if (!targetDb) {
throw new Error(`Database ${config.database} not found in environment ${config.environment}. Available databases: ${targetDatabases.map(db => db.name).join(', ')}`);
}
// Get password from secret
const password = await this.dbConnector.getDatabasePassword(config.environment, config.database);
// Use existing security group discovery
const securityGroups = await this.cfnManager.discoverDatabaseSecurityGroups(targetDb.endpoint, targetDb.endpoint);
return {
endpoint: targetDb.endpoint,
port: parseInt(targetDb.port),
database: config.database === 'indicator' ? 'indicator_db' : `${config.database}_db`,
username: 'fiftyten',
password,
type: 'rds',
securityGroupIds: securityGroups.targetSecurityGroupIds
};
}
else {
// External database - discover security groups if it's RDS
let securityGroupIds;
if (config.endpoint?.includes('.rds.amazonaws.com')) {
try {
const securityGroups = await this.cfnManager.discoverDatabaseSecurityGroups(config.endpoint, config.endpoint);
securityGroupIds = securityGroups.legacySecurityGroupIds;
}
catch (error) {
console.log(chalk_1.default.gray(' Could not discover security groups - treating as external database'));
}
}
return {
endpoint: config.endpoint,
port: 5432,
database: config.database || 'postgres',
username: config.username || 'postgres',
password: config.password || '',
type: securityGroupIds ? 'rds' : 'external',
securityGroupIds
};
}
}
/**
* Discover database from SSM parameter (handles both legacy and CDK patterns)
*/
async discoverFromSSMParameter(environment, database) {
// Try multiple SSM parameter patterns
const patterns = [
`/indicator/indicator-api/${environment}/${database}-database-environment-variables`, // legacy-database-environment-variables
`/indicator/indicator-api/${environment}/database-environment-variables`, // Standard pattern
`/indicator/${database}-api/${environment}/database-environment-variables` // Other app pattern
];
for (const parameterName of patterns) {
try {
console.log(chalk_1.default.gray(` Trying SSM parameter: ${parameterName}`));
const response = await this.callWithMfaRetry(async () => {
return await this.ssmClient.send(new client_ssm_1.GetParameterCommand({ Name: parameterName }));
});
if (!response.Parameter?.Value) {
continue;
}
const dbInfo = JSON.parse(response.Parameter.Value);
console.log(chalk_1.default.green(`✅ Found database in SSM: ${parameterName}`));
// Determine if this uses direct password or secret ARN
let password;
if (dbInfo.DATABASE_PASSWORD) {
// Legacy pattern - direct password
console.log(chalk_1.default.blue(' Using direct password from SSM'));
password = dbInfo.DATABASE_PASSWORD;
}
else if (dbInfo.DATABASE_SECRET_ARN) {
// CDK pattern - secret ARN
console.log(chalk_1.default.blue(' Getting password from Secrets Manager'));
password = await this.getSecretPassword(dbInfo.DATABASE_SECRET_ARN);
}
else {
console.log(chalk_1.default.yellow(' No password or secret ARN found in parameter'));
continue;
}
// Use existing security group discovery
const securityGroups = await this.cfnManager.discoverDatabaseSecurityGroups(dbInfo.DATABASE_HOST, dbInfo.DATABASE_HOST);
return {
endpoint: dbInfo.DATABASE_HOST,
port: parseInt(dbInfo.DATABASE_PORT),
database: dbInfo.DATABASE_NAME,
username: dbInfo.DATABASE_USER,
password,
type: 'rds',
securityGroupIds: securityGroups.legacySecurityGroupIds || securityGroups.targetSecurityGroupIds
};
}
catch (error) {
// Parameter not found, try next pattern
continue;
}
}
// No SSM parameter found
return null;
}
/**
* Get password from AWS Secrets Manager
*/
async getSecretPassword(secretArn) {
// Use the existing method from DatabaseConnector
const secretsClient = new (await Promise.resolve().then(() => __importStar(require('@aws-sdk/client-secrets-manager')))).SecretsManagerClient({ region: this.region });
const response = await this.callWithMfaRetry(async () => {
const { GetSecretValueCommand } = await Promise.resolve().then(() => __importStar(require('@aws-sdk/client-secrets-manager')));
return await secretsClient.send(new GetSecretValueCommand({
SecretId: secretArn
}));
});
if (!response.SecretString) {
throw new Error(`Secret value not found: ${secretArn}`);
}
const secretValue = JSON.parse(response.SecretString);
return secretValue.password || secretValue.Password || secretValue.SECRET || '';
}
/**
* Setup tunnel with automatic security group configuration
*/
async setupTunnel(database, localPort, environment) {
if (database.type === 'external') {
throw new Error('Tunnels not needed for external databases - connect directly');
}
console.log(chalk_1.default.blue(`🔌 Setting up tunnel for ${database.endpoint}...`));
// Find bastion instance using existing pattern
const bastionInstanceId = await this.getBastionInstanceId(environment);
// Configure security groups if needed
if (database.securityGroupIds?.length) {
await this.configureTunnelAccess(database, bastionInstanceId);
}
// Create tunnel process
const tunnelProcess = await this.createTunnelProcess(bastionInstanceId, database.endpoint, database.port, localPort);
// Wait for tunnel to be ready and verify it's actually working
console.log(chalk_1.default.gray(` Waiting for tunnel on port ${localPort}...`));
// Wait up to 30 seconds for the tunnel to establish
const maxRetries = 30;
let retries = 0;
let tunnelReady = false;
while (retries < maxRetries && !tunnelReady) {
await new Promise(resolve => setTimeout(resolve, 1000)); // Wait 1 second
try {
// Check if port is actually listening
const net = await Promise.resolve().then(() => __importStar(require('net')));
const testSocket = new net.Socket();
await new Promise((resolve, reject) => {
testSocket.setTimeout(1000);
testSocket.on('connect', () => {
testSocket.destroy();
tunnelReady = true;
resolve();
});
testSocket.on('timeout', () => {
testSocket.destroy();
reject(new Error('timeout'));
});
testSocket.on('error', () => {
testSocket.destroy();
reject(new Error('connection failed'));
});
testSocket.connect(localPort, 'localhost');
});
}
catch (error) {
// Port not ready yet, continue waiting
retries++;
}
}
if (!tunnelReady) {
// Kill the tunnel process since it failed
if (tunnelProcess && !tunnelProcess.killed) {
tunnelProcess.kill('SIGTERM');
}
throw new Error(`Tunnel failed to establish after ${maxRetries} seconds. Check SSM Agent connectivity on bastion host.`);
}
const tunnelInfo = {
...database,
tunnelProcess,
localPort,
bastionInstanceId,
endpoint: 'localhost',
port: localPort
};
this.activeTunnels.push(tunnelInfo);
console.log(chalk_1.default.green(`✅ Tunnel ready: localhost:${localPort}`));
return tunnelInfo;
}
/**
* Configure security groups for tunnel access using existing patterns
*/
async configureTunnelAccess(database, bastionInstanceId) {
console.log(chalk_1.default.blue('🔧 Configuring security groups for tunnel access...'));
// Get bastion security group
const bastionSecurityGroupId = await this.getBastionSecurityGroup(bastionInstanceId);
// Add temporary rules to database security groups
for (const dbSecurityGroupId of database.securityGroupIds) {
try {
const rule = {
IpProtocol: 'tcp',
FromPort: database.port,
ToPort: database.port,
UserIdGroupPairs: [{
GroupId: bastionSecurityGroupId,
Description: `Temporary tunnel access from bastion ${bastionInstanceId}`
}]
};
await this.callWithMfaRetry(async () => {
return await this.ec2Client.send(new client_ec2_1.AuthorizeSecurityGroupIngressCommand({
GroupId: dbSecurityGroupId,
IpPermissions: [rule]
}));
});
this.addedSecurityGroupRules.push({ groupId: dbSecurityGroupId, rule });
console.log(chalk_1.default.green(` ✅ Added ingress rule to ${dbSecurityGroupId}`));
}
catch (error) {
if (error instanceof Error && error.message.includes('already exists')) {
console.log(chalk_1.default.gray(` ℹ️ Rule already exists in ${dbSecurityGroupId}`));
}
else {
console.log(chalk_1.default.yellow(` ⚠️ Could not add rule to ${dbSecurityGroupId}: ${error instanceof Error ? error.message : String(error)}`));
}
}
}
}
/**
* Get bastion instance ID for the given environment using CDK-first discovery
*
* Discovery Strategy:
* 1. Primary: CDK bastion pattern (indicator-bastion-{env}-host)
* 2. Fallback: Multiple naming patterns for compatibility
* 3. All bastions use Session Manager for secure access
*/
async getBastionInstanceId(environment) {
console.log(chalk_1.default.gray(` Searching for CDK bastion: indicator-bastion-${environment}-host`));
// Primary: Try CDK bastion pattern
try {
const response = await this.callWithMfaRetry(async () => {
return await this.ec2Client.send(new client_ec2_1.DescribeInstancesCommand({
Filters: [
{
Name: 'tag:Name',
Values: [`indicator-bastion-${environment}-host`]
},
{
Name: 'instance-state-name',
Values: ['running']
}
]
}));
});
const instances = response.Reservations?.flatMap(r => r.Instances || []) || [];
if (instances.length > 0) {
const instance = instances[0];
if (instance.InstanceId) {
const bastionName = instance.Tags?.find(t => t.Key === 'Name')?.Value || 'unknown';
console.log(chalk_1.default.green(`✅ Found CDK bastion: ${instance.InstanceId} (${bastionName})`));
return instance.InstanceId;
}
}
}
catch (error) {
console.log(chalk_1.default.yellow(` CDK bastion discovery failed, trying fallback patterns...`));
}
// Fallback: Try alternative naming patterns for compatibility
const fallbackPatterns = [
`bastion-${environment}`,
`${environment}-bastion`,
`indicator-${environment}-bastion`
];
for (const pattern of fallbackPatterns) {
try {
console.log(chalk_1.default.gray(` Trying pattern: ${pattern}`));
const response = await this.callWithMfaRetry(async () => {
return await this.ec2Client.send(new client_ec2_1.DescribeInstancesCommand({
Filters: [
{
Name: 'tag:Name',
Values: [pattern]
},
{
Name: 'instance-state-name',
Values: ['running']
}
]
}));
});
const instances = response.Reservations?.flatMap(r => r.Instances || []) || [];
if (instances.length > 0) {
const instance = instances[0];
if (instance.InstanceId) {
const bastionName = instance.Tags?.find(t => t.Key === 'Name')?.Value || 'unknown';
console.log(chalk_1.default.green(`✅ Found bastion: ${instance.InstanceId} (${bastionName})`));
return instance.InstanceId;
}
}
}
catch (error) {
console.log(chalk_1.default.gray(` Pattern ${pattern} not found, continuing...`));
}
}
throw new Error(`No running bastion instance found for environment: ${environment}. Tried CDK pattern (indicator-bastion-${environment}-host) and fallback patterns: ${fallbackPatterns.join(', ')}.`);
}
/**
* Get bastion security group
*/
async getBastionSecurityGroup(instanceId) {
const response = await this.callWithMfaRetry(async () => {
return await this.ec2Client.send(new client_ec2_1.DescribeInstancesCommand({
InstanceIds: [instanceId]
}));
});
const instance = response.Reservations?.[0]?.Instances?.[0];
const securityGroups = instance?.SecurityGroups || [];
if (securityGroups.length === 0) {
throw new Error(`No security groups found for bastion instance: ${instanceId}`);
}
return securityGroups[0].GroupId;
}
/**
* Create tunnel process
*/
async createTunnelProcess(bastionInstanceId, targetHost, targetPort, localPort) {
const sessionCommand = [
'ssm', 'start-session',
'--target', bastionInstanceId,
'--document-name', 'AWS-StartPortForwardingSessionToRemoteHost',
'--parameters', JSON.stringify({
host: [targetHost],
portNumber: [targetPort.toString()],
localPortNumber: [localPort.toString()]
}),
'--region', this.region
];
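// Note: this spawns the local AWS CLI, and port forwarding through the
// AWS-StartPortForwardingSessionToRemoteHost document also requires the
// Session Manager plugin to be installed on the machine running this tool.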
console.log(chalk_1.default.gray(` Starting tunnel: aws ${sessionCommand.join(' ')}`));
const tunnelProcess = (0, child_process_1.spawn)('aws', sessionCommand, {
stdio: ['inherit', 'pipe', 'pipe']
});
// Handle tunnel output
tunnelProcess.stdout?.on('data', (data) => {
const message = data.toString();
console.log(chalk_1.default.blue(` Tunnel stdout: ${message.trim()}`));
if (message.includes('Port forwarding session started')) {
console.log(chalk_1.default.green(' ✅ Port forwarding session started'));
}
});
tunnelProcess.stderr?.on('data', (data) => {
const message = data.toString();
console.log(chalk_1.default.yellow(` Tunnel stderr: ${message.trim()}`));
});
return tunnelProcess;
}
/**
* Perform PostgreSQL dump and restore migration using universal approach
*/
async performPgMigration(config) {
console.log(chalk_1.default.blue('🚀 Starting PostgreSQL dump/restore migration...'));
console.log('');
console.log(chalk_1.default.green('📋 Migration Configuration:'));
console.log(` Environment: ${chalk_1.default.yellow(config.environment)}`);
console.log(` Source Database: ${chalk_1.default.yellow(config.sourceDatabase)}`);
console.log(` Target Database: ${chalk_1.default.yellow(config.targetDatabase)}`);
console.log(` Data Only: ${chalk_1.default.yellow(config.dataOnly ? 'Yes' : 'No')}`);
if (config.skipTables?.length) {
console.log(` Skip Tables: ${chalk_1.default.yellow(config.skipTables.join(', '))}`);
}
if (config.includeTables?.length) {
console.log(` Include Tables: ${chalk_1.default.yellow(config.includeTables.join(', '))}`);
}
console.log('');
// Confirm migration
const confirm = await promptConfirmation('Start PostgreSQL migration with these settings?');
if (!confirm) {
console.log(chalk_1.default.yellow('Migration cancelled.'));
return;
}
try {
// Discover source database
let sourceDb;
if (config.sourceEndpoint && config.sourceUsername && config.sourcePassword) {
// External database
sourceDb = await this.discoverDatabase({
type: 'external',
endpoint: config.sourceEndpoint,
username: config.sourceUsername,
password: config.sourcePassword,
database: config.sourceDatabase
});
console.log(chalk_1.default.green(`✅ External source database: ${sourceDb.endpoint}`));
}
else {
// AWS managed database
sourceDb = await this.discoverDatabase({
type: 'aws-managed',
environment: config.environment,
database: config.sourceDatabase
});
}
// Discover target database (always AWS managed)
const targetDb = await this.discoverDatabase({
type: 'aws-managed',
environment: config.environment,
database: config.targetDatabase
});
// Execute sequential migration to avoid concurrent tunnel issues
await this.executeSequentialMigration(sourceDb, targetDb, config);
console.log(chalk_1.default.green('✅ PostgreSQL migration completed successfully!'));
}
catch (error) {
console.error(chalk_1.default.red('❌ Migration failed:'), error instanceof Error ? error.message : String(error));
throw error;
}
finally {
// Cleanup any remaining resources
await this.cleanup();
}
}
/**
* Execute sequential migration to avoid concurrent tunnel conflicts
*/
async executeSequentialMigration(sourceDb, targetDb, config) {
const fs = await Promise.resolve().then(() => __importStar(require('fs')));
const path = await Promise.resolve().then(() => __importStar(require('path')));
const os = await Promise.resolve().then(() => __importStar(require('os')));
// Create temporary file for dump
const tempDir = os.tmpdir();
const dumpFile = path.join(tempDir, `pg-migration-${Date.now()}.sql`);
console.log(chalk_1.default.blue('📤 Phase 1: Dumping source database...'));
let sourceConnection;
try {
// Phase 1: Setup source connection and dump
if (sourceDb.type === 'external') {
sourceConnection = sourceDb;
console.log(chalk_1.default.green(`✅ Using external source: ${sourceDb.endpoint}`));
}
else {
sourceConnection = await this.setupTunnel(sourceDb, 5434, config.environment);
}
// Dump to file
await this.dumpToFile(sourceConnection, config, dumpFile);
}
finally {
// Close source tunnel if it was created
if (sourceConnection && 'tunnelProcess' in sourceConnection) {
console.log(chalk_1.default.gray(' Closing source tunnel...'));
if (sourceConnection.tunnelProcess && !sourceConnection.tunnelProcess.killed) {
sourceConnection.tunnelProcess.kill('SIGTERM');
}
// Remove source tunnel from active tunnels list
this.activeTunnels = this.activeTunnels.filter(t => t.localPort !== sourceConnection.localPort);
}
}
console.log(chalk_1.default.blue('📥 Phase 2: Restoring to target database...'));
let targetConnection;
try {
// Phase 2: Setup target connection and restore
targetConnection = await this.setupTunnel(targetDb, 5433, config.environment);
// Restore from file
await this.restoreFromFile(targetConnection, dumpFile);
}
finally {
// Close target tunnel
if (targetConnection && 'tunnelProcess' in targetConnection) {
console.log(chalk_1.default.gray(' Closing target tunnel...'));
if (targetConnection.tunnelProcess && !targetConnection.tunnelProcess.killed) {
targetConnection.tunnelProcess.kill('SIGTERM');
}
// Remove target tunnel from active tunnels list
this.activeTunnels = this.activeTunnels.filter(t => t.localPort !== targetConnection.localPort);
}
// Clean up temporary file
try {
if (fs.existsSync(dumpFile)) {
fs.unlinkSync(dumpFile);
console.log(chalk_1.default.gray(' Cleaned up temporary dump file'));
}
}
catch (error) {
console.log(chalk_1.default.yellow(` Warning: Could not clean up temporary file ${dumpFile}`));
}
}
}
/**
* Execute pg_dump and psql restore using sequential approach to avoid concurrent tunnel issues
*/
async executePgDumpRestore(source, target, config) {
const fs = await Promise.resolve().then(() => __importStar(require('fs')));
const path = await Promise.resolve().then(() => __importStar(require('path')));
const os = await Promise.resolve().then(() => __importStar(require('os')));
// Create temporary file for dump
const tempDir = os.tmpdir();
const dumpFile = path.join(tempDir, `pg-migration-${Date.now()}.sql`);
console.log(chalk_1.default.blue('📤 Phase 1: Dumping source database to temporary file...'));
console.log(chalk_1.default.gray(` Temp file: ${dumpFile}`));
try {
// Phase 1: Dump source database to file
await this.dumpToFile(source, config, dumpFile);
console.log(chalk_1.default.blue('📥 Phase 2: Restoring from temporary file to target database...'));
// Phase 2: Restore from file to target database
await this.restoreFromFile(target, dumpFile);
console.log(chalk_1.default.green('✅ Migration completed successfully!'));
}
finally {
// Clean up temporary file
try {
if (fs.existsSync(dumpFile)) {
fs.unlinkSync(dumpFile);
console.log(chalk_1.default.gray(' Cleaned up temporary dump file'));
}
}
catch (error) {
console.log(chalk_1.default.yellow(` Warning: Could not clean up temporary file ${dumpFile}`));
}
}
}
/**
* Dump source database to a temporary file
*/
async dumpToFile(source, config, outputFile) {
const fs = await Promise.resolve().then(() => __importStar(require('fs')));
// Build pg_dump command
const dumpArgs = [
'--host', source.endpoint,
'--port', source.port.toString(),
'--username', source.username,
'--dbname', source.database,
'--verbose',
'--no-password',
'--file', outputFile
];
// Add data-only flag if specified
if (config.dataOnly) {
dumpArgs.push('--data-only');
}
// Add table filtering
if (config.skipTables?.length) {
config.skipTables.forEach(table => {
dumpArgs.push('--exclude-table', table);
});
}
if (config.includeTables?.length) {
config.includeTables.forEach(table => {
dumpArgs.push('--table', table);
});
}
// Set environment variables for source connection
const sourceEnv = {
...process.env,
PGPASSWORD: source.password
};
console.log(chalk_1.default.gray(` Running: pg_dump ${dumpArgs.join(' ')}`));
return new Promise((resolve, reject) => {
const dumpProcess = (0, child_process_1.spawn)('pg_dump', dumpArgs, {
env: sourceEnv,
stdio: ['inherit', 'inherit', 'pipe']
});
let dumpError = '';
dumpProcess.stderr.on('data', (data) => {
const message = data.toString();
// Filter out pg_dump info messages and only show real errors
if (!message.includes('reading') && !message.includes('dumping') && message.includes('ERROR')) {
console.error(chalk_1.default.red(' Dump error:'), message);
dumpError += message;
}
else {
// Show progress info in gray
console.log(chalk_1.default.gray(` ${message.trim()}`));
}
});
dumpProcess.on('close', (code) => {
if (code === 0) {
// Verify file was created and has content
if (fs.existsSync(outputFile) && fs.statSync(outputFile).size > 0) {
const fileSize = (fs.statSync(outputFile).size / 1024 / 1024).toFixed(2);
console.log(chalk_1.default.green(` ✅ Dump completed successfully (${fileSize} MB)`));
resolve();
}
else {
reject(new Error('Dump file was not created or is empty'));
}
}
else {
reject(new Error(`pg_dump failed with code ${code}${dumpError ? ': ' + dumpError : ''}`));
}
});
dumpProcess.on('error', (error) => {
reject(new Error(`Failed to start pg_dump: ${error.message}`));
});
});
}
/**
* Restore from temporary file to target database
*/
async restoreFromFile(target, inputFile) {
const fs = await Promise.resolve().then(() => __importStar(require('fs')));
if (!fs.existsSync(inputFile)) {
throw new Error(`Dump file not found: ${inputFile}`);
}
// Build psql restore command
const restoreArgs = [
'--host', target.endpoint,
'--port', target.port.toString(),
'--username', target.username,
'--dbname', target.database,
'--no-password',
'--file', inputFile
];
// Set environment variables for target connection
const targetEnv = {
...process.env,
PGPASSWORD: target.password
};
console.log(chalk_1.default.gray(` Running: psql ${restoreArgs.join(' ')}`));
return new Promise((resolve, reject) => {
const restoreProcess = (0, child_process_1.spawn)('psql', restoreArgs, {
env: targetEnv,
stdio: ['inherit', 'inherit', 'pipe']
});
let restoreError = '';
restoreProcess.stderr.on('data', (data) => {
const message = data.toString();
// Filter out psql info messages and only show real errors
if (message.includes('ERROR')) {
console.error(chalk_1.default.red(' Restore error:'), message);
restoreError += message;
}
else {
// Show progress info in gray
console.log(chalk_1.default.gray(` ${message.trim()}`));
}
});
restoreProcess.on('close', (code) => {
if (code === 0) {
console.log(chalk_1.default.green(' ✅ Restore completed successfully'));
resolve();
}
else {
reject(new Error(`psql restore failed with code ${code}${restoreError ? ': ' + restoreError : ''}`));
}
});
restoreProcess.on('error', (error) => {
reject(new Error(`Failed to start psql: ${error.message}`));
});
});
}
/**
* Test connection to both databases using universal approach
*/
async testConnections(config) {
console.log(chalk_1.default.blue('🔍 Testing database connections...'));
let sourceConnection;
let targetConnection;
try {
// Setup source connection
if (config.sourceEndpoint && config.sourceUsername && config.sourcePassword) {
sourceConnection = await this.discoverDatabase({
type: 'external',
endpoint: config.sourceEndpoint,
username: config.sourceUsername,
password: config.sourcePassword,
database: config.sourceDatabase
});
}
else {
const sourceDb = await this.discoverDatabase({
type: 'aws-managed',
environment: config.environment,
database: config.sourceDatabase
});
sourceConnection = await this.setupTunnel(sourceDb, 5434, config.environment);
}
// Setup target connection
const targetDb = await this.discoverDatabase({
type: 'aws-managed',
environment: config.environment,
database: config.targetDatabase
});
targetConnection = await this.setupTunnel(targetDb, 5433, config.environment);
// Test connections
console.log(chalk_1.default.blue(' Testing source database connection...'));
await this.testConnection(sourceConnection, 'Source');
console.log(chalk_1.default.blue(' Testing target database connection...'));
await this.testConnection(targetConnection, 'Target');
console.log(chalk_1.default.green('✅ All database connections successful!'));
}
catch (error) {
console.error(chalk_1.default.red('❌ Connection test failed:'), error instanceof Error ? error.message : String(error));
throw error;
}
finally {
await this.cleanup();
}
}
/**
* Test a single database connection
*/
async testConnection(connection, label) {
return new Promise((resolve, reject) => {
const testArgs = [
'--host', connection.endpoint,
'--port', connection.port.toString(),
'--username', connection.username,
'--dbname', connection.database,
'--command', 'SELECT version();',
'--no-password'
];
const testEnv = {
...process.env,
PGPASSWORD: connection.password
};
const testProcess = (0, child_process_1.spawn)('psql', testArgs, {
env: testEnv,
stdio: ['inherit', 'pipe', 'pipe']
});
let output = '';
let error = '';
testProcess.stdout.on('data', (data) => {
output += data.toString();
});
testProcess.stderr.on('data', (data) => {
error += data.toString();
});
testProcess.on('close', (code) => {
if (code === 0) {
console.log(chalk_1.default.green(` ✅ ${label} connection successful`));
if (output.includes('PostgreSQL')) {
const version = output.match(/PostgreSQL [\d.]+/)?.[0];
if (version) {
console.log(chalk_1.default.gray(` ${version}`));
}
}
resolve();
}
else {
reject(new Error(`${label} connection failed: ${error || 'Unknown error'}`));
}
});
});
}
/**
* Cleanup all tunnels and security group rules using existing patterns
*/
async cleanup() {
console.log(chalk_1.default.blue('🧹 Cleaning up tunnels and security group rules...'));
// Close tunnels
for (const tunnel of this.activeTunnels) {
if (tunnel.tunnelProcess && !tunnel.tunnelProcess.killed) {
tunnel.tunnelProcess.kill('SIGTERM');
console.log(chalk_1.default.gray(` Closed tunnel on port ${tunnel.localPort}`));
}
}
this.activeTunnels = [];
// Remove security group rules using existing pattern
for (const rule of this.addedSecurityGroupRules) {
try {
await this.callWithMfaRetry(async () => {
return await this.ec2Client.send(new client_ec2_1.RevokeSecurityGroupIngressCommand({
GroupId: rule.groupId,
IpPermissions: [rule.rule]
}));
});
console.log(chalk_1.default.green(` ✅ Removed rule from ${rule.groupId}`));
}
catch (error) {
console.log(chalk_1.default.yellow(` ⚠️ Could not remove rule from ${rule.groupId}: ${error instanceof Error ? error.message : String(error)}`));
}
}
this.addedSecurityGroupRules = [];
// Give processes time to clean up
await new Promise(resolve => setTimeout(resolve, 1000));
console.log(chalk_1.default.green('✅ Cleanup completed'));
}
/**
* Get migration statistics by comparing row counts
*/
async getMigrationStats(config) {
console.log(chalk_1.default.blue('📊 Getting migration statistics...'));
let sourceConnection;
let targetConnection;
try {
// Setup connections
if (config.sourceEndpoint && config.sourceUsername && config.sourcePassword) {
sourceConnection = await this.discoverDatabase({
type: 'external',
endpoint: config.sourceEndpoint,
username: config.sourceUsername,
password: config.sourcePassword,
database: config.sourceDatabase
});
}
else {
const sourceDb = await this.discoverDatabase({
type: 'aws-managed',
environment: config.environment,
database: config.sourceDatabase
});
sourceConnection = await this.setupTunnel(sourceDb, 5434, config.environment);
}
const targetDb = await this.discoverDatabase({
type: 'aws-managed',
environment: config.environment,
database: config.targetDatabase
});
targetConnection = await this.setupTunnel(targetDb, 5433, config.environment);
// Get table list from source
const sourceTables = await this.getTableList(sourceConnection);
const targetTables = await this.getTableList(targetConnection);
console.log(chalk_1.default.green('📊 Migration Statistics:'));
console.log('');
console.log(chalk_1.default.gray(' Table Name'.padEnd(30) + 'Source Rows'.padEnd(15) + 'Target Rows'.padEnd(15) + 'Status'));
console.log(chalk_1.default.gray(' ' + '-'.repeat(70)));
for (const table of sourceTables) {
const sourceCount = await this.getTableRowCount(sourceConnection, table);
const targetCount = targetTables.includes(table) ? await this.getTableRowCount(targetConnection, table) : 0;
const status = sourceCount === targetCount ?
chalk_1.default.green('✅ Match') :
chalk_1.default.yellow(`⚠️ Diff (${targetCount - sourceCount})`);
console.log(` ${table.padEnd(30)}${sourceCount.toString().padEnd(15)}${targetCount.toString().padEnd(15)}${status}`);
}
console.log('');
console.log(chalk_1.default.green('✅ Statistics generated successfully'));
}
catch (error) {
console.error(chalk_1.default.red('❌ Failed to get statistics:'), error instanceof Error ? error.message : String(error));
throw error;
}
finally {
await this.cleanup();
}
}
/**
* Get list of tables from database
*/
async getTableList(connection) {
return new Promise((resolve, reject) => {
const queryArgs = [
'--host', connection.endpoint,
'--port', connection.port.toString(),
'--username', connection.username,
'--dbname', connection.database,
'--tuples-only',
'--no-align',
'--command', "SELECT tablename FROM pg_tables WHERE schemaname = 'public' ORDER BY tablename;",
'--no-password'
];
const queryEnv = {
...process.env,
PGPASSWORD: connection.password
};
const queryProcess = (0, child_process_1.spawn)('psql', queryArgs, {
env: queryEnv,
stdio: ['inherit', 'pipe', 'pipe']
});
let output = '';
let error = '';
queryProcess.stdout.on('data', (data) => {
output += data.toString();
});
queryProcess.stderr.on('data', (data) => {
error += data.toString();
});
queryProcess.on('close', (code) => {
if (code === 0) {
const tables = output.trim().split('\n').filter(line => line.trim().length > 0);
resolve(tables);
}
else {
reject(new Error(`Failed to get table list: ${error}`));
}
});
});
}
/**
* Get row count for a specific table
*/
async getTableRowCount(connection, tableName) {
return new Promise((resolve, reject) => {
const queryArgs = [
'--host', connection.endpoint,
'--port', connection.port.toString(),
'--username', connection.username,
'--dbname', connection.database,
'--tuples-only',
'--no-align',
'--command', `SELECT COUNT(*) FROM "${tableName}";`,
'--no-password'
];
const queryEnv = {
...process.env,
PGPASSWORD: connection.password
};
const queryProcess = (0, child_process_1.spawn)('psql', queryArgs, {
env: queryEnv,
stdio: ['inherit', 'pipe', 'pipe']
});
let output = '';
let error = '';
queryProcess.stdout.on('data', (data) => {
output += data.toString();
});
queryProcess.stderr.on('data', (data) => {
error += data.toString();
});
queryProcess.on('close', (code) => {
if (code === 0) {
const count = parseInt(output.trim()) || 0;
resolve(count);
}
else {
reject(new Error(`Failed to get row count for ${tableName}: ${error}`));
}
});
});
}
}
exports.PgMigrationManager = PgMigrationManager;
//# sourceMappingURL=pg-migration-manager.js.map