backend-mcp
Version:
Generador automΓ‘tico de backends con Node.js, Express, Prisma y mΓ³dulos configurables. Servidor MCP compatible con npx para agentes IA. Soporta PostgreSQL, MySQL, MongoDB y SQLite.
1,009 lines (842 loc) β’ 27.7 kB
JavaScript
const fs = require('fs');
const path = require('path');
class PostgresModule {
constructor() {
this.name = 'database/postgres';
this.version = '1.0.0';
this.description = 'PostgreSQL database connection and management module';
}
async init(config = {}) {
console.log('π Initializing PostgreSQL module...');
const {
projectPath = process.cwd(),
generateMigrations = true,
generateSeeds = true,
setupPrisma = true,
createBackupScripts = true
} = config;
try {
// Create directory structure
await this.createDirectoryStructure(projectPath);
// Generate configuration files
await this.generateDatabaseConfig(projectPath);
if (setupPrisma) {
await this.generatePrismaSchema(projectPath);
}
if (generateMigrations) {
await this.generateMigrations(projectPath);
}
if (generateSeeds) {
await this.generateSeeds(projectPath);
}
// Generate utility files
await this.generateUtilities(projectPath);
if (createBackupScripts) {
await this.generateBackupScripts(projectPath);
}
// Update package.json
await this.updatePackageJson(projectPath);
console.log('β
PostgreSQL module initialized successfully!');
return this.getGeneratedFiles();
} catch (error) {
console.error('β Error initializing PostgreSQL module:', error);
throw error;
}
}
async createDirectoryStructure(projectPath) {
const dirs = [
'config',
'prisma',
'prisma/migrations',
'prisma/seeds',
'src/database',
'src/database/types',
'src/database/utils',
'scripts/database'
];
for (const dir of dirs) {
const dirPath = path.join(projectPath, dir);
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, { recursive: true });
}
}
}
async generateDatabaseConfig(projectPath) {
const configContent = `import { Pool, PoolConfig } from 'pg';
import dotenv from 'dotenv';
dotenv.config();
interface DatabaseConfig extends PoolConfig {
host: string;
port: number;
database: string;
user: string;
password: string;
ssl?: boolean | object;
max?: number;
idleTimeoutMillis?: number;
connectionTimeoutMillis?: number;
}
const config: DatabaseConfig = {
host: process.env.POSTGRES_HOST || 'localhost',
port: parseInt(process.env.POSTGRES_PORT || '5432'),
database: process.env.POSTGRES_DB || 'myapp',
user: process.env.POSTGRES_USER || 'postgres',
password: process.env.POSTGRES_PASSWORD || '',
ssl: process.env.POSTGRES_SSL === 'true' ? { rejectUnauthorized: false } : false,
max: parseInt(process.env.POSTGRES_POOL_SIZE || '20'),
idleTimeoutMillis: parseInt(process.env.POSTGRES_TIMEOUT || '30000'),
connectionTimeoutMillis: parseInt(process.env.POSTGRES_CONNECTION_TIMEOUT || '2000'),
};
export const databaseConfig = config;
// Connection pool
export const pool = new Pool(config);
// Health check function
export async function checkDatabaseHealth(): Promise<boolean> {
try {
const client = await pool.connect();
await client.query('SELECT 1');
client.release();
return true;
} catch (error) {
console.error('Database health check failed:', error);
return false;
}
}
// Graceful shutdown
export async function closeDatabaseConnection(): Promise<void> {
try {
await pool.end();
console.log('Database connection pool closed');
} catch (error) {
console.error('Error closing database connection:', error);
}
}
// Connection event handlers
pool.on('connect', (client) => {
console.log('New database client connected');
});
pool.on('error', (err) => {
console.error('Database pool error:', err);
});
pool.on('remove', () => {
console.log('Database client removed from pool');
});
export default pool;`;
fs.writeFileSync(
path.join(projectPath, 'config/database.ts'),
configContent
);
}
async generatePrismaSchema(projectPath) {
const schemaContent = `// This is your Prisma schema file,
// learn more about it in the docs: https://pris.ly/d/prisma-schema
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
model User {
id String @id @default(uuid()) @db.Uuid
email String @unique @db.VarChar(255)
passwordHash String @map("password_hash") @db.VarChar(255)
firstName String @map("first_name") @db.VarChar(100)
lastName String @map("last_name") @db.VarChar(100)
role String @default("user") @db.VarChar(50)
isActive Boolean @default(true) @map("is_active")
emailVerified Boolean @default(false) @map("email_verified")
createdAt DateTime @default(now()) @map("created_at")
updatedAt DateTime @updatedAt @map("updated_at")
@@index([email])
@@index([role])
@@index([createdAt])
@@map("users")
}
model Category {
id String @id @default(uuid()) @db.Uuid
name String @unique @db.VarChar(100)
description String? @db.Text
parentId String? @map("parent_id") @db.Uuid
isActive Boolean @default(true) @map("is_active")
createdAt DateTime @default(now()) @map("created_at")
updatedAt DateTime @updatedAt @map("updated_at")
parent Category? @relation("CategoryHierarchy", fields: [parentId], references: [id])
children Category[] @relation("CategoryHierarchy")
products Product[]
@@index([name])
@@index([parentId])
@@map("categories")
}
model Product {
id String @id @default(uuid()) @db.Uuid
name String @db.VarChar(200)
description String? @db.Text
sku String @unique @db.VarChar(100)
price Decimal @db.Decimal(10, 2)
categoryId String @map("category_id") @db.Uuid
stockQuantity Int @default(0) @map("stock_quantity")
isActive Boolean @default(true) @map("is_active")
createdAt DateTime @default(now()) @map("created_at")
updatedAt DateTime @updatedAt @map("updated_at")
category Category @relation(fields: [categoryId], references: [id])
inventoryMovements InventoryMovement[]
@@index([sku])
@@index([categoryId])
@@index([price])
@@index([stockQuantity])
@@map("products")
}
model InventoryMovement {
id String @id @default(uuid()) @db.Uuid
productId String @map("product_id") @db.Uuid
movementType String @map("movement_type") @db.VarChar(20)
quantity Int
reference String? @db.VarChar(100)
notes String? @db.Text
createdAt DateTime @default(now()) @map("created_at")
product Product @relation(fields: [productId], references: [id])
@@index([productId])
@@index([movementType])
@@index([createdAt])
@@map("inventory_movements")
}`;
fs.writeFileSync(
path.join(projectPath, 'prisma/schema.prisma'),
schemaContent
);
}
async generateMigrations(projectPath) {
const migrationContent = `-- CreateExtension
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- CreateTable
CREATE TABLE "users" (
"id" UUID NOT NULL DEFAULT uuid_generate_v4(),
"email" VARCHAR(255) NOT NULL,
"password_hash" VARCHAR(255) NOT NULL,
"first_name" VARCHAR(100) NOT NULL,
"last_name" VARCHAR(100) NOT NULL,
"role" VARCHAR(50) NOT NULL DEFAULT 'user',
"is_active" BOOLEAN NOT NULL DEFAULT true,
"email_verified" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "users_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "categories" (
"id" UUID NOT NULL DEFAULT uuid_generate_v4(),
"name" VARCHAR(100) NOT NULL,
"description" TEXT,
"parent_id" UUID,
"is_active" BOOLEAN NOT NULL DEFAULT true,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "categories_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "products" (
"id" UUID NOT NULL DEFAULT uuid_generate_v4(),
"name" VARCHAR(200) NOT NULL,
"description" TEXT,
"sku" VARCHAR(100) NOT NULL,
"price" DECIMAL(10,2) NOT NULL,
"category_id" UUID NOT NULL,
"stock_quantity" INTEGER NOT NULL DEFAULT 0,
"is_active" BOOLEAN NOT NULL DEFAULT true,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "products_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "inventory_movements" (
"id" UUID NOT NULL DEFAULT uuid_generate_v4(),
"product_id" UUID NOT NULL,
"movement_type" VARCHAR(20) NOT NULL,
"quantity" INTEGER NOT NULL,
"reference" VARCHAR(100),
"notes" TEXT,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "inventory_movements_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "users_email_key" ON "users"("email");
-- CreateIndex
CREATE INDEX "users_email_idx" ON "users"("email");
-- CreateIndex
CREATE INDEX "users_role_idx" ON "users"("role");
-- CreateIndex
CREATE INDEX "users_created_at_idx" ON "users"("created_at");
-- CreateIndex
CREATE UNIQUE INDEX "categories_name_key" ON "categories"("name");
-- CreateIndex
CREATE INDEX "categories_name_idx" ON "categories"("name");
-- CreateIndex
CREATE INDEX "categories_parent_id_idx" ON "categories"("parent_id");
-- CreateIndex
CREATE UNIQUE INDEX "products_sku_key" ON "products"("sku");
-- CreateIndex
CREATE INDEX "products_sku_idx" ON "products"("sku");
-- CreateIndex
CREATE INDEX "products_category_id_idx" ON "products"("category_id");
-- CreateIndex
CREATE INDEX "products_price_idx" ON "products"("price");
-- CreateIndex
CREATE INDEX "products_stock_quantity_idx" ON "products"("stock_quantity");
-- CreateIndex
CREATE INDEX "inventory_movements_product_id_idx" ON "inventory_movements"("product_id");
-- CreateIndex
CREATE INDEX "inventory_movements_movement_type_idx" ON "inventory_movements"("movement_type");
-- CreateIndex
CREATE INDEX "inventory_movements_created_at_idx" ON "inventory_movements"("created_at");
-- AddForeignKey
ALTER TABLE "categories" ADD CONSTRAINT "categories_parent_id_fkey" FOREIGN KEY ("parent_id") REFERENCES "categories"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "products" ADD CONSTRAINT "products_category_id_fkey" FOREIGN KEY ("category_id") REFERENCES "categories"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "inventory_movements" ADD CONSTRAINT "inventory_movements_product_id_fkey" FOREIGN KEY ("product_id") REFERENCES "products"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- Create function for updated_at trigger
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ language 'plpgsql';
-- Create triggers for updated_at
CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_categories_updated_at BEFORE UPDATE ON categories FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
CREATE TRIGGER update_products_updated_at BEFORE UPDATE ON products FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();`;
const migrationDir = path.join(projectPath, 'prisma/migrations/001_initial');
if (!fs.existsSync(migrationDir)) {
fs.mkdirSync(migrationDir, { recursive: true });
}
fs.writeFileSync(
path.join(migrationDir, 'migration.sql'),
migrationContent
);
}
async generateSeeds(projectPath) {
const seedContent = `import { PrismaClient } from '@prisma/client';
import bcrypt from 'bcrypt';
const prisma = new PrismaClient();
async function main() {
console.log('π± Starting database seeding...');
// Create admin user
const adminPassword = await bcrypt.hash('admin123', 10);
const adminUser = await prisma.user.upsert({
where: { email: 'admin@example.com' },
update: {},
create: {
email: 'admin@example.com',
passwordHash: adminPassword,
firstName: 'Admin',
lastName: 'User',
role: 'admin',
emailVerified: true,
},
});
// Create test user
const userPassword = await bcrypt.hash('user123', 10);
const testUser = await prisma.user.upsert({
where: { email: 'user@example.com' },
update: {},
create: {
email: 'user@example.com',
passwordHash: userPassword,
firstName: 'Test',
lastName: 'User',
role: 'user',
emailVerified: true,
},
});
// Create categories
const electronicsCategory = await prisma.category.upsert({
where: { name: 'Electronics' },
update: {},
create: {
name: 'Electronics',
description: 'Electronic devices and accessories',
},
});
const computersCategory = await prisma.category.upsert({
where: { name: 'Computers' },
update: {},
create: {
name: 'Computers',
description: 'Desktop and laptop computers',
parentId: electronicsCategory.id,
},
});
const clothingCategory = await prisma.category.upsert({
where: { name: 'Clothing' },
update: {},
create: {
name: 'Clothing',
description: 'Apparel and fashion items',
},
});
// Create products
const laptop = await prisma.product.upsert({
where: { sku: 'LAPTOP-001' },
update: {},
create: {
name: 'Gaming Laptop',
description: 'High-performance gaming laptop with RTX graphics',
sku: 'LAPTOP-001',
price: 1299.99,
categoryId: computersCategory.id,
stockQuantity: 10,
},
});
const tshirt = await prisma.product.upsert({
where: { sku: 'TSHIRT-001' },
update: {},
create: {
name: 'Cotton T-Shirt',
description: 'Comfortable cotton t-shirt in various colors',
sku: 'TSHIRT-001',
price: 19.99,
categoryId: clothingCategory.id,
stockQuantity: 50,
},
});
// Create inventory movements
await prisma.inventoryMovement.create({
data: {
productId: laptop.id,
movementType: 'IN',
quantity: 10,
reference: 'INITIAL_STOCK',
notes: 'Initial stock entry',
},
});
await prisma.inventoryMovement.create({
data: {
productId: tshirt.id,
movementType: 'IN',
quantity: 50,
reference: 'INITIAL_STOCK',
notes: 'Initial stock entry',
},
});
console.log('β
Database seeding completed!');
console.log('π€ Admin user:', adminUser.email);
console.log('π€ Test user:', testUser.email);
console.log('π¦ Products created:', [laptop.name, tshirt.name]);
}
main()
.catch((e) => {
console.error('β Seeding failed:', e);
process.exit(1);
})
.finally(async () => {
await prisma.$disconnect();
});`;
fs.writeFileSync(
path.join(projectPath, 'prisma/seeds/seed.ts'),
seedContent
);
}
async generateUtilities(projectPath) {
// Connection utility
const connectionUtilContent = `import { Pool, PoolClient } from 'pg';
import { pool } from '../../config/database';
export class DatabaseConnection {
private static instance: DatabaseConnection;
private pool: Pool;
private constructor() {
this.pool = pool;
}
public static getInstance(): DatabaseConnection {
if (!DatabaseConnection.instance) {
DatabaseConnection.instance = new DatabaseConnection();
}
return DatabaseConnection.instance;
}
async getClient(): Promise<PoolClient> {
return await this.pool.connect();
}
async query(text: string, params?: any[]): Promise<any> {
const client = await this.getClient();
try {
const result = await client.query(text, params);
return result;
} finally {
client.release();
}
}
async transaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T> {
const client = await this.getClient();
try {
await client.query('BEGIN');
const result = await callback(client);
await client.query('COMMIT');
return result;
} catch (error) {
await client.query('ROLLBACK');
throw error;
} finally {
client.release();
}
}
async healthCheck(): Promise<boolean> {
try {
await this.query('SELECT 1');
return true;
} catch (error) {
console.error('Database health check failed:', error);
return false;
}
}
async close(): Promise<void> {
await this.pool.end();
}
}
export const db = DatabaseConnection.getInstance();`;
fs.writeFileSync(
path.join(projectPath, 'src/database/utils/connection.ts'),
connectionUtilContent
);
// Migration utility
const migrationUtilContent = `import fs from 'fs';
import path from 'path';
import { db } from './connection';
export interface Migration {
id: string;
name: string;
sql: string;
appliedAt?: Date;
}
export class MigrationManager {
private migrationsPath: string;
constructor(migrationsPath: string = 'prisma/migrations') {
this.migrationsPath = migrationsPath;
}
async createMigrationsTable(): Promise<void> {
const sql = \`
CREATE TABLE IF NOT EXISTS _migrations (
id VARCHAR(255) PRIMARY KEY,
name VARCHAR(255) NOT NULL,
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
\`;
await db.query(sql);
}
async getAppliedMigrations(): Promise<string[]> {
try {
const result = await db.query('SELECT id FROM _migrations ORDER BY applied_at');
return result.rows.map((row: any) => row.id);
} catch (error) {
return [];
}
}
async getPendingMigrations(): Promise<Migration[]> {
const appliedMigrations = await this.getAppliedMigrations();
const allMigrations = this.getAllMigrations();
return allMigrations.filter(migration =>
!appliedMigrations.includes(migration.id)
);
}
getAllMigrations(): Migration[] {
if (!fs.existsSync(this.migrationsPath)) {
return [];
}
const migrationDirs = fs.readdirSync(this.migrationsPath)
.filter(dir => fs.statSync(path.join(this.migrationsPath, dir)).isDirectory())
.sort();
return migrationDirs.map(dir => {
const migrationFile = path.join(this.migrationsPath, dir, 'migration.sql');
if (fs.existsSync(migrationFile)) {
return {
id: dir,
name: dir,
sql: fs.readFileSync(migrationFile, 'utf8')
};
}
throw new Error(\`Migration file not found: \${migrationFile}\`);
});
}
async applyMigration(migration: Migration): Promise<void> {
await db.transaction(async (client) => {
// Execute migration SQL
await client.query(migration.sql);
// Record migration as applied
await client.query(
'INSERT INTO _migrations (id, name) VALUES ($1, $2)',
[]
);
});
}
async runMigrations(): Promise<void> {
console.log('π Running database migrations...');
await this.createMigrationsTable();
const pendingMigrations = await this.getPendingMigrations();
if (pendingMigrations.length === 0) {
console.log('β
No pending migrations');
return;
}
for (const migration of pendingMigrations) {
console.log(\`π Applying migration: \${migration.name}\`);
await this.applyMigration(migration);
console.log(\`β
Migration applied: \${migration.name}\`);
}
console.log(\`β
Applied \${pendingMigrations.length} migrations\`);
}
async rollbackMigration(migrationId: string): Promise<void> {
// This is a basic implementation - in production you'd want rollback scripts
await db.query('DELETE FROM _migrations WHERE id = $1', [migrationId]);
console.log(\`βͺ Rolled back migration: \${migrationId}\`);
}
}`;
fs.writeFileSync(
path.join(projectPath, 'src/database/utils/migration.ts'),
migrationUtilContent
);
// Types file
const typesContent = `export interface DatabaseConfig {
host: string;
port: number;
database: string;
user: string;
password: string;
ssl?: boolean | object;
max?: number;
idleTimeoutMillis?: number;
connectionTimeoutMillis?: number;
}
export interface QueryResult<T = any> {
rows: T[];
rowCount: number;
command: string;
oid: number;
fields: any[];
}
export interface TransactionCallback<T> {
(client: any): Promise<T>;
}
export interface MigrationRecord {
id: string;
name: string;
applied_at: Date;
}
export interface BackupOptions {
filename?: string;
compress?: boolean;
includeData?: boolean;
tables?: string[];
}
export interface RestoreOptions {
filename: string;
dropExisting?: boolean;
verbose?: boolean;
}`;
fs.writeFileSync(
path.join(projectPath, 'src/database/types/index.ts'),
typesContent
);
}
async generateBackupScripts(projectPath) {
const backupScriptContent = `
set -e
if [ -f .env ]; then
export $(cat .env | grep -v '#' | awk '/=/ {print $1}')
fi
DB_HOST=\${POSTGRES_HOST:-localhost}
DB_PORT=\${POSTGRES_PORT:-5432}
DB_NAME=\${POSTGRES_DB:-myapp}
DB_USER=\${POSTGRES_USER:-postgres}
BACKUP_DIR=\${1:-./backups}
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
BACKUP_FILE="\${BACKUP_DIR}/\${DB_NAME}_\${TIMESTAMP}.sql"
mkdir -p "\$BACKUP_DIR"
echo "ποΈ Starting PostgreSQL backup..."
echo "π Host: \$DB_HOST:\$DB_PORT"
echo "ποΈ Database: \$DB_NAME"
echo "π Backup file: \$BACKUP_FILE"
PGPASSWORD=\$POSTGRES_PASSWORD pg_dump \\
--host=\$DB_HOST \\
--port=\$DB_PORT \\
--username=\$DB_USER \\
--dbname=\$DB_NAME \\
--verbose \\
--clean \\
--no-owner \\
--no-privileges \\
--format=plain \\
--file="\$BACKUP_FILE"
if [ $? -eq 0 ]; then
echo "β
Backup completed successfully!"
echo "π Backup file: \$BACKUP_FILE"
echo "π File size: $(du -h "\$BACKUP_FILE" | cut -f1)"
gzip "\$BACKUP_FILE"
echo "ποΈ Backup compressed: \${BACKUP_FILE}.gz"
else
echo "β Backup failed!"
exit 1
fi
find "\$BACKUP_DIR" -name "\${DB_NAME}_*.sql.gz" -mtime +7 -delete
echo "π§Ή Old backups cleaned (kept last 7 days)"`;
fs.writeFileSync(
path.join(projectPath, 'scripts/database/backup.sh'),
backupScriptContent
);
const restoreScriptContent = `
set -e
if [ -z "$1" ]; then
echo "β Usage: $0 <backup_file>"
echo "π Available backups:"
ls -la ./backups/*.sql.gz 2>/dev/null || echo "No backups found"
exit 1
fi
BACKUP_FILE="$1"
# Check if backup file exists
if [ ! -f "$BACKUP_FILE" ]; then
echo "β Backup file not found: $BACKUP_FILE"
exit 1
fi
# Load environment variables
if [ -f .env ]; then
export $(cat .env | grep -v '#' | awk '/=/ {print $1}')
fi
# Configuration
DB_HOST=\${POSTGRES_HOST:-localhost}
DB_PORT=\${POSTGRES_PORT:-5432}
DB_NAME=\${POSTGRES_DB:-myapp}
DB_USER=\${POSTGRES_USER:-postgres}
echo "π Starting PostgreSQL restore..."
echo "π Host: \$DB_HOST:\$DB_PORT"
echo "ποΈ Database: \$DB_NAME"
echo "π Backup file: \$BACKUP_FILE"
# Ask for confirmation
read -p "β οΈ This will replace the current database. Continue? (y/N): " -n 1 -r
echo
if [[ ! \$REPLY =~ ^[Yy]$ ]]; then
echo "β Restore cancelled"
exit 1
fi
# Decompress if needed
if [[ "\$BACKUP_FILE" == *.gz ]]; then
echo "ποΈ Decompressing backup..."
TEMP_FILE="/tmp/restore_\$(date +%s).sql"
gunzip -c "\$BACKUP_FILE" > "\$TEMP_FILE"
RESTORE_FILE="\$TEMP_FILE"
else
RESTORE_FILE="\$BACKUP_FILE"
fi
# Restore database
PGPASSWORD=\$POSTGRES_PASSWORD psql \\
--host=\$DB_HOST \\
--port=\$DB_PORT \\
--username=\$DB_USER \\
--dbname=\$DB_NAME \\
--file="\$RESTORE_FILE"
if [ $? -eq 0 ]; then
echo "β
Restore completed successfully!"
else
echo "β Restore failed!"
exit 1
fi
# Clean up temporary file
if [ -n "\$TEMP_FILE" ] && [ -f "\$TEMP_FILE" ]; then
rm "\$TEMP_FILE"
fi
echo "π Database restore completed!"`;
fs.writeFileSync(
path.join(projectPath, 'scripts/database/restore.sh'),
restoreScriptContent
);
// Make scripts executable
try {
fs.chmodSync(path.join(projectPath, 'scripts/database/backup.sh'), '755');
fs.chmodSync(path.join(projectPath, 'scripts/database/restore.sh'), '755');
} catch (error) {
// Ignore chmod errors on Windows
}
}
async updatePackageJson(projectPath) {
const packageJsonPath = path.join(projectPath, 'package.json');
let packageJson = {};
if (fs.existsSync(packageJsonPath)) {
packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
}
// Add dependencies
packageJson.dependencies = {
...packageJson.dependencies,
'pg': '^8.11.0',
'@prisma/client': '^5.0.0',
'bcrypt': '^5.1.0'
};
packageJson.devDependencies = {
...packageJson.devDependencies,
'@types/pg': '^8.10.0',
'@types/bcrypt': '^5.0.0',
'prisma': '^5.0.0'
};
// Add scripts
packageJson.scripts = {
...packageJson.scripts,
'db:generate': 'prisma generate',
'db:migrate': 'prisma migrate dev',
'db:migrate:prod': 'prisma migrate deploy',
'db:seed': 'tsx prisma/seeds/seed.ts',
'db:studio': 'prisma studio',
'db:backup': 'bash scripts/database/backup.sh',
'db:restore': 'bash scripts/database/restore.sh',
'db:reset': 'prisma migrate reset --force'
};
fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2));
}
getGeneratedFiles() {
return [
'config/database.ts',
'prisma/schema.prisma',
'prisma/migrations/001_initial/migration.sql',
'prisma/seeds/seed.ts',
'src/database/types/index.ts',
'src/database/utils/connection.ts',
'src/database/utils/migration.ts',
'scripts/database/backup.sh',
'scripts/database/restore.sh'
];
}
// Integration methods
async setupDatabaseMonitoring(config = {}) {
// Integration with monitoring module
console.log('π Setting up database monitoring...');
// This would integrate with the monitoring module
}
async setupDatabaseLogging(config = {}) {
// Integration with logging module
console.log('π Setting up database logging...');
// This would integrate with the logging module
}
async testDatabaseConnection() {
console.log('π§ͺ Testing database connection...');
try {
const { checkDatabaseHealth } = require('../../config/database');
const isHealthy = await checkDatabaseHealth();
if (isHealthy) {
console.log('β
Database connection test passed!');
} else {
console.log('β Database connection test failed!');
}
return isHealthy;
} catch (error) {
console.error('β Database connection test error:', error);
return false;
}
}
}
module.exports = PostgresModule;