forge-sql-orm-cli
CLI tool for Forge SQL ORM
Source Map (JSON)
{"version":3,"file":"cli.mjs","sources":["../src/actions/generate-models.ts","../src/actions/migrations-create.ts","../src/actions/migrations-update.ts","../src/actions/migrations-drops.ts","../src/cli.ts"],"sourcesContent":["import \"reflect-metadata\";\nimport fs from \"fs\";\nimport path from \"path\";\nimport { execSync } from \"child_process\";\n\n/**\n * Options for model generation\n */\ninterface GenerateModelsOptions {\n host: string;\n port: number;\n user: string;\n password: string;\n dbName: string;\n output: string;\n versionField: string;\n}\n\n/**\n * Interface for column metadata\n */\ninterface ColumnMetadata {\n autoincrement: boolean;\n name: string;\n type: string;\n primaryKey: boolean;\n notNull: boolean;\n}\n\n/**\n * Interface for table metadata\n */\ninterface TableMetadata {\n name: string;\n columns: Record<string, ColumnMetadata>;\n compositePrimaryKeys: Record<string, { name: string; columns: string[] }>;\n indexes: Record<string, any>;\n foreignKeys: Record<string, any>;\n uniqueConstraints: Record<string, any>;\n checkConstraint: Record<string, any>;\n}\n\n/**\n * Interface for version field metadata\n */\ninterface VersionFieldMetadata {\n fieldName: string;\n}\n\n/**\n * Interface for table version metadata\n */\ninterface TableVersionMetadata {\n tableName: string;\n versionField: VersionFieldMetadata;\n}\n\n/**\n * Type for additional metadata map\n */\ntype AdditionalMetadata = Record<string, TableVersionMetadata>;\n\n/**\n * Interface for journal entry\n */\ninterface JournalEntry {\n idx: number;\n version: string;\n when: number;\n tag: string;\n breakpoints: boolean;\n}\n\n/**\n * Interface for journal data\n */\ninterface JournalData {\n version: string;\n dialect: string;\n entries: JournalEntry[];\n}\n\n/**\n * Replaces MySQL types with custom types in the generated schema\n * @param schemaContent - The content of the generated schema file\n * @returns Modified schema content with custom types\n */\nfunction replaceMySQLTypes(schemaContent: string): string {\n // Add imports at the top of the file\n const imports = `import { forgeDateTimeString, forgeTimeString, forgeDateString, forgeTimestampString } from \"forge-sql-orm\";\\n\\n`;\n\n // Replace types in the content\n let modifiedContent = schemaContent\n // Handle datetime with column name and mode option\n .replace(\n /datetime\\(['\"]([^'\"]+)['\"],\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\)/g,\n \"forgeDateTimeString('$1')\",\n )\n // Handle datetime with column name only\n .replace(/datetime\\(['\"]([^'\"]+)['\"]\\)/g, \"forgeDateTimeString('$1')\")\n // Handle datetime with mode option only\n .replace(/datetime\\(\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\s*\\)/g, \"forgeDateTimeString()\")\n // Handle time with column name and mode option\n .replace(/time\\(['\"]([^'\"]+)['\"],\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\)/g, \"forgeTimeString('$1')\")\n // Handle time with column name only\n .replace(/time\\(['\"]([^'\"]+)['\"]\\)/g, \"forgeTimeString('$1')\")\n // Handle time with mode option only\n .replace(/time\\(\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\s*\\)/g, \"forgeTimeString()\")\n // Handle date with column name and mode option\n .replace(/date\\(['\"]([^'\"]+)['\"],\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\)/g, \"forgeDateString('$1')\")\n // Handle date with column name only\n .replace(/date\\(['\"]([^'\"]+)['\"]\\)/g, \"forgeDateString('$1')\")\n // Handle date with mode option only\n .replace(/date\\(\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\s*\\)/g, \"forgeDateString()\")\n // 
Handle timestamp with column name and mode option\n .replace(\n /timestamp\\(['\"]([^'\"]+)['\"],\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\)/g,\n \"forgeTimestampString('$1')\",\n )\n // Handle timestamp with column name only\n .replace(/timestamp\\(['\"]([^'\"]+)['\"]\\)/g, \"forgeTimestampString('$1')\")\n // Handle timestamp with mode option only\n .replace(/timestamp\\(\\s*{\\s*mode:\\s*['\"]string['\"]\\s*}\\s*\\)/g, \"forgeTimestampString()\");\n\n // Add imports if they don't exist\n if (!modifiedContent.includes(\"import { forgeDateTimeString\")) {\n modifiedContent = imports + modifiedContent;\n }\n\n return modifiedContent;\n}\n\n/**\n * Generates models for all tables in the database using drizzle-kit\n * @param options - Generation options\n */\nexport const generateModels = async (options: GenerateModelsOptions) => {\n try {\n // Generate models using drizzle-kit pull\n await execSync(\n `npx drizzle-kit pull --dialect mysql --url mysql://${options.user}:${options.password}@${options.host}:${options.port}/${options.dbName} --out ${options.output}`,\n { encoding: \"utf-8\" },\n );\n\n // Process metadata to create version map\n const metaDir = path.join(options.output, \"meta\");\n const additionalMetadata: AdditionalMetadata = {};\n\n if (fs.existsSync(metaDir)) {\n const snapshotFile = path.join(metaDir, \"0000_snapshot.json\");\n if (fs.existsSync(snapshotFile)) {\n const snapshotData = JSON.parse(fs.readFileSync(snapshotFile, \"utf-8\"));\n\n // Process each table from the snapshot\n for (const [tableName, tableData] of Object.entries(snapshotData.tables)) {\n const table = tableData as TableMetadata;\n\n // Find version field in columns\n const versionField = Object.entries(table.columns).find(\n ([_, col]) => col.name.toLowerCase() === options.versionField,\n );\n\n if (versionField) {\n const [_, col] = versionField;\n const fieldType = col.type;\n const isSupportedType =\n fieldType === \"datetime\" ||\n fieldType === \"timestamp\" ||\n fieldType === \"int\" ||\n fieldType === \"number\" ||\n fieldType === \"decimal\";\n if (!col.notNull) {\n console.warn(\n `Version field \"${col.name}\" in table ${tableName} is nullable. Versioning may not work correctly.`,\n );\n } else if (!isSupportedType) {\n console.warn(\n `Version field \"${col.name}\" in table ${tableName} has unsupported type \"${fieldType}\". ` +\n `Only datetime, timestamp, int, and decimal types are supported for versioning. 
Versioning will be skipped.`,\n );\n } else {\n additionalMetadata[tableName] = {\n tableName,\n versionField: {\n fieldName: col.name,\n },\n };\n }\n }\n }\n }\n }\n\n // Create version metadata file\n const versionMetadataContent = `/**\n * This file was auto-generated by forge-sql-orm\n * Generated at: ${new Date().toISOString()}\n * \n * DO NOT EDIT THIS FILE MANUALLY\n * Any changes will be overwritten on next generation\n */\n \n \nexport * from \"./relations\";\nexport * from \"./schema\";\n\nexport interface VersionFieldMetadata {\n fieldName: string;\n}\n\nexport interface TableMetadata {\n tableName: string;\n versionField: VersionFieldMetadata;\n}\n\nexport type AdditionalMetadata = Record<string, TableMetadata>;\n\nexport const additionalMetadata: AdditionalMetadata = ${JSON.stringify(additionalMetadata, null, 2)};\n`;\n\n fs.writeFileSync(path.join(options.output, \"index.ts\"), versionMetadataContent);\n\n // Replace MySQL types in the generated schema file\n const schemaPath = path.join(options.output, \"schema.ts\");\n if (fs.existsSync(schemaPath)) {\n const schemaContent = fs.readFileSync(schemaPath, \"utf-8\");\n const modifiedContent = replaceMySQLTypes(schemaContent);\n fs.writeFileSync(schemaPath, modifiedContent);\n console.log(`✅ Updated schema types in: ${schemaPath}`);\n }\n\n // Remove migration files and meta directory if they exist\n const migrationDir = path.join(options.output, \"migrations\");\n\n if (fs.existsSync(migrationDir)) {\n fs.rmSync(migrationDir, { recursive: true, force: true });\n console.log(`✅ Removed: ${migrationDir}`);\n }\n\n // Read journal and remove corresponding SQL file\n if (fs.existsSync(metaDir)) {\n const journalFile = path.join(metaDir, \"_journal.json\");\n if (fs.existsSync(journalFile)) {\n const journalData = JSON.parse(fs.readFileSync(journalFile, \"utf-8\")) as JournalData;\n\n // Remove SQL files for each entry\n for (const entry of journalData.entries) {\n const sqlFile = path.join(options.output, `${entry.tag}.sql`);\n if (fs.existsSync(sqlFile)) {\n fs.rmSync(sqlFile, { force: true });\n console.log(`✅ Removed SQL file: ${entry.tag}.sql`);\n }\n }\n }\n\n // Remove meta directory after processing\n fs.rmSync(metaDir, { recursive: true, force: true });\n console.log(`✅ Removed: ${metaDir}`);\n }\n\n console.log(`✅ Successfully generated models and version metadata`);\n process.exit(0);\n } catch (error) {\n console.error(`❌ Error during model generation:`, error);\n process.exit(1);\n }\n};\n","import \"reflect-metadata\";\nimport fs from \"fs\";\nimport path from \"path\";\n\nimport { execSync } from \"child_process\";\n\n/**\n * Options for migration creation\n */\nexport interface CreateMigrationOptions {\n output: string;\n entitiesPath: string;\n force?: boolean;\n}\n\n/**\n * Loads the current migration version from `migrationCount.ts`.\n * @param migrationPath - Path to the migration folder.\n * @returns The latest migration version.\n */\nexport const loadMigrationVersion = async (migrationPath: string): Promise<number> => {\n try {\n const migrationCountFilePath = path.resolve(path.join(migrationPath, \"migrationCount.ts\"));\n if (!fs.existsSync(migrationCountFilePath)) {\n console.log(`✅ Current migration version: 0`);\n return 0;\n }\n\n const { MIGRATION_VERSION } = await import(migrationCountFilePath);\n console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);\n return MIGRATION_VERSION as number;\n } catch (error) {\n console.error(`❌ Error loading migrationCount:`, error);\n process.exit(1);\n 
}\n};\n\n/**\n * Cleans SQL statements by removing unnecessary database options.\n * @param sql - The raw SQL statement.\n * @returns The cleaned SQL statement.\n */\nexport function cleanSQLStatement(sql: string): string {\n // Add IF NOT EXISTS to CREATE TABLE statements\n sql = sql.replace(/create\\s+table\\s+(\\w+)/gi, \"create table if not exists $1\");\n\n // Add IF NOT EXISTS to CREATE INDEX statements\n sql = sql.replace(/create\\s+index\\s+(\\w+)/gi, \"create index if not exists $1\");\n\n // Add IF NOT EXISTS to ADD INDEX statements\n sql = sql.replace(\n /alter\\s+table\\s+(\\w+)\\s+add\\s+index\\s+(\\w+)/gi,\n \"alter table $1 add index if not exists $2\",\n );\n\n // Add IF NOT EXISTS to ADD CONSTRAINT statements\n sql = sql.replace(\n /alter\\s+table\\s+(\\w+)\\s+add\\s+constraint\\s+(\\w+)/gi,\n \"alter table $1 add constraint if not exists $2\",\n );\n\n // Remove unnecessary database options\n return sql.replace(/\\s+default\\s+character\\s+set\\s+utf8mb4\\s+engine\\s*=\\s*InnoDB;?/gi, \"\").trim();\n}\n\n/**\n * Generates a migration file using the provided SQL statements.\n * @param createStatements - Array of SQL statements.\n * @param version - Migration version number.\n * @returns TypeScript migration file content.\n */\nexport function generateMigrationFile(createStatements: string[], version: number): string {\n const versionPrefix = `v${version}_MIGRATION`;\n\n // Clean each SQL statement and generate migration lines with .enqueue()\n const migrationLines = createStatements\n .map(\n (stmt, index) =>\n ` .enqueue(\"${versionPrefix}${index}\", \"${cleanSQLStatement(stmt).replace(/\\s+/g, \" \")}\")`,\n )\n .join(\"\\n\");\n\n // Migration template\n return `import { MigrationRunner } from \"@forge/sql/out/migration\";\n\nexport default (migrationRunner: MigrationRunner): MigrationRunner => {\n return migrationRunner\n${migrationLines};\n};`;\n}\n\n/**\n * Saves the generated migration file along with `migrationCount.ts` and `index.ts`.\n * @param migrationCode - The migration code to be written to the file.\n * @param version - Migration version number.\n * @param outputDir - Directory where the migration files will be saved.\n */\nexport function saveMigrationFiles(migrationCode: string, version: number, outputDir: string) {\n if (!fs.existsSync(outputDir)) {\n fs.mkdirSync(outputDir, { recursive: true });\n }\n\n const migrationFilePath = path.join(outputDir, `migrationV${version}.ts`);\n const migrationCountPath = path.join(outputDir, `migrationCount.ts`);\n const indexFilePath = path.join(outputDir, `index.ts`);\n\n // Write the migration file\n fs.writeFileSync(migrationFilePath, migrationCode);\n\n // Write the migration count file\n fs.writeFileSync(migrationCountPath, `export const MIGRATION_VERSION = ${version};`);\n\n // Generate the migration index file\n const indexFileContent = `import { MigrationRunner } from \"@forge/sql/out/migration\";\nimport { MIGRATION_VERSION } from \"./migrationCount\";\n\nexport type MigrationType = (\n migrationRunner: MigrationRunner,\n) => MigrationRunner;\n\nexport default async (\n migrationRunner: MigrationRunner,\n): Promise<MigrationRunner> => {\n for (let i = 1; i <= MIGRATION_VERSION; i++) {\n const migrations = (await import(\\`./migrationV\\${i}\\`)) as {\n default: MigrationType;\n };\n migrations.default(migrationRunner);\n }\n return migrationRunner;\n};`;\n\n fs.writeFileSync(indexFilePath, indexFileContent);\n\n console.log(`✅ Migration file created: ${migrationFilePath}`);\n console.log(`✅ Migration count 
file updated: ${migrationCountPath}`);\n console.log(`✅ Migration index file created: ${indexFilePath}`);\n}\n\n/**\n * Extracts only the relevant SQL statements for migration.\n * @param schema - The full database schema as SQL.\n * @returns Filtered list of SQL statements.\n */\nexport const extractCreateStatements = (schema: string): string[] => {\n // Split by statement-breakpoint and semicolon\n const statements = schema\n .split(/--> statement-breakpoint|;/)\n .map((s) => s.trim())\n .filter((s) => s.length > 0);\n\n return statements.filter(\n (stmt) =>\n stmt.toLowerCase().startsWith(\"create table\") ||\n stmt.toLowerCase().startsWith(\"alter table\") ||\n stmt.toLowerCase().includes(\"add index\") ||\n stmt.toLowerCase().includes(\"create index\") ||\n stmt.toLowerCase().includes(\"add unique index\") ||\n stmt.toLowerCase().includes(\"add constraint\"),\n );\n};\n\n/**\n * Creates a full database migration.\n * @param options - Database connection settings and output paths.\n */\nexport const createMigration = async (options: CreateMigrationOptions) => {\n try {\n let version = await loadMigrationVersion(options.output);\n\n if (version > 0) {\n if (options.force) {\n console.warn(\n `⚠️ Warning: Migration already exists. Creating new migration with force flag...`,\n );\n } else {\n console.error(\n `❌ Error: Migration has already been created. Use --force flag to override.`,\n );\n process.exit(1);\n }\n }\n\n // Generate SQL using drizzle-kit\n await execSync(\n `npx drizzle-kit generate --name=init --dialect mysql --out ${options.output} --schema ${options.entitiesPath}`,\n { encoding: \"utf-8\" },\n );\n const initSqlFile = path.join(options.output, \"0000_init.sql\");\n const sql = fs.readFileSync(initSqlFile, \"utf-8\");\n\n // Extract and clean statements\n const createStatements = extractCreateStatements(sql);\n\n // Generate and save migration files\n const migrationFile = generateMigrationFile(createStatements, 1);\n saveMigrationFiles(migrationFile, 1, options.output);\n\n fs.rmSync(initSqlFile, { force: true });\n console.log(`✅ Removed SQL file: ${initSqlFile}`);\n // Remove meta directory after processing\n let metaDir = path.join(options.output, \"meta\");\n fs.rmSync(metaDir, { recursive: true, force: true });\n console.log(`✅ Removed: ${metaDir}`);\n console.log(`✅ Migration successfully created!`);\n process.exit(0);\n } catch (error) {\n console.error(`❌ Error during migration creation:`, error);\n process.exit(1);\n }\n};\n","import \"reflect-metadata\";\nimport fs from \"fs\";\nimport path from \"path\";\nimport mysql from \"mysql2/promise\";\nimport { MySqlTable, TableConfig } from \"drizzle-orm/mysql-core\";\nimport { RowDataPacket } from \"mysql2\";\nimport { getTableMetadata } from \"forge-sql-orm\";\nimport { AnyIndexBuilder } from \"drizzle-orm/mysql-core/indexes\";\nimport { ForeignKeyBuilder } from \"drizzle-orm/mysql-core/foreign-keys\";\nimport { UniqueConstraintBuilder } from \"drizzle-orm/mysql-core/unique-constraint\";\n\ninterface DrizzleColumn {\n type: string;\n notNull: boolean;\n autoincrement?: boolean;\n columnType?: any;\n name: string;\n getSQLType: () => string;\n}\n\ninterface DrizzleSchema {\n [tableName: string]: {\n [columnName: string]: DrizzleColumn;\n };\n}\n\ninterface DatabaseColumn extends RowDataPacket {\n TABLE_NAME: string;\n COLUMN_NAME: string;\n COLUMN_TYPE: string;\n IS_NULLABLE: string;\n COLUMN_KEY: string;\n EXTRA: string;\n}\n\ninterface DatabaseIndex extends RowDataPacket {\n TABLE_NAME: string;\n INDEX_NAME: 
string;\n COLUMN_NAME: string;\n NON_UNIQUE: number;\n}\n\ninterface DatabaseForeignKey extends RowDataPacket {\n TABLE_NAME: string;\n COLUMN_NAME: string;\n CONSTRAINT_NAME: string;\n REFERENCED_TABLE_NAME: string;\n REFERENCED_COLUMN_NAME: string;\n}\n\ninterface TableSchema {\n columns: Record<string, DatabaseColumn>;\n indexes: Record<\n string,\n {\n columns: string[];\n unique: boolean;\n }\n >;\n foreignKeys: Record<\n string,\n {\n column: string;\n referencedTable: string;\n referencedColumn: string;\n }\n >;\n}\n\ninterface DatabaseSchema {\n [tableName: string]: TableSchema;\n}\n\n/**\n * Generates a migration file using the provided SQL statements.\n * @param createStatements - Array of SQL statements.\n * @param version - Migration version number.\n * @returns TypeScript migration file content.\n */\nfunction generateMigrationFile(createStatements: string[], version: number): string {\n const versionPrefix = `v${version}_MIGRATION`;\n\n // Clean each SQL statement and generate migration lines with .enqueue()\n const migrationLines = createStatements\n .map((stmt, index) => ` .enqueue(\"${versionPrefix}${index}\", \"${stmt}\")`)\n .join(\"\\n\");\n\n // Migration template\n return `import { MigrationRunner } from \"@forge/sql/out/migration\";\n\nexport default (migrationRunner: MigrationRunner): MigrationRunner => {\n return migrationRunner\n${migrationLines};\n};`;\n}\n\n/**\n * Filters out SQL statements that already exist in the previous migration file\n * @param newStatements - Array of SQL statements from new migration\n * @param prevVersion - Previous migration version\n * @param outputDir - Directory where migration files are stored\n * @returns Array of SQL statements that don't exist in previous migration\n */\nfunction filterWithPreviousMigration(\n newStatements: string[],\n prevVersion: number,\n outputDir: string,\n): string[] {\n const prevMigrationPath = path.join(outputDir, `migrationV${prevVersion}.ts`);\n\n if (!fs.existsSync(prevMigrationPath)) {\n return newStatements.map((s) => s.replace(/\\s+/g, \" \"));\n }\n\n // Read previous migration file\n const prevContent = fs.readFileSync(prevMigrationPath, \"utf-8\");\n\n // Extract SQL statements from the file\n const prevStatements = prevContent\n .split(\"\\n\")\n .filter((line) => line.includes(\".enqueue(\"))\n .map((line) => {\n const match = line.match(/\\.enqueue\\([^,]+,\\s*\"([^\"]+)\"/);\n return match ? 
match[1].replace(/\\s+/g, \" \").trim() : \"\";\n });\n\n // Filter out statements that already exist in previous migration\n return newStatements\n .filter((s) => !prevStatements.includes(s.replace(/\\s+/g, \" \")))\n .map((s) => s.replace(/\\s+/g, \" \"));\n}\n\n/**\n * Saves the generated migration file along with `migrationCount.ts` and `index.ts`.\n * @param migrationCode - The migration code to be written to the file.\n * @param version - Migration version number.\n * @param outputDir - Directory where the migration files will be saved.\n * @returns boolean indicating if migration was saved\n */\nfunction saveMigrationFiles(migrationCode: string, version: number, outputDir: string): boolean {\n if (!fs.existsSync(outputDir)) {\n fs.mkdirSync(outputDir, { recursive: true });\n }\n\n const migrationFilePath = path.join(outputDir, `migrationV${version}.ts`);\n const migrationCountPath = path.join(outputDir, `migrationCount.ts`);\n const indexFilePath = path.join(outputDir, `index.ts`);\n\n // Write the migration file\n fs.writeFileSync(migrationFilePath, migrationCode);\n\n // Write the migration count file\n fs.writeFileSync(migrationCountPath, `export const MIGRATION_VERSION = ${version};`);\n\n // Generate the migration index file\n const indexFileContent = `import { MigrationRunner } from \"@forge/sql/out/migration\";\nimport { MIGRATION_VERSION } from \"./migrationCount\";\n\nexport type MigrationType = (\n migrationRunner: MigrationRunner,\n) => MigrationRunner;\n\nexport default async (\n migrationRunner: MigrationRunner,\n): Promise<MigrationRunner> => {\n for (let i = 1; i <= MIGRATION_VERSION; i++) {\n const migrations = (await import(\\`./migrationV\\${i}\\`)) as {\n default: MigrationType;\n };\n migrations.default(migrationRunner);\n }\n return migrationRunner;\n};`;\n\n fs.writeFileSync(indexFilePath, indexFileContent);\n\n console.log(`✅ Migration file created: ${migrationFilePath}`);\n console.log(`✅ Migration count file updated: ${migrationCountPath}`);\n console.log(`✅ Migration index file created: ${indexFilePath}`);\n\n return true;\n}\n\n/**\n * Loads the current migration version from `migrationCount.ts`.\n * @param migrationPath - Path to the migration folder.\n * @returns The latest migration version.\n */\nconst loadMigrationVersion = async (migrationPath: string): Promise<number> => {\n try {\n const migrationCountFilePath = path.resolve(path.join(migrationPath, \"migrationCount.ts\"));\n if (!fs.existsSync(migrationCountFilePath)) {\n console.warn(\n `⚠️ Warning: migrationCount.ts not found in ${migrationCountFilePath}, assuming no previous migrations.`,\n );\n return 0;\n }\n\n const { MIGRATION_VERSION } = await import(migrationCountFilePath);\n console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);\n return MIGRATION_VERSION as number;\n } catch (error) {\n console.error(`❌ Error loading migrationCount:`, error);\n process.exit(1);\n }\n};\n\n/**\n * Gets the current database schema from MySQL including indexes and foreign keys\n * @param connection - MySQL connection\n * @param dbName - Database name\n * @returns Database schema object with indexes and foreign keys\n */\nasync function getDatabaseSchema(\n connection: mysql.Connection,\n dbName: string,\n): Promise<DatabaseSchema> {\n // Get columns\n const [columns] = await connection.execute<DatabaseColumn[]>(\n `\n SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_KEY, EXTRA\n FROM INFORMATION_SCHEMA.COLUMNS\n WHERE TABLE_SCHEMA = ?\n `,\n [dbName],\n );\n\n // Get indexes\n 
const [indexes] = await connection.execute<DatabaseIndex[]>(\n `\n SELECT TABLE_NAME, INDEX_NAME, COLUMN_NAME, NON_UNIQUE\n FROM INFORMATION_SCHEMA.STATISTICS\n WHERE TABLE_SCHEMA = ?\n ORDER BY TABLE_NAME, INDEX_NAME, SEQ_IN_INDEX\n `,\n [dbName],\n );\n\n // Get foreign keys\n const [foreignKeys] = await connection.execute<DatabaseForeignKey[]>(\n `\n SELECT \n TABLE_NAME,\n COLUMN_NAME,\n CONSTRAINT_NAME,\n REFERENCED_TABLE_NAME,\n REFERENCED_COLUMN_NAME\n FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE\n WHERE TABLE_SCHEMA = ?\n AND REFERENCED_TABLE_NAME IS NOT NULL\n `,\n [dbName],\n );\n\n const schema: DatabaseSchema = {};\n\n // Process columns\n columns.forEach((row) => {\n if (!schema[row.TABLE_NAME]) {\n schema[row.TABLE_NAME] = {\n columns: {},\n indexes: {},\n foreignKeys: {},\n };\n }\n schema[row.TABLE_NAME].columns[row.COLUMN_NAME] = row;\n });\n\n // Process indexes\n indexes.forEach((row) => {\n if (!schema[row.TABLE_NAME].indexes[row.INDEX_NAME]) {\n schema[row.TABLE_NAME].indexes[row.INDEX_NAME] = {\n columns: [],\n unique: !row.NON_UNIQUE,\n };\n }\n schema[row.TABLE_NAME].indexes[row.INDEX_NAME].columns.push(row.COLUMN_NAME);\n });\n\n // Process foreign keys\n foreignKeys.forEach((row) => {\n if (!schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME]) {\n schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME] = {\n column: row.COLUMN_NAME,\n referencedTable: row.REFERENCED_TABLE_NAME,\n referencedColumn: row.REFERENCED_COLUMN_NAME,\n };\n }\n });\n\n return schema;\n}\n\n/**\n * Converts MySQL type to normalized format for comparison\n * @param mysqlType - MySQL type from INFORMATION_SCHEMA or Drizzle type\n * @returns Normalized type string\n */\nfunction normalizeMySQLType(mysqlType: string): string {\n // Remove length/precision information\n let normalized = mysqlType.replace(/\\([^)]*\\)/, \"\").toLowerCase();\n\n // Remove 'mysql' prefix from Drizzle types\n normalized = normalized.replace(/^mysql/, \"\");\n\n return normalized;\n}\n\n/**\n * Gets the name of a foreign key constraint\n * @param fk - The foreign key builder\n * @returns The name of the foreign key constraint\n */\nfunction getForeignKeyName(fk: ForeignKeyBuilder): string {\n // @ts-ignore - Internal property access\n return fk.name;\n}\n\n/**\n * Gets the name of an index\n * @param index - The index builder\n * @returns The name of the index\n */\nfunction getIndexName(index: AnyIndexBuilder): string {\n // @ts-ignore - Internal property access\n return index.name;\n}\n\n/**\n * Gets the name of a unique constraint\n * @param uc - The unique constraint builder\n * @returns The name of the unique constraint\n */\nfunction getUniqueConstraintName(uc: UniqueConstraintBuilder): string {\n // @ts-ignore - Internal property access\n return uc.name;\n}\n\n/**\n * Gets the columns of an index\n * @param index - The index builder\n * @returns Array of column names\n */\nfunction getIndexColumns(index: AnyIndexBuilder): string[] {\n // @ts-ignore - Internal property access\n return index.columns.map((col) => col.name);\n}\n\nfunction compareForeignKey(\n fk: ForeignKeyBuilder,\n { columns }: { columns: string[]; unique: boolean },\n) {\n // @ts-ignore\n const fcolumns: string[] = fk.columns.map((c) => c.name);\n return fcolumns.sort().join(\",\") === columns.sort().join(\",\");\n}\n\n/**\n * Generates SQL changes by comparing Drizzle schema with database schema\n * @param drizzleSchema - Schema from Drizzle\n * @param dbSchema - Schema from database\n * @param schemaModule - Drizzle schema module\n * 
@returns Array of SQL statements\n */\nfunction generateSchemaChanges(\n drizzleSchema: DrizzleSchema,\n dbSchema: DatabaseSchema,\n schemaModule: Record<string, any>,\n): string[] {\n const changes: string[] = [];\n\n // First check existing tables in database\n for (const [tableName, dbTable] of Object.entries(dbSchema)) {\n const drizzleColumns = drizzleSchema[tableName];\n\n if (!drizzleColumns) {\n // Table exists in database but not in schema - create it\n const columns = Object.entries(dbTable.columns)\n .map(([colName, col]) => {\n const type = col.COLUMN_TYPE;\n const nullable = col.IS_NULLABLE === \"YES\" ? \"NULL\" : \"NOT NULL\";\n const autoIncrement = col.EXTRA.includes(\"auto_increment\") ? \"AUTO_INCREMENT\" : \"\";\n return `\\`${colName}\\` ${type} ${nullable} ${autoIncrement}`.trim();\n })\n .join(\",\\n \");\n\n changes.push(`CREATE TABLE if not exists \\`${tableName}\\` (\\n ${columns}\\n);`);\n\n // Create indexes for new table\n for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {\n // Skip primary key and foreign key indexes\n if (indexName === \"PRIMARY\") {\n continue;\n }\n\n // Check if any column in this index is a foreign key\n const isForeignKeyIndex = dbIndex.columns.some((colName) => {\n const column = dbTable.columns[colName];\n return column && column.COLUMN_KEY === \"MUL\" && column.EXTRA.includes(\"foreign key\");\n });\n\n if (isForeignKeyIndex) {\n continue;\n }\n\n // Create index\n const columns = dbIndex.columns.map((col) => `\\`${col}\\``).join(\", \");\n const unique = dbIndex.unique ? \"UNIQUE \" : \"\";\n changes.push(\n `CREATE ${unique}INDEX if not exists \\`${indexName}\\` ON \\`${tableName}\\` (${columns});`,\n );\n }\n\n // Create foreign keys for new table\n for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {\n changes.push(\n `ALTER TABLE \\`${tableName}\\` ADD CONSTRAINT \\`${fkName}\\` FOREIGN KEY (\\`${dbFK.column}\\`) REFERENCES \\`${dbFK.referencedTable}\\` (\\`${dbFK.referencedColumn}\\`);`,\n );\n }\n continue;\n }\n\n // Check for column changes in existing tables\n for (const [colName, dbCol] of Object.entries(dbTable.columns)) {\n const drizzleCol = Object.values(drizzleColumns).find((c) => c.name === colName);\n\n if (!drizzleCol) {\n // Column exists in database but not in schema - create it\n const type = dbCol.COLUMN_TYPE;\n const nullable = dbCol.IS_NULLABLE === \"YES\" ? \"NULL\" : \"NOT NULL\";\n changes.push(`ALTER TABLE \\`${tableName}\\` ADD COLUMN IF NOT EXISTS \\`${colName}\\` ${type} ${nullable};`);\n continue;\n }\n\n // Check for type changes\n const normalizedDbType = normalizeMySQLType(dbCol.COLUMN_TYPE);\n const normalizedDrizzleType = normalizeMySQLType(drizzleCol.getSQLType());\n\n if (normalizedDbType !== normalizedDrizzleType) {\n const type = dbCol.COLUMN_TYPE; // Use database type as source of truth\n const nullable = dbCol.IS_NULLABLE === \"YES\" ? 
\"NULL\" : \"NOT NULL\";\n changes.push(\n `ALTER TABLE \\`${tableName}\\` MODIFY COLUMN \\`${colName}\\` IF EXISTS ${type} ${nullable};`,\n );\n }\n }\n\n // Check for index changes\n const table = Object.values(schemaModule).find((t) => {\n const metadata = getTableMetadata(t);\n return metadata.tableName === tableName;\n });\n\n if (table) {\n const metadata = getTableMetadata(table);\n // First check indexes that exist in database but not in schema\n for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {\n // Skip primary key and foreign key indexes\n if (indexName === \"PRIMARY\") {\n continue;\n }\n\n // Check if this is a foreign key index\n const isForeignKeyIndex = metadata.foreignKeys.some(\n (fk) => getForeignKeyName(fk) === indexName || compareForeignKey(fk, dbIndex),\n );\n if (isForeignKeyIndex) {\n continue;\n }\n\n // Check if this is a unique constraint\n const existsUniqIndex = metadata.uniqueConstraints.find(\n (uc) => getUniqueConstraintName(uc) === indexName,\n );\n let drizzleIndex = metadata.indexes.find((i) => getIndexName(i) === indexName);\n\n if (!drizzleIndex && existsUniqIndex) {\n drizzleIndex = existsUniqIndex as unknown as AnyIndexBuilder;\n }\n\n if (!drizzleIndex) {\n // Index exists in database but not in schema - create it\n const columns = dbIndex.columns.map((col) => `\\`${col}\\``).join(\", \");\n const unique = dbIndex.unique ? \"UNIQUE \" : \"\";\n changes.push(\n `CREATE ${unique}INDEX if not exists \\`${indexName}\\` ON \\`${tableName}\\` (${columns});`,\n );\n continue;\n }\n\n // Check if index columns changed\n const dbColumns = dbIndex.columns.join(\", \");\n const drizzleColumns = getIndexColumns(drizzleIndex).join(\", \");\n if (\n dbColumns !== drizzleColumns ||\n dbIndex.unique !== drizzleIndex instanceof UniqueConstraintBuilder\n ) {\n // Drop and recreate index using database values\n changes.push(`DROP INDEX \\`${indexName}\\` ON \\`${tableName}\\`;`);\n const columns = dbIndex.columns.map((col) => `\\`${col}\\``).join(\", \");\n const unique = dbIndex.unique ? 
\"UNIQUE \" : \"\";\n changes.push(\n `CREATE ${unique}INDEX if not exists \\`${indexName}\\` ON \\`${tableName}\\` (${columns});`,\n );\n }\n }\n\n // First check foreign keys that exist in database but not in schema\n for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {\n // Find if this column is referenced in Drizzle schema\n const drizzleFK = metadata.foreignKeys.find(\n (fk) =>\n getForeignKeyName(fk) === fkName ||\n compareForeignKey(fk, { columns: [dbFK.column], unique: false }),\n );\n\n if (!drizzleFK) {\n // Foreign key exists in database but not in schema - drop it\n changes.push(\n `ALTER TABLE \\`${tableName}\\` ADD CONSTRAINT \\`${fkName}\\` FOREIGN KEY (\\`${dbFK.column}\\`) REFERENCES \\`${dbFK.referencedTable}\\` (\\`${dbFK.referencedColumn}\\`);`,\n );\n continue;\n }\n }\n\n // Then check for new foreign keys that exist in schema but not in database\n for (const drizzleForeignKey of metadata.foreignKeys) {\n // Find if this foreign key exists in database\n const isDbFk = Object.keys(dbTable.foreignKeys).find((fk) => {\n let foreignKey = dbTable.foreignKeys[fk];\n return (\n fk === getForeignKeyName(drizzleForeignKey) ||\n compareForeignKey(drizzleForeignKey, { columns: [foreignKey.column], unique: false })\n );\n });\n\n if (!isDbFk) {\n // Foreign key exists in schema but not in database - create it\n if (drizzleForeignKey) {\n const fkName = getForeignKeyName(drizzleForeignKey);\n if (fkName) {\n changes.push(`ALTER TABLE \\`${tableName}\\` DROP FOREIGN KEY \\`${fkName}\\`;`);\n } else {\n // @ts-ignore\n const columns = drizzleForeignKey.columns;\n const columnNames = columns?.length\n ? columns.map((c: any) => c.name).join(\", \")\n : \"unknown columns\";\n console.warn(\n `⚠️ Drizzle model for table '${tableName}' does not provide a name for FOREIGN KEY constraint on columns: ${columnNames}`,\n );\n }\n }\n }\n }\n }\n }\n\n return changes;\n}\n\n/**\n * Updates an existing database migration by generating schema modifications.\n * @param options - Database connection settings and output paths.\n */\nexport const updateMigration = async (options: any) => {\n try {\n let version = await loadMigrationVersion(options.output);\n const prevVersion = version;\n\n if (version < 1) {\n console.log(\n `⚠️ Initial migration not found. Run \"npx forge-sql-orm migrations:create\" first.`,\n );\n process.exit(0);\n }\n version += 1;\n\n // Create database connection\n const connection = await mysql.createConnection({\n host: options.host,\n port: options.port,\n user: options.user,\n password: options.password,\n database: options.dbName,\n });\n\n try {\n // Get current database schema\n const dbSchema = await getDatabaseSchema(connection, options.dbName);\n\n // Import Drizzle schema using absolute path\n const schemaPath = path.resolve(options.entitiesPath, \"schema.ts\");\n if (!fs.existsSync(schemaPath)) {\n throw new Error(`Schema file not found at: ${schemaPath}`);\n }\n\n const schemaModule = await import(schemaPath);\n if (!schemaModule) {\n throw new Error(`Invalid schema file at: ${schemaPath}. 
Schema must export tables.`);\n }\n\n // Process exported tables\n const drizzleSchema: DrizzleSchema = {};\n\n // Get all exports that are tables\n const tables = Object.values(schemaModule) as MySqlTable<TableConfig>[];\n\n tables.forEach((table) => {\n const metadata = getTableMetadata(table);\n if (metadata.tableName) {\n // Convert AnyColumn to DrizzleColumn\n const columns: Record<string, DrizzleColumn> = {};\n Object.entries(metadata.columns).forEach(([name, column]) => {\n columns[name] = {\n type: column.dataType,\n notNull: column.notNull,\n autoincrement: (column as any).autoincrement,\n columnType: column.columnType,\n name: column.name,\n getSQLType: () => column.getSQLType(),\n };\n });\n drizzleSchema[metadata.tableName] = columns;\n }\n });\n\n if (Object.keys(drizzleSchema).length === 0) {\n throw new Error(`No valid tables found in schema at: ${schemaPath}`);\n }\n\n console.log(\"Found tables:\", Object.keys(drizzleSchema));\n\n // Generate SQL changes\n const createStatements = filterWithPreviousMigration(\n generateSchemaChanges(drizzleSchema, dbSchema, schemaModule),\n prevVersion,\n options.output,\n );\n\n if (createStatements.length) {\n // Generate migration file content\n const migrationFile = generateMigrationFile(createStatements, version);\n\n // Save migration files only if there are actual changes\n if (saveMigrationFiles(migrationFile, version, options.output)) {\n console.log(`✅ Migration successfully updated!`);\n }\n process.exit(0);\n } else {\n console.log(`⚠️ No new migration changes detected.`);\n process.exit(0);\n }\n } finally {\n await connection.end();\n }\n } catch (error) {\n console.error(`❌ Error during migration update:`, error);\n process.exit(1);\n }\n};\n","import \"reflect-metadata\";\nimport fs from \"fs\";\nimport path from \"path\";\nimport { MySqlTable, TableConfig } from \"drizzle-orm/mysql-core\";\nimport { getTableMetadata, generateDropTableStatements } from \"forge-sql-orm\";\n\n/**\n * Generates a migration ID using current date\n * @returns Migration ID string with current date\n */\nfunction generateMigrationUUID(version: number): string {\n const now = new Date();\n const timestamp = now.getTime();\n return `MIGRATION_V${version}_${timestamp}`;\n}\n\n/**\n * Generates a migration file using the provided SQL statements.\n * @param createStatements - Array of SQL statements.\n * @param version - Migration version number.\n * @returns TypeScript migration file content.\n */\nfunction generateMigrationFile(createStatements: string[], version: number): string {\n const uniqId = generateMigrationUUID(version);\n // Clean each SQL statement and generate migration lines with .enqueue()\n const migrationLines = createStatements\n .map(\n (stmt, index) => ` .enqueue(\"${uniqId}_${index}\", \\\"${stmt}\\\")`, // eslint-disable-line no-useless-escape\n )\n .join(\"\\n\");\n\n // Migration template\n return `import { MigrationRunner } from \"@forge/sql/out/migration\";\n\nexport default (migrationRunner: MigrationRunner): MigrationRunner => {\n return migrationRunner\n${migrationLines};\n};`;\n}\n\n/**\n * Saves the generated migration file along with `migrationCount.ts` and `index.ts`.\n * @param migrationCode - The migration code to be written to the file.\n * @param version - Migration version number.\n * @param outputDir - Directory where the migration files will be saved.\n */\nfunction saveMigrationFiles(migrationCode: string, version: number, outputDir: string) {\n if (!fs.existsSync(outputDir)) {\n fs.mkdirSync(outputDir, { 
recursive: true });\n }\n\n const migrationFilePath = path.join(outputDir, `migrationV${version}.ts`);\n const migrationCountPath = path.join(outputDir, `migrationCount.ts`);\n const indexFilePath = path.join(outputDir, `index.ts`);\n\n // Write the migration file\n fs.writeFileSync(migrationFilePath, migrationCode);\n\n // Write the migration count file\n fs.writeFileSync(migrationCountPath, `export const MIGRATION_VERSION = ${version};`);\n\n // Generate the migration index file\n const indexFileContent = `import { MigrationRunner } from \"@forge/sql/out/migration\";\nimport { MIGRATION_VERSION } from \"./migrationCount\";\n\nexport type MigrationType = (\n migrationRunner: MigrationRunner,\n) => MigrationRunner;\n\nexport default async (\n migrationRunner: MigrationRunner,\n): Promise<MigrationRunner> => {\n for (let i = 1; i <= MIGRATION_VERSION; i++) {\n const migrations = (await import(\\`./migrationV\\${i}\\`)) as {\n default: MigrationType;\n };\n migrations.default(migrationRunner);\n }\n return migrationRunner;\n};`;\n\n fs.writeFileSync(indexFilePath, indexFileContent);\n\n console.log(`✅ Migration file created: ${migrationFilePath}`);\n console.log(`✅ Migration count file updated: ${migrationCountPath}`);\n console.log(`✅ Migration index file created: ${indexFilePath}`);\n}\n\n/**\n * Creates a full database migration.\n * @param options - Database connection settings and output paths.\n */\nexport const dropMigration = async (options: any) => {\n try {\n // Start from version 1 if no previous migrations exist\n const version = 1;\n\n // Import Drizzle schema using absolute path\n const schemaPath = path.resolve(options.entitiesPath, \"schema.ts\");\n if (!fs.existsSync(schemaPath)) {\n throw new Error(`Schema file not found at: ${schemaPath}`);\n }\n\n const schemaModule = await import(schemaPath);\n if (!schemaModule) {\n throw new Error(`Invalid schema file at: ${schemaPath}. 
Schema must export tables.`);\n }\n\n // Get all exports that are tables\n const tables = Object.values(schemaModule) as MySqlTable<TableConfig>[];\n\n if (tables.length === 0) {\n throw new Error(`No valid tables found in schema at: ${schemaPath}`);\n }\n\n // Get table names for logging\n const tableNames = tables\n .map((table) => {\n const metadata = getTableMetadata(table);\n return metadata.tableName;\n })\n .filter(Boolean);\n\n console.log(\"Found tables:\", tableNames);\n\n // Generate drop statements\n const dropStatements = generateDropTableStatements(tableNames);\n\n // Generate and save migration files\n const migrationFile = generateMigrationFile(dropStatements, version);\n saveMigrationFiles(migrationFile, version, options.output);\n\n console.log(`✅ Migration successfully created!`);\n process.exit(0);\n } catch (error) {\n console.error(`❌ Error during migration creation:`, error);\n process.exit(1);\n }\n};\n","#!/usr/bin/env node\n\nimport { Command } from \"commander\";\nimport dotenv from \"dotenv\";\nimport inquirer from \"inquirer\";\nimport fs from \"fs\";\nimport path from \"path\";\nimport { generateModels } from \"./actions/generate-models\";\nimport { createMigration } from \"./actions/migrations-create\";\nimport { updateMigration } from \"./actions/migrations-update\";\nimport { dropMigration } from \"./actions/migrations-drops\";\n\nconst ENV_PATH = path.resolve(process.cwd(), \".env\");\n// 🔄 Load environment variables from `.env` file\ndotenv.config({ path: ENV_PATH });\n\nconst saveEnvFile = (config: any) => {\n let envContent = \"\";\n const envFilePath = ENV_PATH;\n\n if (fs.existsSync(envFilePath)) {\n envContent = fs.readFileSync(envFilePath, \"utf8\");\n }\n\n const envVars = envContent\n .split(\"\\n\")\n .filter((line) => line.trim() !== \"\" && !line.startsWith(\"#\"))\n .reduce((acc: any, line) => {\n const [key, ...value] = line.split(\"=\");\n acc[key] = value.join(\"=\");\n return acc;\n }, {});\n\n Object.entries(config).forEach(([key, value]) => {\n envVars[`FORGE_SQL_ORM_${key.toUpperCase()}`] = value;\n });\n\n const updatedEnvContent = Object.entries(envVars)\n .map(([key, value]) => `${key}=${value}`)\n .join(\"\\n\");\n\n fs.writeFileSync(envFilePath, updatedEnvContent, { encoding: \"utf8\" });\n\n console.log(\"✅ Configuration saved to .env without overwriting other variables.\");\n};\n\n/**\n * Prompts the user for missing parameters using Inquirer.js.\n * @param config - The current configuration object.\n * @param defaultOutput - Default output path.\n * @param customAskMissingParams - Optional function for additional prompts.\n * @returns Updated configuration with user input.\n */\nconst askMissingParams = async (\n config: any,\n defaultOutput: string,\n customAskMissingParams?: (cfg: any, questions: unknown[]) => void,\n) => {\n const questions: unknown[] = [];\n\n if (!config.host)\n questions.push({\n type: \"input\",\n name: \"host\",\n message: \"Enter database host:\",\n default: \"localhost\",\n });\n\n if (!config.port)\n questions.push({\n type: \"input\",\n name: \"port\",\n message: \"Enter database port:\",\n default: \"3306\",\n validate: (input: string) => !isNaN(parseInt(input, 10)),\n });\n\n if (!config.user)\n questions.push({\n type: \"input\",\n name: \"user\",\n message: \"Enter database user:\",\n default: \"root\",\n });\n\n if (!config.password)\n questions.push({\n type: \"password\",\n name: \"password\",\n message: \"Enter database password:\",\n mask: \"*\",\n });\n\n if (!config.dbName)\n 
questions.push({\n type: \"input\",\n name: \"dbName\",\n message: \"Enter database name:\",\n });\n\n if (!config.output)\n questions.push({\n type: \"input\",\n name: \"output\",\n message: \"Enter output path:\",\n default: defaultOutput,\n });\n\n // Allow additional questions from the caller\n if (customAskMissingParams) {\n customAskMissingParams(config, questions);\n }\n\n // If there are missing parameters, prompt the user\n if (questions.length > 0) {\n // @ts-ignore - Ignore TypeScript warning for dynamic question type\n const answers = await inquirer.prompt(questions);\n return { ...config, ...answers, port: parseInt(config.port ?? answers.port, 10) };\n }\n\n return config;\n};\n\n/**\n * Retrieves configuration parameters from command-line arguments and environment variables.\n * If any required parameters are missing, prompts the user for input.\n * @param cmd - The command object containing CLI options.\n * @param defaultOutput - Default output directory.\n * @param customConfig - Optional function for additional configuration parameters.\n * @param customAskMissingParams - Optional function for additional prompts.\n * @returns A fully resolved configuration object.\n */\nconst getConfig = async (\n cmd: any,\n defaultOutput: string,\n customConfig?: () => any,\n customAskMissingParams?: (cfg: any, questions: unknown[]) => void,\n) => {\n let config = {\n host: cmd.host || process.env.FORGE_SQL_ORM_HOST,\n port: cmd.port\n ? parseInt(cmd.port, 10)\n : process.env.FORGE_SQL_ORM_PORT\n ? parseInt(process.env.FORGE_SQL_ORM_PORT, 10)\n : undefined,\n user: cmd.user || process.env.FORGE_SQL_ORM_USER,\n password: cmd.password || process.env.FORGE_SQL_ORM_PASSWORD,\n dbName: cmd.dbName || process.env.FORGE_SQL_ORM_DBNAME,\n output: cmd.output || process.env.FORGE_SQL_ORM_OUTPUT,\n };\n\n // Merge additional configurations if provided\n if (customConfig) {\n config = { ...config, ...customConfig() };\n }\n\n const conf = await askMissingParams(config, defaultOutput, customAskMissingParams);\n if (cmd.saveEnv) {\n saveEnvFile(conf);\n }\n return conf;\n};\n\n// 📌 Initialize CLI\nexport const program = new Command();\nprogram.version(\"1.0.0\");\n\n// ✅ Command: Generate database models (Entities)\nprogram\n .command(\"generate:model\")\n .description(\"Generate Drizzle models from the database.\")\n .option(\"--host <string>\", \"Database host\")\n .option(\"--port <number>\", \"Database port\")\n .option(\"--user <string>\", \"Database user\")\n .option(\"--password <string>\", \"Database password\")\n .option(\"--dbName <string>\", \"Database name\")\n .option(\"--output <string>\", \"Output path for entities\")\n .option(\"--versionField <string>\", \"Field name for versioning\")\n .option(\"--saveEnv\", \"Save configuration to .env file\")\n .action(async (cmd) => {\n const config = await getConfig(\n cmd,\n \"./database/entities\",\n ()