forge-sql-orm-cli
#!/usr/bin/env node "use strict"; Object.defineProperties(exports, { __esModule: { value: true }, [Symbol.toStringTag]: { value: "Module" } }); const commander = require("commander"); const dotenv = require("dotenv"); const inquirer = require("inquirer"); const fs = require("fs"); const path = require("path"); require("reflect-metadata"); const child_process = require("child_process"); const mysql = require("mysql2/promise"); const forgeSqlOrm = require("forge-sql-orm"); const uniqueConstraint = require("drizzle-orm/mysql-core/unique-constraint"); const uuid = require("uuid"); function replaceMySQLTypes(schemaContent) { const imports = `import { forgeDateTimeString, forgeTimeString, forgeDateString, forgeTimestampString } from "forge-sql-orm"; `; let modifiedContent = schemaContent.replace( /datetime\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "forgeDateTimeString('$1')" ).replace(/datetime\(['"]([^'"]+)['"]\)/g, "forgeDateTimeString('$1')").replace(/datetime\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "forgeDateTimeString()").replace(/time\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "forgeTimeString('$1')").replace(/time\(['"]([^'"]+)['"]\)/g, "forgeTimeString('$1')").replace(/time\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "forgeTimeString()").replace(/date\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "forgeDateString('$1')").replace(/date\(['"]([^'"]+)['"]\)/g, "forgeDateString('$1')").replace(/date\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "forgeDateString()").replace( /timestamp\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "forgeTimestampString('$1')" ).replace(/timestamp\(['"]([^'"]+)['"]\)/g, "forgeTimestampString('$1')").replace(/timestamp\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "forgeTimestampString()"); if (!modifiedContent.includes("import { forgeDateTimeString")) { modifiedContent = imports + modifiedContent; } return modifiedContent; } const generateModels = async (options) => { try { await child_process.execSync( `npx drizzle-kit pull --dialect mysql --url mysql://${options.user}:${options.password}@${options.host}:${options.port}/${options.dbName} --out ${options.output}`, { encoding: "utf-8" } ); const metaDir = path.join(options.output, "meta"); const additionalMetadata = {}; if (fs.existsSync(metaDir)) { const snapshotFile = path.join(metaDir, "0000_snapshot.json"); if (fs.existsSync(snapshotFile)) { const snapshotData = JSON.parse(fs.readFileSync(snapshotFile, "utf-8")); for (const [tableName, tableData] of Object.entries(snapshotData.tables)) { const table = tableData; if (tableName.toLowerCase().startsWith("a_")) { console.warn( `⚠️ Table "${tableName}" starts with "a_". KVS Cache will not work with this table because such tables are ignored in cache operations.` ); } const versionField = Object.entries(table.columns).find( ([_, col]) => col.name.toLowerCase() === options.versionField ); if (versionField) { const [_, col] = versionField; const fieldType = col.type; const isSupportedType = fieldType === "datetime" || fieldType === "timestamp" || fieldType === "int" || fieldType === "number" || fieldType === "decimal"; if (!col.notNull) { console.warn( `Version field "${col.name}" in table ${tableName} is nullable. Versioning may not work correctly.` ); } else if (!isSupportedType) { console.warn( `Version field "${col.name}" in table ${tableName} has unsupported type "${fieldType}". Only datetime, timestamp, int, and decimal types are supported for versioning. 
Versioning will be skipped.` ); } else { additionalMetadata[tableName] = { tableName, versionField: { fieldName: col.name } }; } } } } } const versionMetadataContent = `/** * This file was auto-generated by forge-sql-orm * Generated at: ${(/* @__PURE__ */ new Date()).toISOString()} * * DO NOT EDIT THIS FILE MANUALLY * Any changes will be overwritten on next generation */ export * from "./relations"; export * from "./schema"; export interface VersionFieldMetadata { fieldName: string; } export interface TableMetadata { tableName: string; versionField: VersionFieldMetadata; } export type AdditionalMetadata = Record<string, TableMetadata>; export const additionalMetadata: AdditionalMetadata = ${JSON.stringify(additionalMetadata, null, 2)}; `; fs.writeFileSync(path.join(options.output, "index.ts"), versionMetadataContent); const schemaPath = path.join(options.output, "schema.ts"); if (fs.existsSync(schemaPath)) { const schemaContent = fs.readFileSync(schemaPath, "utf-8"); const modifiedContent = replaceMySQLTypes(schemaContent); fs.writeFileSync(schemaPath, modifiedContent); console.log(`✅ Updated schema types in: ${schemaPath}`); } const migrationDir = path.join(options.output, "migrations"); if (fs.existsSync(migrationDir)) { fs.rmSync(migrationDir, { recursive: true, force: true }); console.log(`✅ Removed: ${migrationDir}`); } if (fs.existsSync(metaDir)) { const journalFile = path.join(metaDir, "_journal.json"); if (fs.existsSync(journalFile)) { const journalData = JSON.parse(fs.readFileSync(journalFile, "utf-8")); for (const entry of journalData.entries) { const sqlFile = path.join(options.output, `${entry.tag}.sql`); if (fs.existsSync(sqlFile)) { fs.rmSync(sqlFile, { force: true }); console.log(`✅ Removed SQL file: ${entry.tag}.sql`); } } } fs.rmSync(metaDir, { recursive: true, force: true }); console.log(`✅ Removed: ${metaDir}`); } console.log(`✅ Successfully generated models and version metadata`); process.exit(0); } catch (error) { console.error(`❌ Error during model generation:`, error); process.exit(1); } }; const loadMigrationVersion$1 = async (migrationPath) => { try { const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts")); if (!fs.existsSync(migrationCountFilePath)) { console.log(`✅ Current migration version: 0`); return 0; } const { MIGRATION_VERSION } = await import(migrationCountFilePath); console.log(`✅ Current migration version: ${MIGRATION_VERSION}`); return MIGRATION_VERSION; } catch (error) { console.error(`❌ Error loading migrationCount:`, error); process.exit(1); } }; const SQL_KIND_REGEX = /CREATE (?!.*IF NOT EXISTS)(UNIQUE INDEX|INDEX|TABLE) /gim; function insertNotExists(content) { SQL_KIND_REGEX.lastIndex = 0; content = content.replace(SQL_KIND_REGEX, "CREATE $1 IF NOT EXISTS "); return content; } function cleanSQLStatement(sql) { sql = insertNotExists(sql); return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim(); } function generateMigrationFile$2(createStatements, version) { const versionPrefix = `v${version}_MIGRATION`; const migrationLines = createStatements.map( (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement(stmt).replace(/\s+/g, " ")}")` ).join("\n"); return `import { MigrationRunner } from "@forge/sql/out/migration"; export default (migrationRunner: MigrationRunner): MigrationRunner => { return migrationRunner ${migrationLines}; };`; } function saveMigrationFiles$2(migrationCode, version, outputDir) { if (!fs.existsSync(outputDir)) { 
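// The output directory may not exist on a first run; create it recursively before writing migrationV<N>.ts, migrationCount.ts and index.ts below.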
fs.mkdirSync(outputDir, { recursive: true }); } const migrationFilePath = path.join(outputDir, `migrationV${version}.ts`); const migrationCountPath = path.join(outputDir, `migrationCount.ts`); const indexFilePath = path.join(outputDir, `index.ts`); fs.writeFileSync(migrationFilePath, migrationCode); fs.writeFileSync(migrationCountPath, `export const MIGRATION_VERSION = ${version};`); const importLines = []; for (let i = 1; i <= version; i++) { importLines.push(`import v${i} from "./migrationV${i}";`); } const callLines = []; for (let i = 1; i <= version; i++) { callLines.push(` v${i}(migrationRunner);`); } const indexFileContent = `import { MigrationRunner } from "@forge/sql/out/migration"; ${importLines.join("\n")} export type MigrationType = ( migrationRunner: MigrationRunner, ) => MigrationRunner; export default async ( migrationRunner: MigrationRunner, ): Promise<MigrationRunner> => { ${callLines.join("\n")} return migrationRunner; };`; fs.writeFileSync(indexFilePath, indexFileContent); console.log(`✅ Migration file created: ${migrationFilePath}`); console.log(`✅ Migration count file updated: ${migrationCountPath}`); console.log(`✅ Migration index file created: ${indexFilePath}`); } async function getTables(connection) { const [rows] = await connection.execute("SHOW TABLES"); return rows.map((row) => Object.values(row)[0]); } async function getCreateTableStatement(connection, tableName) { const [rows] = await connection.execute(`SHOW CREATE TABLE \`${tableName}\``); const result = rows; if (result.length > 0 && result[0]["Create Table"]) { return result[0]["Create Table"]; } return null; } async function getAllCreateTableStatements(connection) { const tables = await getTables(connection); const statements = []; for (const table of tables) { const createTable = await getCreateTableStatement(connection, table); if (createTable) { statements.push(createTable); } } return statements; } const createMigration = async (options) => { try { let version = await loadMigrationVersion$1(options.output); if (version > 0) { if (options.force) { console.warn( `⚠️ Warning: Migration already exists. Creating new migration with force flag...` ); } else { console.error( `❌ Error: Migration has already been created. 
Use --force flag to override.` ); process.exit(1); } } if (!options.host || !options.port || !options.user || !options.password || !options.dbName) { console.error( `❌ Error: Database connection parameters are required (host, port, user, password, dbName)` ); process.exit(1); } const connection = await mysql.createConnection({ host: options.host, port: options.port, user: options.user, password: options.password, database: options.dbName }); try { console.log(`✅ Connected to database: ${options.dbName}`); console.log(`📋 Fetching CREATE TABLE statements from database...`); const createStatements = await getAllCreateTableStatements(connection); if (createStatements.length === 0) { console.warn(`⚠️ Warning: No tables found in the database.`); } else { console.log(`✅ Found ${createStatements.length} table(s)`); } const migrationFile = generateMigrationFile$2(createStatements, 1); saveMigrationFiles$2(migrationFile, 1, options.output); console.log(`✅ Migration successfully created!`); process.exit(0); } finally { await connection.end(); } } catch (error) { console.error(`❌ Error during migration creation:`, error); process.exit(1); } }; function buildDefault(preMigration) { const def = preMigration.defaultValue; const type = preMigration.type.toLowerCase(); if (def === void 0 || def === null) { return ""; } if (def === "") { return `''`; } const stringTypes = /* @__PURE__ */ new Set([ "char", "varchar", "text", "tinytext", "mediumtext", "longtext", "enum", "set", "binary", "varbinary", "blob" ]); const numericTypes = /* @__PURE__ */ new Set([ "tinyint", "smallint", "mediumint", "int", "bigint", "decimal", "float", "double", "bit" ]); const isNumericLiteral = /^[+-]?\d+(\.\d+)?$/.test(def); if (numericTypes.has(type) && isNumericLiteral) { return `${def}`; } if (stringTypes.has(type)) { const escaped = def.replace(/'/g, "''"); return `'${escaped}'`; } return `${def}`; } function generateWarningMessage(tableName, colName, version) { return `⚠️ WARNING: Field \`${tableName}\`.\`${colName}\` requires a default value for existing NULL records. Action required in migration file: migrationV${version}.ts Find the line with: UPDATE \`${tableName}\` SET \`${colName}\` = ? Replace '?' with an actual value (e.g., '' for strings, 0 for numbers, '1970-01-01' for dates) OR remove this migration if it's not needed.`; } function handleMissingDefaultValue(preMigration, version, migrationLineList) { const warningMsg = generateWarningMessage(preMigration.tableName, preMigration.colName, version); console.warn(warningMsg); migrationLineList.push(`console.error(${JSON.stringify(warningMsg)});`); } function getUpdateDefaultValue(preMigration, defaultValue) { return defaultValue === "?" ? 
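// "?" means no default value could be derived from the database column; keep the placeholder so the developer replaces it manually (see generateWarningMessage above).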
defaultValue : buildDefault(preMigration); } function generateUpdateStatement(preMigration, defaultValue) { const updateValue = getUpdateDefaultValue(preMigration, defaultValue); return `UPDATE \`${preMigration.tableName}\` SET \`${preMigration.colName}\` = ${updateValue} WHERE \`${preMigration.colName}\` IS NULL`; } function generateMigrationFile$1(createStatements, version) { const versionPrefix = `v${version}_MIGRATION`; const migrationLineList = []; createStatements.changes.forEach((change, index) => { if (!change.premigrationId) { migrationLineList.push( ` migrationRunner.enqueue("${versionPrefix}${index}", "${change.change}")` ); return; } const preMigration = createStatements.preMigrations[change.premigrationId]; if (!preMigration) { migrationLineList.push( ` migrationRunner.enqueue("${versionPrefix}${index}", "${change.change}")` ); return; } const defaultValue = preMigration.defaultValue === void 0 || preMigration.defaultValue === null ? "?" : preMigration.defaultValue; const needsWarning = defaultValue === "?"; if (preMigration.migrationType === "NEW_FIELD_NOT_NULL") { const addColumnStatement = change.change.replace("NOT NULL", "NULL"); migrationLineList.push( ` migrationRunner.enqueue("${versionPrefix}${index}_NULLABLE", "${addColumnStatement}");` ); if (needsWarning) { handleMissingDefaultValue(preMigration, version, migrationLineList); } const updateStatement = generateUpdateStatement(preMigration, defaultValue); migrationLineList.push( ` migrationRunner.enqueue("${versionPrefix}${index}_UPDATE_EXISTS_RECORDS", "${updateStatement}");` ); const defaultClause = defaultValue === "?" ? "" : ` DEFAULT ${buildDefault(preMigration)}`; const modifyStatement = `ALTER TABLE \`${preMigration.tableName}\` MODIFY COLUMN IF EXISTS \`${preMigration.colName}\` ${preMigration.type} NOT NULL${defaultClause};`; migrationLineList.push( ` migrationRunner.enqueue("${versionPrefix}${index}", "${modifyStatement}");` ); } else if (preMigration.migrationType === "MODIFY_NOT_NULL") { if (needsWarning) { handleMissingDefaultValue(preMigration, version, migrationLineList); } const updateStatement = generateUpdateStatement(preMigration, defaultValue); migrationLineList.push( ` migrationRunner.enqueue("${versionPrefix}${index}_UPDATE_EXISTS_RECORDS", "${updateStatement}")` ); migrationLineList.push( ` migrationRunner.enqueue("${versionPrefix}${index}", "${change.change}")` ); } }); const migrationLines = migrationLineList.join("\n"); return `import { MigrationRunner } from "@forge/sql/out/migration"; export default (migrationRunner: MigrationRunner): MigrationRunner => { ${migrationLines}; return migrationRunner; };`; } function filterWithPreviousMigration(newStatements, prevVersion, outputDir) { const prevMigrationPath = path.join(outputDir, `migrationV${prevVersion}.ts`); if (!fs.existsSync(prevMigrationPath)) { return { changes: newStatements.changes.map((s) => ({ change: s.change.replace(/\s+/g, " "), premigrationId: s.premigrationId })), preMigrations: newStatements.preMigrations }; } const prevContent = fs.readFileSync(prevMigrationPath, "utf-8"); const prevStatements = prevContent.split("\n").filter((line) => line.includes(".enqueue(")).map((line) => { const match = line.match(/\.enqueue\([^,]+,\s*"([^"]+)"/); return match ? 
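// Pull the raw SQL text out of each previous .enqueue("id", "SQL") call so statements already present in the prior migration can be filtered out below.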
match[1].replace(/\s+/g, " ").trim() : ""; }); return { preMigrations: newStatements.preMigrations, changes: newStatements.changes.filter((s) => !prevStatements.includes(s.change.replace(/\s+/g, " "))).map((s) => ({ change: s.change.replace(/\s+/g, " "), premigrationId: s.premigrationId })) }; } function saveMigrationFiles$1(migrationCode, version, outputDir) { if (!fs.existsSync(outputDir)) { fs.mkdirSync(outputDir, { recursive: true }); } const migrationFilePath = path.join(outputDir, `migrationV${version}.ts`); const migrationCountPath = path.join(outputDir, `migrationCount.ts`); const indexFilePath = path.join(outputDir, `index.ts`); fs.writeFileSync(migrationFilePath, migrationCode); fs.writeFileSync(migrationCountPath, `export const MIGRATION_VERSION = ${version};`); const importLines = []; const callLines = []; for (let i = 1; i <= version; i++) { importLines.push(`import migrationV${i} from "./migrationV${i}";`); callLines.push(` migrationV${i}(migrationRunner);`); } const indexFileContent = `import { MigrationRunner } from "@forge/sql/out/migration"; ${importLines.join("\n")} export type MigrationType = ( migrationRunner: MigrationRunner, ) => MigrationRunner; export default ( migrationRunner: MigrationRunner, ): MigrationRunner => { ${callLines.join("\n")} return migrationRunner; };`; fs.writeFileSync(indexFilePath, indexFileContent); console.log(`✅ Migration file created: ${migrationFilePath}`); console.log(`✅ Migration count file updated: ${migrationCountPath}`); console.log(`✅ Migration index file created: ${indexFilePath}`); return true; } const loadMigrationVersion = async (migrationPath) => { try { const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts")); if (!fs.existsSync(migrationCountFilePath)) { console.warn( `⚠️ Warning: migrationCount.ts not found in ${migrationCountFilePath}, assuming no previous migrations.` ); return 0; } const { MIGRATION_VERSION } = await import(migrationCountFilePath); console.log(`✅ Current migration version: ${MIGRATION_VERSION}`); return MIGRATION_VERSION; } catch (error) { console.error(`❌ Error loading migrationCount:`, error); process.exit(1); } }; async function getDatabaseSchema(connection, dbName) { const [columns] = await connection.execute( ` SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_KEY, EXTRA, COLUMN_DEFAULT FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = ? `, [dbName] ); const [indexes] = await connection.execute( ` SELECT TABLE_NAME, INDEX_NAME, COLUMN_NAME, NON_UNIQUE FROM INFORMATION_SCHEMA.STATISTICS WHERE TABLE_SCHEMA = ? ORDER BY TABLE_NAME, INDEX_NAME, SEQ_IN_INDEX `, [dbName] ); const [foreignKeys] = await connection.execute( ` SELECT TABLE_NAME, COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE WHERE TABLE_SCHEMA = ? 
AND REFERENCED_TABLE_NAME IS NOT NULL `, [dbName] ); const schema = {}; columns.forEach((row) => { if (!schema[row.TABLE_NAME]) { schema[row.TABLE_NAME] = { columns: {}, indexes: {}, foreignKeys: {} }; } schema[row.TABLE_NAME].columns[row.COLUMN_NAME] = row; }); indexes.forEach((row) => { if (!schema[row.TABLE_NAME].indexes[row.INDEX_NAME]) { schema[row.TABLE_NAME].indexes[row.INDEX_NAME] = { columns: [], unique: !row.NON_UNIQUE }; } schema[row.TABLE_NAME].indexes[row.INDEX_NAME].columns.push(row.COLUMN_NAME); }); foreignKeys.forEach((row) => { if (!schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME]) { schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME] = { column: row.COLUMN_NAME, referencedTable: row.REFERENCED_TABLE_NAME, referencedColumn: row.REFERENCED_COLUMN_NAME }; } }); return schema; } function normalizeMySQLType(mysqlType) { let normalized = mysqlType.replace(/\([^)]*\)/, "").toLowerCase(); normalized = normalized.replace(/^mysql/, ""); return normalized; } function getForeignKeyName(fk) { return fk.name; } function getIndexName(index) { return index.name; } function getUniqueConstraintName(uc) { return uc.name; } function getIndexColumns(index) { return index.columns.map((col) => col.name); } function compareForeignKey(fk, { columns }) { const fcolumns = fk.columns.map((c) => c.name); return fcolumns.sort().join(",") === columns.sort().join(","); } function generateSchemaChanges(drizzleSchema, dbSchema, schemaModule) { const changes = []; const preMigrations = {}; for (const [tableName, dbTable] of Object.entries(dbSchema)) { const drizzleColumns = drizzleSchema[tableName]; if (!drizzleColumns) { const columns = Object.entries(dbTable.columns).map(([colName, col]) => { const type = col.COLUMN_TYPE; const nullable = col.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL"; const autoIncrement = col.EXTRA.includes("auto_increment") ? "AUTO_INCREMENT" : ""; return `\`${colName}\` ${type} ${nullable} ${autoIncrement}`.trim(); }).join(",\n "); changes.push({ change: `CREATE TABLE if not exists \`${tableName}\` ( ${columns} );` }); for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) { if (indexName === "PRIMARY") { continue; } const isForeignKeyIndex = dbIndex.columns.some((colName) => { const column = dbTable.columns[colName]; return column && column.COLUMN_KEY === "MUL" && column.EXTRA.includes("foreign key"); }); if (isForeignKeyIndex) { continue; } const columns2 = dbIndex.columns.map((col) => `\`${col}\``).join(", "); const unique = dbIndex.unique ? "UNIQUE " : ""; changes.push({ change: `CREATE ${unique}INDEX if not exists \`${indexName}\` ON \`${tableName}\` (${columns2});` }); } for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) { changes.push({ change: `ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);` }); } continue; } for (const [colName, dbCol] of Object.entries(dbTable.columns)) { const drizzleCol = Object.values(drizzleColumns).find((c) => c.name === colName); if (!drizzleCol) { const type = dbCol.COLUMN_TYPE; const nullable2 = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL"; let premigrationId = nullable2 === "NOT NULL" ? 
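// A new NOT NULL column on an existing table needs a pre-migration: add it as NULL, backfill existing rows, then tighten it to NOT NULL.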
uuid.v4() : void 0; const defaultValue = dbCol.COLUMN_DEFAULT; if (nullable2 === "NOT NULL") { premigrationId = uuid.v4(); preMigrations[premigrationId] = { tableName, dbTable, colName, type, migrationType: "NEW_FIELD_NOT_NULL", defaultValue }; } changes.push({ change: `ALTER TABLE \`${tableName}\` ADD COLUMN IF NOT EXISTS \`${colName}\` ${type} ${nullable2} ${defaultValue === void 0 || defaultValue === null ? "" : `DEFAULT ${buildDefault({ type, defaultValue })}`};`, premigrationId }); continue; } const normalizedDbType = normalizeMySQLType(dbCol.COLUMN_TYPE); const normalizedDrizzleType = normalizeMySQLType(drizzleCol.getSQLType()); const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL"; const dbIsNotNull = nullable === "NOT NULL"; const drizzleIsNotNull = drizzleCol.notNull; const typeChanged = normalizedDbType !== normalizedDrizzleType; const nullabilityChanged = dbIsNotNull !== drizzleIsNotNull; const hasDrizzleDefault = drizzleCol.default !== null && drizzleCol.default !== void 0; const hasDbDefault = dbCol.COLUMN_DEFAULT !== null && dbCol.COLUMN_DEFAULT !== void 0; const defaultChanged = hasDrizzleDefault && hasDbDefault && !drizzleCol.default?.includes(dbCol.COLUMN_DEFAULT); if (typeChanged || nullabilityChanged || defaultChanged) { const type = dbCol.COLUMN_TYPE; const defaultValue = dbCol.COLUMN_DEFAULT; let premigrationId = void 0; if (dbIsNotNull && !drizzleIsNotNull) { premigrationId = uuid.v4(); preMigrations[premigrationId] = { tableName, dbTable, colName, type, migrationType: "MODIFY_NOT_NULL", defaultValue }; } let defaultClause = ""; if (defaultValue !== void 0 && defaultValue !== null) { const defaultValueObj = { type, defaultValue }; defaultClause = ` DEFAULT ${buildDefault(defaultValueObj)}`; } const modifyStatement = `ALTER TABLE \`${tableName}\` MODIFY COLUMN IF EXISTS \`${colName}\` ${type} ${nullable}${defaultClause};`; changes.push({ change: modifyStatement, premigrationId }); } } const table = Object.values(schemaModule).find((t) => { const metadata = forgeSqlOrm.getTableMetadata(t); return metadata.tableName === tableName; }); if (table) { const metadata = forgeSqlOrm.getTableMetadata(table); for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) { if (indexName === "PRIMARY") { continue; } const isForeignKeyIndex = metadata.foreignKeys.some( (fk) => getForeignKeyName(fk) === indexName || compareForeignKey(fk, dbIndex) ); if (isForeignKeyIndex) { continue; } const existsUniqIndex = metadata.uniqueConstraints.find( (uc) => getUniqueConstraintName(uc) === indexName ); let drizzleIndex = metadata.indexes.find((i) => getIndexName(i) === indexName); if (!drizzleIndex && existsUniqIndex) { drizzleIndex = existsUniqIndex; } if (!drizzleIndex) { const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", "); const unique = dbIndex.unique ? "UNIQUE " : ""; changes.push({ change: `CREATE ${unique}INDEX if not exists \`${indexName}\` ON \`${tableName}\` (${columns});` }); continue; } const dbColumns = dbIndex.columns.join(", "); const drizzleColumns2 = getIndexColumns(drizzleIndex).join(", "); if (dbColumns !== drizzleColumns2 || dbIndex.unique !== drizzleIndex instanceof uniqueConstraint.UniqueConstraintBuilder) { changes.push({ change: `DROP INDEX \`${indexName}\` ON \`${tableName}\`;` }); const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", "); const unique = dbIndex.unique ? 
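// The index differs from the Drizzle definition, so it is dropped and recreated here using the database-side column list and uniqueness.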
"UNIQUE " : ""; changes.push({ change: `CREATE ${unique}INDEX if not exists \`${indexName}\` ON \`${tableName}\` (${columns});` }); } } for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) { const drizzleFK = metadata.foreignKeys.find( (fk) => getForeignKeyName(fk) === fkName || compareForeignKey(fk, { columns: [dbFK.column] }) ); if (!drizzleFK) { changes.push({ change: `ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);` }); continue; } } for (const drizzleForeignKey of metadata.foreignKeys) { const isDbFk = Object.keys(dbTable.foreignKeys).find((fk) => { let foreignKey = dbTable.foreignKeys[fk]; return fk === getForeignKeyName(drizzleForeignKey) || compareForeignKey(drizzleForeignKey, { columns: [foreignKey.column] }); }); if (!isDbFk) { if (drizzleForeignKey) { const fkName = getForeignKeyName(drizzleForeignKey); if (fkName) { changes.push({ change: `ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;` }); } else { const columns = drizzleForeignKey.columns; const columnNames = columns?.length ? columns.map((c) => c.name).join(", ") : "unknown columns"; console.warn( `⚠️ Drizzle model for table '${tableName}' does not provide a name for FOREIGN KEY constraint on columns: ${columnNames}` ); } } } } } } return { changes, preMigrations }; } const updateMigration = async (options) => { try { let version = await loadMigrationVersion(options.output); const prevVersion = version; if (version < 1) { console.log( `⚠️ Initial migration not found. Run "npx forge-sql-orm migrations:create" first.` ); process.exit(0); } version += 1; const connection = await mysql.createConnection({ host: options.host, port: options.port, user: options.user, password: options.password, database: options.dbName }); try { const dbSchema = await getDatabaseSchema(connection, options.dbName); const schemaPath = path.resolve(options.entitiesPath, "schema.ts"); if (!fs.existsSync(schemaPath)) { throw new Error(`Schema file not found at: ${schemaPath}`); } const schemaModule = await import(schemaPath); if (!schemaModule) { throw new Error(`Invalid schema file at: ${schemaPath}. 
Schema must export tables.`); } const drizzleSchema = {}; const tables = Object.values(schemaModule); tables.forEach((table) => { const metadata = forgeSqlOrm.getTableMetadata(table); if (metadata.tableName) { const columns = {}; Object.entries(metadata.columns).forEach(([name, column]) => { columns[name] = { type: column.dataType, notNull: column.notNull, autoincrement: column.autoincrement, columnType: column.columnType, name: column.name, default: (() => { if (!column.hasDefault) { return void 0; } const defaultValue = column.default; if (typeof defaultValue === "object" && defaultValue !== null && typeof defaultValue.toQuery === "function") { return defaultValue.toQuery({}).sql; } return String(defaultValue); })(), getSQLType: () => column.getSQLType() }; }); drizzleSchema[metadata.tableName] = columns; } }); if (Object.keys(drizzleSchema).length === 0) { throw new Error(`No valid tables found in schema at: ${schemaPath}`); } console.log("Found tables:", Object.keys(drizzleSchema)); const createStatements = filterWithPreviousMigration( generateSchemaChanges(drizzleSchema, dbSchema, schemaModule), prevVersion, options.output ); if (createStatements.changes.length) { const migrationFile = generateMigrationFile$1(createStatements, version); if (saveMigrationFiles$1(migrationFile, version, options.output)) { console.log(`✅ Migration successfully updated!`); } process.exit(0); } else { console.log(`⚠️ No new migration changes detected.`); process.exit(0); } } finally { await connection.end(); } } catch (error) { console.error(`❌ Error during migration update:`, error); process.exit(1); } }; function generateMigrationUUID(version) { const now = /* @__PURE__ */ new Date(); const timestamp = now.getTime(); return `MIGRATION_V${version}_${timestamp}`; } function generateMigrationFile(createStatements, version) { const uniqId = generateMigrationUUID(version); const migrationLines = createStatements.map( (stmt, index) => ` .enqueue("${uniqId}_${index}", "${stmt}")` // eslint-disable-line no-useless-escape ).join("\n"); return `import { MigrationRunner } from "@forge/sql/out/migration"; export default (migrationRunner: MigrationRunner): MigrationRunner => { return migrationRunner ${migrationLines}; };`; } function saveMigrationFiles(migrationCode, version, outputDir) { if (!fs.existsSync(outputDir)) { fs.mkdirSync(outputDir, { recursive: true }); } const migrationFilePath = path.join(outputDir, `migrationV${version}.ts`); const migrationCountPath = path.join(outputDir, `migrationCount.ts`); const indexFilePath = path.join(outputDir, `index.ts`); fs.writeFileSync(migrationFilePath, migrationCode); fs.writeFileSync(migrationCountPath, `export const MIGRATION_VERSION = ${version};`); const indexFileContent = `import { MigrationRunner } from "@forge/sql/out/migration"; import { MIGRATION_VERSION } from "./migrationCount"; export type MigrationType = ( migrationRunner: MigrationRunner, ) => MigrationRunner; export default async ( migrationRunner: MigrationRunner, ): Promise<MigrationRunner> => { for (let i = 1; i <= MIGRATION_VERSION; i++) { const migrations = (await import(\`./migrationV\${i}\`)) as { default: MigrationType; }; migrations.default(migrationRunner); } return migrationRunner; };`; fs.writeFileSync(indexFilePath, indexFileContent); console.log(`✅ Migration file created: ${migrationFilePath}`); console.log(`✅ Migration count file updated: ${migrationCountPath}`); console.log(`✅ Migration index file created: ${indexFilePath}`); } const dropMigration = async (options) => { try { const 
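// The generated drop migration is always written as version 1: it drops every table from the schema and clears the migration history.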
version = 1; const schemaPath = path.resolve(options.entitiesPath, "schema.ts"); if (!fs.existsSync(schemaPath)) { throw new Error(`Schema file not found at: ${schemaPath}`); } const schemaModule = await import(schemaPath); if (!schemaModule) { throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`); } const tables = Object.values(schemaModule); if (tables.length === 0) { throw new Error(`No valid tables found in schema at: ${schemaPath}`); } const tableNames = tables.map((table) => { const metadata = forgeSqlOrm.getTableMetadata(table); return metadata.tableName; }).filter(Boolean); console.log("Found tables:", tableNames); const dropStatements = forgeSqlOrm.generateDropTableStatements(tableNames); const migrationFile = generateMigrationFile(dropStatements, version); saveMigrationFiles(migrationFile, version, options.output); console.log(`✅ Migration successfully created!`); process.exit(0); } catch (error) { console.error(`❌ Error during migration creation:`, error); process.exit(1); } }; const ENV_PATH = path.resolve(process.cwd(), ".env"); dotenv.config({ path: ENV_PATH }); const saveEnvFile = (config) => { let envContent = ""; const envFilePath = ENV_PATH; if (fs.existsSync(envFilePath)) { envContent = fs.readFileSync(envFilePath, "utf8"); } const envVars = envContent.split("\n").filter((line) => line.trim() !== "" && !line.startsWith("#")).reduce((acc, line) => { const [key, ...value] = line.split("="); acc[key] = value.join("="); return acc; }, {}); Object.entries(config).forEach(([key, value]) => { envVars[`FORGE_SQL_ORM_${key.toUpperCase()}`] = value; }); const updatedEnvContent = Object.entries(envVars).map(([key, value]) => `${key}=${value}`).join("\n"); fs.writeFileSync(envFilePath, updatedEnvContent, { encoding: "utf8" }); console.log("✅ Configuration saved to .env without overwriting other variables."); }; const askMissingParams = async (config, defaultOutput, customAskMissingParams) => { const questions = []; if (!config.host) questions.push({ type: "input", name: "host", message: "Enter database host:", default: "localhost" }); if (!config.port) questions.push({ type: "input", name: "port", message: "Enter database port:", default: "3306", validate: (input) => !isNaN(parseInt(input, 10)) }); if (!config.user) questions.push({ type: "input", name: "user", message: "Enter database user:", default: "root" }); if (!config.password) questions.push({ type: "password", name: "password", message: "Enter database password:", mask: "*" }); if (!config.dbName) questions.push({ type: "input", name: "dbName", message: "Enter database name:" }); if (!config.output) questions.push({ type: "input", name: "output", message: "Enter output path:", default: defaultOutput }); if (customAskMissingParams) { customAskMissingParams(config, questions); } if (questions.length > 0) { const answers = await inquirer.prompt(questions); return { ...config, ...answers, port: parseInt(config.port ?? answers.port, 10) }; } return config; }; const getConfig = async (cmd, defaultOutput, customConfig, customAskMissingParams) => { let config = { host: cmd.host || process.env.FORGE_SQL_ORM_HOST, port: cmd.port ? parseInt(cmd.port, 10) : process.env.FORGE_SQL_ORM_PORT ? 
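// CLI options take precedence over the FORGE_SQL_ORM_* variables loaded from .env; anything still missing is prompted for interactively via askMissingParams.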
parseInt(process.env.FORGE_SQL_ORM_PORT, 10) : void 0, user: cmd.user || process.env.FORGE_SQL_ORM_USER, password: cmd.password || process.env.FORGE_SQL_ORM_PASSWORD, dbName: cmd.dbName || process.env.FORGE_SQL_ORM_DBNAME, output: cmd.output || process.env.FORGE_SQL_ORM_OUTPUT }; if (customConfig) { config = { ...config, ...customConfig() }; } const conf = await askMissingParams(config, defaultOutput, customAskMissingParams); if (cmd.saveEnv) { saveEnvFile(conf); } return conf; }; const program = new commander.Command(); program.version("1.0.0"); program.command("generate:model").description("Generate Drizzle models from the database.").option("--host <string>", "Database host").option("--port <number>", "Database port").option("--user <string>", "Database user").option("--password <string>", "Database password").option("--dbName <string>", "Database name").option("--output <string>", "Output path for entities").option("--versionField <string>", "Field name for versioning").option("--saveEnv", "Save configuration to .env file").action(async (cmd) => { const config = await getConfig( cmd, "./database/entities", () => ({ versionField: cmd.versionField || process.env.FORGE_SQL_ORM_VERSIONFIELD }), (cfg, questions) => { if (!cfg.versionField) { questions.push({ type: "input", name: "versionField", message: "Enter the field name for versioning (leave empty to skip):", default: "" }); } } ); await generateModels(config); }); program.command("migrations:create").description("Generate an initial migration for the entire database.").option("--host <string>", "Database host").option("--port <number>", "Database port").option("--user <string>", "Database user").option("--password <string>", "Database password").option("--dbName <string>", "Database name").option("--output <string>", "Output path for migrations").option("--force", "Force creation even if migrations exist").option("--saveEnv", "Save configuration to .env file").action(async (cmd) => { const config = await getConfig(cmd, "./database/migration", () => ({ force: cmd.force || false })); await createMigration(config); }); program.command("migrations:update").description("Generate a migration to update the database schema.").option("--host <string>", "Database host").option("--port <number>", "Database port").option("--user <string>", "Database user").option("--password <string>", "Database password").option("--dbName <string>", "Database name").option("--output <string>", "Output path for migrations").option("--entitiesPath <string>", "Path to the folder containing entities").option("--saveEnv", "Save configuration to .env file").action(async (cmd) => { const config = await getConfig( cmd, "./database/migration", () => ({ entitiesPath: cmd.entitiesPath || process.env.FORGE_SQL_ORM_ENTITIESPATH }), (cfg, questions) => { if (!cfg.entitiesPath) questions.push({ type: "input", name: "entitiesPath", message: "Enter the path to entities:", default: "./database/entities" }); } ); await updateMigration(config); }); program.command("migrations:drop").description("Generate a migration to drop all tables and clear migrations history.").option("--host <string>", "Database host").option("--port <number>", "Database port").option("--user <string>", "Database user").option("--password <string>", "Database password").option("--dbName <string>", "Database name").option("--output <string>", "Output path for migrations").option("--entitiesPath <string>", "Path to the folder containing entities").option("--saveEnv", "Save configuration to .env 
file").action(async (cmd) => { const config = await getConfig( cmd, "./database/migration", () => ({ entitiesPath: cmd.entitiesPath || process.env.FORGE_SQL_ORM_ENTITIESPATH }), (cfg, questions) => { if (!cfg.entitiesPath) questions.push({ type: "input", name: "entitiesPath", message: "Enter the path to entities:", default: "./database/entities" }); } ); await dropMigration(config); }); program.parse(process.argv); exports.program = program; //# sourceMappingURL=cli.js.map