quick-erd
quick and easy text-based ERD + code generator for migrations, queries, TypeScript types and ORM entities
Object.defineProperty(exports, "__esModule", { value: true });
exports.detectSrcDir = detectSrcDir;
exports.setupSqlite = setupSqlite;
exports.setupTypescript = setupTypescript;
exports.setupPnpm = setupPnpm;
exports.setupNpmScripts = setupNpmScripts;
exports.setupGitIgnore = setupGitIgnore;
exports.setupEnvFile = setupEnvFile;
exports.setupKnexTsFile = setupKnexTsFile;
exports.setupKnexFile = setupKnexFile;
exports.setupKnexMigration = setupKnexMigration;
exports.generateAutoMigrate = generateAutoMigrate;
const fastest_levenshtein_1 = require("fastest-levenshtein");
const fs_1 = require("fs");
const path_1 = require("path");
const util_1 = require("util");
const file_1 = require("../utils/file");
const mysql_to_text_1 = require("./mysql-to-text");
const pg_to_text_1 = require("./pg-to-text");
const sort_tables_1 = require("./sort-tables");
const sqlite_parser_1 = require("./sqlite-parser");
const text_to_knex_1 = require("./text-to-knex");
const text_to_sqlite_1 = require("./text-to-sqlite");
const mssql_to_text_1 = require("./mssql-to-text");
const enum_1 = require("../core/enum");
const table_1 = require("../core/table");
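/**
 * Picks the directory where generated source files (db.ts, env.ts, knex.ts,
 * proxy.ts, types.ts) are placed: 'src' or 'server' if one exists,
 * otherwise the project root ('.').
 */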
function detectSrcDir() {
for (const dir of ['src', 'server']) {
if ((0, fs_1.existsSync)(dir)) {
return dir;
}
}
return '.';
}
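/**
 * Sets up better-sqlite3 for the project: skips if <srcDir>/db.ts already
 * exists, otherwise registers the better-sqlite3 related dependencies and
 * writes a db.ts that opens the database in safe mode via
 * better-sqlite3-schema, resolving the db file path one level up when the
 * process runs from a dist/ directory.
 */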
function setupSqlite(options) {
const dbTsFile = (0, path_1.join)(options.srcDir, 'db.ts');
if ((0, fs_1.existsSync)(dbTsFile)) {
return;
}
(0, file_1.addDependencies)('@types/better-sqlite3', '^7.6.13', 'dev');
(0, file_1.addDependencies)('@types/integer', '^4.0.3', 'dev');
(0, file_1.addDependencies)('better-sqlite3', '^11.9.1');
(0, file_1.addDependencies)('better-sqlite3-schema', '^3.1.7');
(0, file_1.addDependencies)('better-sqlite3-proxy', '^2.10.1');
const code = `
import { toSafeMode, newDB, DBInstance } from 'better-sqlite3-schema'
import { basename, join } from 'path'
function resolveFile(file: string) {
return basename(process.cwd()) == 'dist' ? join('..', file) : file
}
export const dbFile = resolveFile(${(0, util_1.inspect)(options.dbFile)})
export const db: DBInstance = newDB({
path: dbFile,
migrate: false,
})
toSafeMode(db)
`;
(0, file_1.writeSrcFile)(dbTsFile, code);
return;
}
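/**
 * Registers typescript, ts-node and @types/node as dev dependencies and
 * creates a default tsconfig.json when none exists.
 */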
function setupTypescript() {
(0, file_1.addDependencies)('typescript', '^5.8.3', 'dev');
(0, file_1.addDependencies)('ts-node', '^10.9.2', 'dev');
(0, file_1.addDependencies)('@types/node', '^22.14.1', 'dev');
setupTsConfigFile();
}
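/**
 * Allow-lists better-sqlite3 and esbuild in pnpm.onlyBuiltDependencies
 * (so pnpm is permitted to run their install/build scripts) whenever those
 * packages are referenced in package.json.
 */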
function setupPnpm() {
let file = 'package.json';
let text = (0, fs_1.readFileSync)(file).toString();
let pkg = JSON.parse(text);
pkg.pnpm ||= {};
pkg.pnpm.onlyBuiltDependencies ||= [];
let deps = ['better-sqlite3', 'esbuild'];
let changed = false;
for (let dep of deps) {
if (text.includes(dep) && !pkg.pnpm.onlyBuiltDependencies.includes(dep)) {
pkg.pnpm.onlyBuiltDependencies.push(dep);
changed = true;
}
}
if (changed) {
text = JSON.stringify(pkg, null, 2);
(0, file_1.writeSrcFile)(file, text);
}
}
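/**
 * Writes a minimal tsconfig.json (es2022 target, commonjs modules, strict
 * mode, dist/ output) unless the file already exists.
 */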
function setupTsConfigFile() {
const file = 'tsconfig.json';
if ((0, fs_1.existsSync)(file))
return;
const config = {
compilerOptions: {
target: 'es2022',
module: 'commonjs',
esModuleInterop: true,
forceConsistentCasingInFileNames: true,
strict: true,
skipLibCheck: true,
incremental: true,
outDir: 'dist',
},
exclude: ['dist'],
};
const text = JSON.stringify(config, null, 2);
(0, file_1.writeSrcFile)(file, text);
}
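/**
 * Adds the db:* npm scripts (ui, setup, dev, migrate, plan, rename, update,
 * gen-proxy or gen-types, and optionally seed) unless scripts containing
 * both 'auto-migrate' and 'erd-to-proxy'/'erd-to-types' already exist.
 * The scripts are chained with run-s from npm-run-all; sqlite projects
 * generate a proxy.ts, other clients generate a types.ts.
 */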
function setupNpmScripts(options) {
const scripts = (0, file_1.readNpmScripts)();
function hasScript(pattern) {
return Object.values(scripts).some(script => script.includes(pattern));
}
if (hasScript('auto-migrate') &&
(hasScript('erd-to-proxy') || hasScript('erd-to-types'))) {
return;
}
(0, file_1.addDependencies)('npm-run-all', '^4.1.5', 'dev');
const toFile = (filename) => {
if (options.srcDir == '.')
return filename;
return (0, path_1.join)(options.srcDir, filename);
};
const newScripts = {
'db:ui': 'erd-ui erd.txt',
'db:setup': 'npm run db:migrate',
'db:dev': 'run-s db:migrate db:plan db:update',
'db:migrate': 'knex migrate:latest',
};
let seed = 'db:seed' in scripts ? 'db:seed' : 'seed' in scripts ? 'seed' : '';
if (!seed && (0, fs_1.existsSync)('seed.ts')) {
seed = 'db:seed';
newScripts[seed] = 'ts-node seed.ts';
}
else if (!seed && (0, fs_1.existsSync)('seed')) {
seed = 'db:seed';
newScripts[seed] = 'knex seed:run';
}
if (seed) {
newScripts['db:setup'] = 'run-s db:migrate ' + seed;
}
if (options.db_client.includes('sqlite')) {
const proxyFile = toFile('proxy.ts');
newScripts['db:plan'] = `auto-migrate ${options.dbFile} < erd.txt`;
newScripts['db:rename'] =
`auto-migrate --rename ${options.dbFile} < erd.txt`;
newScripts['db:update'] = `run-s db:migrate db:gen-proxy`;
newScripts['db:gen-proxy'] = `erd-to-proxy < erd.txt > ${proxyFile}`;
}
else {
const typesFile = toFile('types.ts');
newScripts['db:plan'] = `auto-migrate ${options.db_client} < erd.txt`;
newScripts['db:rename'] =
`auto-migrate --rename ${options.db_client} < erd.txt`;
newScripts['db:update'] = `run-s db:migrate db:gen-types`;
newScripts['db:gen-types'] = `erd-to-types < erd.txt > ${typesFile}`;
}
(0, file_1.addNpmScripts)(newScripts);
}
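/**
 * Appends the usual entries (node_modules, dist, *.tgz, .env variants) to
 * the root .gitignore and, when a sqlite db file is used, ignores the
 * database and dump files in that file's directory.
 */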
function setupGitIgnore(options) {
(0, file_1.addGitIgnore)('.gitignore', [
'node_modules',
'*.tgz',
'dist',
'.env',
'.env.*',
'!.env.example',
]);
if (options.dbFile) {
const dir = (0, path_1.dirname)(options.dbFile);
const file = (0, path_1.join)(dir, '.gitignore');
(0, file_1.addGitIgnore)(file, [
'*.sqlite3',
'*.sqlite3-shm',
'*.sqlite3-wal',
'dump.sql',
'*.xz',
]);
}
}
const defaultPorts = {
mysql: 3306,
pg: 5432,
postgresql: 5432,
};
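/**
 * Writes <srcDir>/env.ts, which loads .env via dotenv and validates the
 * DB_* variables with populate-env (halting on missing values); DB_PORT
 * defaults to the standard port of the chosen db client, or 0 when unknown.
 */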
function setupEnvFile(options) {
const file = (0, path_1.join)(options.srcDir, 'env.ts');
if ((0, fs_1.existsSync)(file)) {
return;
}
(0, file_1.addDependencies)('dotenv', '^16.5.0');
(0, file_1.addDependencies)('populate-env', '^2.3.1');
const code = `
import { config } from 'dotenv'
import populateEnv from 'populate-env'
config()
export const env = {
DB_HOST: 'localhost',
DB_PORT: ${defaultPorts[options.db_client] || 0},
DB_NAME: '',
DB_USERNAME: '',
DB_PASSWORD: '',
DB_SSL: 'lax', // 'required' or 'lax' or 'false'
}
populateEnv(env, { mode: 'halt' })
`;
(0, file_1.writeSrcFile)(file, code);
}
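/**
 * Writes <srcDir>/knex.ts, which creates the shared Knex instance from the
 * 'development' config in knexfile.ts; the relative require path is derived
 * from the depth of srcDir.
 */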
function setupKnexTsFile(options) {
const file = (0, path_1.join)(options.srcDir, 'knex.ts');
if ((0, fs_1.existsSync)(file)) {
return;
}
let importDir = options.srcDir
.split('/')
.map(part => (part == '.' ? part : '..'))
.join('/');
let code = `
import Knex from 'knex'
/* eslint-disable @typescript-eslint/no-var-requires */
let configs = require('${importDir}/knexfile')
export const knex = Knex(configs.development)
`;
(0, file_1.writeSrcFile)(file, code);
}
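/**
 * Writes knexfile.ts (and registers knex as a dependency) unless it already
 * exists. sqlite configs point the connection at dbFile from db.ts; other
 * clients read host/port/credentials from env.ts and map the ssl option
 * ('required' / 'lax' / other) onto rejectUnauthorized true / false / no ssl.
 */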
function setupKnexFile(options) {
const { srcDir, db_client, ssl } = options;
const file = 'knexfile.ts';
if ((0, fs_1.existsSync)(file)) {
return;
}
(0, file_1.addDependencies)('knex', '^3.1.0');
let importDir = srcDir;
if (!importDir.startsWith('.')) {
importDir = './' + srcDir;
}
let code;
if (db_client.includes('sqlite')) {
code = `
import type { Knex } from 'knex'
import { dbFile } from '${importDir}/db'
const config: { [key: string]: Knex.Config } = {
development: {
client: ${(0, util_1.inspect)(db_client)},
useNullAsDefault: true,
connection: {
filename: dbFile,
},
}
}
module.exports = config;
`;
}
else {
code = `
import type { Knex } from 'knex'
import { env } from '${importDir}/env'
const config: { [key: string]: Knex.Config } = {
development: {
client: ${(0, util_1.inspect)(db_client)},
connection: {
database: env.DB_NAME,
user: env.DB_USERNAME,
password: env.DB_PASSWORD,
host: env.DB_HOST,
port: env.DB_PORT,
multipleStatements: true,
ssl: ${ssl == 'required' ? '{ rejectUnauthorized: true }' : ssl == 'lax' ? '{ rejectUnauthorized: false }' : 'false'},
},
}
}
module.exports = config;
`;
}
(0, file_1.writeSrcFile)(file, code);
}
const migrations_dir = 'migrations';
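/**
 * Returns the values unique to each input as a pair: [only in aArray, only in bArray].
 */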
function diffArray(aArray, bArray) {
const aSet = new Set(aArray);
const bSet = new Set(bArray);
const aDiff = [];
const bDiff = [];
for (const a of aSet) {
if (bSet.has(a))
continue;
aDiff.push(a);
}
for (const b of bSet) {
if (aSet.has(b))
continue;
bDiff.push(b);
}
return [aDiff, bDiff];
}
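/**
 * Diffs the live database schema against the parsed ERD and, when there are
 * changes, writes a new 'auto-migrate' knex migration (TypeScript) with the
 * generated up/down code. Internal bookkeeping tables are skipped, and the
 * command refuses to run while earlier migrations are still pending.
 */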
async function setupKnexMigration(options) {
if (!(0, fs_1.existsSync)(migrations_dir)) {
(0, fs_1.mkdirSync)(migrations_dir);
}
const { knex, db_client } = options;
await checkPendingMigrations(knex);
log('Scanning existing database schema...');
let existing_table_list = await loadTableList(knex, db_client);
existing_table_list = existing_table_list.filter(table => !(0, table_1.isInternalTable)(table.name));
const { up_lines, down_lines } = generateAutoMigrate({
existing_table_list,
parsed_table_list: options.parseResult.table_list,
detect_rename: options.detect_rename,
db_client: options.db_client,
});
if (up_lines.length === 0 && down_lines.length === 0) {
log('No migration is needed.');
}
else {
const code = `
import { Knex } from 'knex'
// prettier-ignore
export async function up(knex: Knex): Promise<void> {
${up_lines.join('\n')}
}
// prettier-ignore
export async function down(knex: Knex): Promise<void> {
${down_lines.join('\n')}
}
`.replaceAll('{\n\n', '{\n');
let file = await knex.migrate.make('auto-migrate', { extension: 'ts' });
file = file.replace((0, path_1.join)(process.cwd(), migrations_dir), migrations_dir);
(0, file_1.writeSrcFile)(file, code);
}
await knex.destroy();
}
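/**
 * Core schema-diff routine: compares the existing tables with the parsed ERD
 * and returns the knex statements for the up and down migrations. Covers
 * table and column renames (optional, matched by Levenshtein distance),
 * created and dropped tables/columns, type, nullable, unique, primary-key
 * and foreign-key changes, with sqlite/postgres specific handling for enums
 * and for type differences that would produce no effective change.
 * A generated up line looks like: " await knex.schema.renameTable('old', 'new')".
 */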
function generateAutoMigrate(options) {
const { db_client } = options;
const is_sqlite = db_client.includes('sqlite');
const is_postgres = db_client.includes('postgres') || db_client == 'pg';
const up_lines = [];
const down_lines = [];
const tailing_up_lines = [];
const leading_down_lines = [];
const renamed_tables = [];
// detect renamed / deleted tables
const [diff_existing_table_names, diff_parsed_table_names] = diffArray(options.existing_table_list.map(table => table.name), options.parsed_table_list.map(table => table.name));
for (let i = 0; i < diff_existing_table_names.length; i++) {
const existing_table_name = diff_existing_table_names[i];
const existing_table = options.existing_table_list.find(table => table.name === existing_table_name);
if (!existing_table)
continue;
// detect rename table
if (options.detect_rename &&
options.existing_table_list.length === options.parsed_table_list.length) {
const parsed_table_name = (0, fastest_levenshtein_1.closest)(existing_table_name, diff_parsed_table_names);
const parsed_table = options.parsed_table_list.find(table => table.name === parsed_table_name);
if (!parsed_table)
continue;
up_lines.push(` await knex.schema.renameTable('${existing_table_name}', '${parsed_table_name}')`);
down_lines.push(` await knex.schema.renameTable('${parsed_table_name}', '${existing_table_name}')`);
renamed_tables.push(parsed_table_name);
// remove matched pair
diff_existing_table_names.splice(i, 1);
i--;
const idx = diff_parsed_table_names.indexOf(parsed_table_name);
diff_parsed_table_names.splice(idx, 1);
continue;
}
// detect deleted table
tailing_up_lines.push(` await knex.schema.dropTableIfExists('${existing_table_name}')`);
leading_down_lines.unshift((0, text_to_knex_1.toKnexCreateTableCode)(existing_table, db_client));
}
// detect new / modified tables
(0, sort_tables_1.sortTables)(options.parsed_table_list).forEach(table => {
const { name, field_list } = table;
const existing_table = options.existing_table_list.find(table => table.name === name);
if (!existing_table) {
if (renamed_tables.includes(name))
return;
up_lines.push((0, text_to_knex_1.toKnexCreateTableCode)(table, db_client));
down_lines.unshift(` await knex.schema.dropTableIfExists('${name}')`);
return;
}
const table_up_lines = [];
const table_down_lines = [];
const raw_up_lines = [];
const raw_down_lines = [];
const new_columns = [];
const removed_columns = [];
function compareColumn(field, existing_field) {
// avoid non-effective migration
// don't distinguish between datetime and timestamp
// knex translates 'timestamp' into 'datetime' for a sqlite db when running the schema query builder
if (is_sqlite &&
((field.type === 'datetime' && existing_field.type == 'timestamp') ||
(existing_field.type === 'datetime' && field.type == 'timestamp'))) {
field.type = existing_field.type;
}
// avoid non-effective migration
// don't distinguish between int and integer
// don't distinguish between int(10) and integer
if (is_sqlite &&
(((field.type === 'int' || field.type.startsWith('int(')) &&
existing_field.type == 'integer') ||
((existing_field.type === 'int' ||
existing_field.type.startsWith('int(')) &&
field.type == 'integer'))) {
field.type = existing_field.type;
}
// avoid non-effective migration
// don't distinguish enum value ordering
if (field.type.match(/^enum/i) &&
existing_field.type.match(/^enum/i) &&
(0, enum_1.parseEnumValues)(field.type).sort().join() ==
(0, enum_1.parseEnumValues)(existing_field.type).sort().join()) {
field.type = existing_field.type;
}
// avoid non-effective migration
// don't distinguish signed and unsigned in postgres and sqlite
if (is_postgres || is_sqlite) {
field.is_unsigned = existing_field.is_unsigned;
}
// avoid non-effective migration
// don't distinguish varchar and nvarchar in sqlite
if (is_sqlite && field.type != existing_field.type) {
if (field.type.startsWith('nvarchar') &&
existing_field.type.startsWith('varchar')) {
field.type = field.type.slice(1);
}
else if (field.type.startsWith('varchar') &&
existing_field.type.startsWith('nvarchar')) {
existing_field.type = existing_field.type.slice(1);
}
}
if (field.type !== existing_field.type ||
field.is_unsigned !== existing_field.is_unsigned) {
const is_both_enum = field.type.match(/^enum/i) && existing_field.type.match(/^enum/i);
if (is_sqlite && is_both_enum) {
raw_up_lines.push(alterSqliteEnum(table, field));
raw_down_lines.unshift(alterSqliteEnum(table, existing_field));
}
else if (is_postgres && is_both_enum) {
table_up_lines.push(...alterPostgresEnum(table, field));
table_down_lines.unshift(...alterPostgresEnum(table, existing_field));
}
else if (is_sqlite) {
raw_up_lines.push(alterSqliteType(table, field));
raw_down_lines.unshift(alterSqliteType(table, existing_field));
}
else {
table_up_lines.push(alterType(field, db_client));
table_down_lines.unshift(alterType(existing_field, db_client));
}
}
if (field.is_primary_key !== existing_field.is_primary_key) {
table_up_lines.push(alterPrimaryKey(field));
table_down_lines.unshift(alterPrimaryKey(existing_field));
}
if (field.is_unique !== existing_field.is_unique) {
table_up_lines.push(alterUnique(field));
table_down_lines.unshift(alterUnique(existing_field));
}
if (field.is_null !== existing_field.is_null) {
if (is_sqlite) {
raw_up_lines.push(alterSqliteNullable(options.parsed_table_list, table.name, field));
raw_down_lines.unshift(alterSqliteNullable(options.existing_table_list, table.name, existing_field));
}
else {
table_up_lines.push(alterNullable(field));
table_down_lines.unshift(alterNullable(existing_field));
}
}
// add foreign key
if (field.references && !existing_field.references) {
table_up_lines.push(addForeignKey(field));
table_down_lines.unshift(dropForeignKey(field));
}
// drop foreign key
else if (!field.references && existing_field.references) {
table_up_lines.push(dropForeignKey(existing_field));
table_down_lines.unshift(addForeignKey(existing_field));
}
// change foreign key
else if (field.references &&
existing_field.references &&
(field.references.table !== existing_field.references.table ||
field.references.field !== existing_field.references.field)) {
table_up_lines.push(dropForeignKey(existing_field));
table_down_lines.unshift(addForeignKey(existing_field));
table_up_lines.push(addForeignKey(field));
table_down_lines.unshift(dropForeignKey(field));
}
}
field_list.forEach(field => {
const { name } = field;
const existing_field = existing_table.field_list.find(field => field.name === name);
// detect new columns
if (!existing_field) {
new_columns.push(field);
return;
}
compareColumn(field, existing_field);
});
// detect removed columns
existing_table.field_list.forEach(existing_field => {
const { name } = existing_field;
if (name === 'created_at' ||
name === 'updated_at' ||
table.field_list.some(field => field.name === name)) {
return;
}
removed_columns.push(existing_field);
});
// detect renamed fields
if (options.detect_rename &&
new_columns.length === removed_columns.length) {
for (let i = 0; i < new_columns.length; i++) {
const field = new_columns[i];
const new_field_name = field.name;
const existing_field_name = (0, fastest_levenshtein_1.closest)(field.name, removed_columns.map(existing_field => existing_field.name));
const j = removed_columns.findIndex(existing_field => existing_field.name === existing_field_name);
const existing_field = removed_columns[j];
compareColumn({ ...field, name: existing_field_name }, existing_field);
table_up_lines.push(renameColumn(existing_field_name, new_field_name));
table_down_lines.unshift(renameColumn(new_field_name, existing_field_name));
new_columns.splice(i, 1);
removed_columns.splice(j, 1);
i--;
}
}
function addDropColumn(field, options) {
if (is_sqlite) {
/* sqlite version */
const table = wrapSqliteName(name);
const col = wrapSqliteName(field.name);
const { references, is_unique } = field;
const quoted_field = { ...field, name: col, is_unique: false };
if (references) {
quoted_field.references = {
type: references.type,
table: wrapSqliteName(references.table),
field: wrapSqliteName(references.field),
};
}
const body = (0, text_to_sqlite_1.toSqliteColumnSql)(quoted_field);
options.raw_add_lines.push(` await knex.raw(${(0, util_1.inspect)(`alter table ${table} add column ${body}`)})`);
options.raw_drop_lines.unshift(references || field.is_unique
? ` await knex.schema.alterTable(${table}, table => table.dropColumn(${col}))`
: ` await knex.raw(${(0, util_1.inspect)(`alter table ${table} drop column ${col}`)})`);
if (is_unique) {
options.raw_add_lines.push(` await knex.schema.alterTable(${table}, table => table.unique([${col}]))`);
options.raw_drop_lines.unshift(` await knex.schema.alterTable(${table}, table => table.dropUnique([${col}]))`);
}
}
else {
/* knex version */
const name = (0, util_1.inspect)(field.name);
options.table_add_lines.push((0, text_to_knex_1.toKnexCreateColumnCode)(field, db_client));
options.table_drop_lines.unshift(`table.dropColumn(${name})`);
}
}
// add new columns
new_columns.forEach(field => {
addDropColumn(field, {
table_add_lines: table_up_lines,
table_drop_lines: table_down_lines,
raw_add_lines: raw_up_lines,
raw_drop_lines: raw_down_lines,
});
});
// drop removed columns
removed_columns.forEach(existing_field => {
addDropColumn(existing_field, {
table_drop_lines: table_up_lines,
table_add_lines: table_down_lines,
raw_drop_lines: raw_up_lines,
raw_add_lines: raw_down_lines,
});
});
function mergeLines(lines) {
return lines
.map(line => ' ' + line.trim())
.join('\n')
.replace(/\n\n/g, '\n');
}
if (table_up_lines.length > 0) {
up_lines.push(` await knex.schema.alterTable('${name}', table => {
${mergeLines(table_up_lines)}
})`);
}
up_lines.push(...raw_up_lines);
if (table_down_lines.length > 0) {
down_lines.unshift(` await knex.schema.alterTable('${name}', table => {
${mergeLines(table_down_lines)}
})`);
}
down_lines.unshift(...raw_down_lines);
});
up_lines.push(...tailing_up_lines);
down_lines.unshift(...leading_down_lines);
return { up_lines, down_lines };
}
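/**
 * sqlite cannot change a column definition in place, so this emits a block
 * that snapshots the column values, drops any unique or foreign-key
 * constraint on it, drops and re-adds the column with the given definition,
 * re-applies the unique constraint if needed, and copies the saved values
 * back row by row. Only nullable columns are supported.
 */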
function alterSqliteField(table, field, columnDefinition) {
if (!field.is_null) {
throw new Error(`alter non-nullable column (${table.name}.${field.name}) is not supported in sqlite`);
}
let drop_lines = '';
let add_lines = '';
if (field.is_unique) {
drop_lines += `
await knex.schema.alterTable('${table.name}', table => table.dropUnique(['${field.name}']))`;
add_lines += `
await knex.schema.alterTable('${table.name}', table => table.unique(['${field.name}']))`;
}
if (field.references) {
drop_lines += `
await knex.schema.alterTable('${table.name}', table => table.dropForeign(['${field.name}']))`;
}
const code = `
{
const rows = await knex.select('id', '${field.name}').from('${table.name}')${drop_lines}
await knex.raw('alter table \`${table.name}\` drop column \`${field.name}\`')
await knex.raw("alter table \`${table.name}\` add column ${columnDefinition}")${add_lines}
for (let row of rows) {
await knex('${table.name}').update({ ${field.name}: row.${field.name} }).where({ id: row.id })
}
}`;
return ' ' + code.trim();
}
function alterSqliteType(table, field) {
const col = wrapSqliteName(field.name);
const quoted_field = { ...field, name: col };
quoted_field.is_unique = false;
const body = (0, text_to_sqlite_1.toSqliteColumnSql)(quoted_field);
return alterSqliteField(table, field, body);
}
function alterSqliteEnum(table, field) {
const col = wrapSqliteName(field.name);
const values = field.type.replace(/enum/i, '');
const columnDefinition = `${col} text check (${col} in ${values})`;
return alterSqliteField(table, field, columnDefinition);
}
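/**
 * Changing nullability in sqlite requires recreating the table, so this
 * collects the target table plus every table that (transitively) references
 * it, and emits code that dumps their rows, drops and recreates the tables,
 * and re-inserts the saved rows.
 */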
function alterSqliteNullable(allTables, tableName, field) {
const db_client = 'better-sqlite3';
const table = allTables.find(table => table.name == tableName);
if (!table) {
throw new Error('table not found, name: ' + tableName);
}
const involvedTables = [table];
function scanDeps(tableName) {
for (const table of allTables) {
if (involvedTables.includes(table))
continue;
for (const field of table.field_list) {
if (field.references?.table == tableName) {
involvedTables.push(table);
scanDeps(table.name);
break;
}
}
}
}
scanDeps(tableName);
function genCreateTable(table) {
const code = (0, text_to_knex_1.toKnexCreateTableCode)(table, db_client);
const lines = code.split('\n').slice(1);
lines.forEach((line, i) => {
if (i == 0) {
lines[i] = line.slice(2);
}
else {
lines[i] = ' ' + line;
}
});
return lines.join('\n');
}
const code = `
{
// alter column (${table.name}.${field.name}) to be ${field.is_null ? 'nullable' : 'non-nullable'}
${involvedTables.map(table => `let ${table.name}_rows = await knex.select('*').from('${table.name}')`).join('\n ')}
${involvedTables
.slice()
.reverse()
.map(table => `await knex.schema.dropTable('${table.name}')`)
.join('\n ')}
${involvedTables.map(genCreateTable).join('\n ')}
${involvedTables
.map(table => `for (let row of ${table.name}_rows) {
await knex.insert(row).into('${table.name}')
}`)
.join('\n ')}
}`;
return code;
}
function alterPostgresEnum(table, field) {
const lines = [];
const name = `${table.name}_${field.name}_check`;
const values = field.type.replace(/enum/i, '');
lines.push(`table.dropChecks('${name}')`);
lines.push(`table.check(\`"${field.name}" in ${values}\`, undefined, '${name}')`);
return lines;
}
function alterType(field, db_client) {
let code = 'table';
code += (0, text_to_knex_1.toKnexCreateColumnTypeCode)(field, db_client);
code += (0, text_to_knex_1.toKnexNullableCode)(field);
code += (0, text_to_knex_1.toKnexDefaultValueCode)(field);
code += '.alter()';
return code;
}
function alterPrimaryKey(field) {
if (field.is_unique) {
return `table.primary([${(0, util_1.inspect)(field.name)}])`;
}
else {
return `table.dropPrimary([${(0, util_1.inspect)(field.name)}])`;
}
}
function alterUnique(field) {
if (field.is_unique) {
return `table.unique([${(0, util_1.inspect)(field.name)}])`;
}
else {
return `table.dropUnique([${(0, util_1.inspect)(field.name)}])`;
}
}
function alterNullable(field) {
if (field.is_null) {
return `table.setNullable(${(0, util_1.inspect)(field.name)})`;
}
else {
return `table.dropNullable(${(0, util_1.inspect)(field.name)})`;
}
}
function addForeignKey(field) {
if (!field.references) {
return '';
}
return `table.foreign(${(0, util_1.inspect)(field.name)}).references(${(0, util_1.inspect)(field.references.table + '.' + field.references.field)})`;
}
function dropForeignKey(field) {
return `table.dropForeign(${(0, util_1.inspect)(field.name)})`;
}
function renameColumn(from, to) {
return `table.renameColumn(${(0, util_1.inspect)(from)}, ${(0, util_1.inspect)(to)})`;
}
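/**
 * Reads the current schema from the connected database, dispatching to the
 * sqlite / postgres / mysql / mssql scanner based on db_client.
 */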
async function loadTableList(knex, db_client) {
if (db_client.includes('sqlite')) {
const rows = await knex.raw(/* sql */ `select name, sql, type from sqlite_master`);
return (0, sqlite_parser_1.parseTableSchema)(rows);
}
if (db_client === 'pg' || db_client.includes('postgres')) {
return await (0, pg_to_text_1.scanPGTableSchema)(knex);
}
if (db_client.includes('mysql')) {
return await (0, mysql_to_text_1.scanMysqlTableSchema)(knex);
}
if (db_client.includes('mssql')) {
return await (0, mssql_to_text_1.scanMssqlTableSchema)(knex);
}
throw new Error('unknown db_client: ' + db_client);
}
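/**
 * Exits with an error when the migrations directory is non-empty but the
 * database is not migrated to the latest version; a missing knex_migrations
 * table counts as having pending migrations.
 */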
async function checkPendingMigrations(knex) {
const files = (0, fs_1.readdirSync)(migrations_dir);
if (files.length === 0) {
return;
}
const status = await knex.migrate.status().catch(async (e) => {
const hasTable = await knex.schema.hasTable('knex_migrations');
if (!hasTable) {
return -files.length;
}
throw e;
});
if (status === 0) {
return;
}
console.error('Error: not migrated to latest version.');
console.error("Please run 'npx knex migrate:latest' first, then re-run this auto-migrate command.");
process.exit(1);
}
const log = console.error.bind(console);
const quotes = ['"', "'", '`'];
function wrapSqliteName(name) {
for (const quote of quotes) {
if (name.startsWith(quote) && name.endsWith(quote)) {
name = name.slice(1, name.length - 1);
break;
}
}
return '`' + name + '`';
}