/**
 * ddl-manager — store PostgreSQL procedures and triggers in files.
 * (Compiled JavaScript output; see package.json for the version.)
 */
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DDLManager = void 0;
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const FileReader_1 = require("./fs/FileReader");
const FileWatcher_1 = require("./fs/FileWatcher");
const MainComparator_1 = require("./Comparator/MainComparator");
const MainMigrator_1 = require("./Migrator/MainMigrator");
const PostgresDriver_1 = require("./database/PostgresDriver");
const Database_1 = require("./database/schema/Database");
const getDbClient_1 = require("./database/getDbClient");
const FunctionsMigrator_1 = require("./Migrator/FunctionsMigrator");
const callsTable_1 = require("./timeline/callsTable");
const Coach_1 = require("./timeline/Coach");
const createTimelineFile_1 = require("./timeline/createTimelineFile");
const CacheComparator_1 = require("./Comparator/CacheComparator");
const Auditor_1 = require("./Auditor");
const CacheColumnGraph_1 = require("./Comparator/graph/CacheColumnGraph");
const packageJson = require("../package.json");
// Registry of active FileWatcher instances created by DDLManager.watch();
// DDLManager.stopWatch() stops and removes all of them.
const watchers = [];
/**
 * DDLManager — main entry point of ddl-manager.
 * Reads SQL definitions (functions, triggers, cache) from folders on disk,
 * compares them with the live PostgreSQL database and migrates the
 * difference. The static methods are convenience facades: each one builds
 * an instance and delegates to the instance method of the same name.
 */
class DDLManager {
/**
 * @param params.db - either a connection config or an already-connected
 *     client (distinguished by the presence of a `query` method)
 * @param params.folder - one folder path or an array of folder paths
 * @param params.throwError - rethrow the first migration error after logging
 * @param params.needLogs - print migration logs (defaults to true)
 */
constructor(params) {
var _a;
// migrators started by watch(); kept so a newer FS change can abort them
this.activeMigrators = [];
if (typeof params.folder === "string") {
this.folders = [params.folder];
}
else {
this.folders = params.folder;
}
this.folders = this.folders.map(folderPath => path_1.default.normalize(folderPath));
// only close the connection ourselves when we create it from a config;
// a caller-supplied live client (has `query`) is left open for the caller
this.needCloseConnect = !("query" in params.db);
this.dbConfig = params.db;
this.needThrowError = !!params.throwError;
// compiled form of `params.needLogs ?? true`
this.needLogs = (_a = params.needLogs) !== null && _a !== void 0 ? _a : true;
}
// Compare FS and DB, then apply the full migration.
static async build(params) {
console.log("ddl-manager v" + packageJson.version);
const ddlManager = new DDLManager(params);
return await ddlManager.build();
}
// Create only the objects that are new on disk, without dropping anything.
static async buildNew(params) {
const ddlManager = new DDLManager(params);
return await ddlManager.buildNew();
}
// Create only new cache columns and functions.
static async buildNewColumnsAndFunctions(params) {
const ddlManager = new DDLManager(params);
return await ddlManager.buildNewColumnsAndFunctions();
}
// Drop DB objects that no longer exist on disk.
static async dropOld(params) {
const ddlManager = new DDLManager(params);
return await ddlManager.dropOld();
}
// Compute and return the FS-vs-DB migration without applying it.
static async compare(params) {
const ddlManager = new DDLManager(params);
const { migration } = await ddlManager.compareDbAndFs();
return migration;
}
// Audit cache columns (see instance audit()).
static async audit(params) {
const ddlManager = new DDLManager(params);
await ddlManager.audit(params);
}
// Return the list of cache columns whose definition changed.
static async compareCache(params) {
const ddlManager = new DDLManager(params);
return await ddlManager.compareCache();
}
// Run timeline scenarios (see instance timeline()).
static async timeline(params) {
const ddlManager = new DDLManager(params);
return await ddlManager.timeline(params);
}
// Rebuild cache column values (see instance refreshCache()).
static async refreshCache(params) {
const ddlManager = new DDLManager(params);
return await ddlManager.refreshCache(params);
}
// Scan for cache columns whose stored values diverge from their definition.
static async scanBrokenColumns(params) {
const ddlManager = new DDLManager(params);
return await ddlManager.scanBrokenColumns(params);
}
// Start watching the folders; only db/folder params are honored here.
static async watch(params) {
const ddlManager = new DDLManager({
db: params.db,
folder: params.folder
});
return await ddlManager.watch();
}
// Dump DB functions/triggers into the (single) folder as .sql files.
static async dump(params) {
const ddlManager = new DDLManager({
db: params.db,
folder: params.folder
});
return await ddlManager.dump(params.unfreeze);
}
// Stop every watcher registered by watch() and empty the registry.
static stopWatch() {
watchers.forEach(watcher => {
watcher.stopWatch();
});
watchers.splice(0, watchers.length);
}
// Full migration: apply everything MainComparator found.
async build() {
const { migration, database, postgres } = await this.compareDbAndFs();
const migrateErrors = await MainMigrator_1.MainMigrator.migrate(postgres, database, migration);
this.onMigrate(postgres, migration, migrateErrors);
}
// Create-only migration (no drops).
async buildNew() {
const { migration, database, postgres } = await this.compareDbAndFs();
const migrator = new MainMigrator_1.MainMigrator(postgres, database, migration);
const migrateErrors = await migrator.createNew();
this.onMigrate(postgres, migration, migrateErrors);
}
// Create-only migration restricted to cache columns and functions.
// Unlike the other build* methods this does not go through onMigrate():
// it closes the connection itself and always throws the first error.
async buildNewColumnsAndFunctions() {
const { migration, database, postgres } = await this.compareDbAndFs();
const migrator = new MainMigrator_1.MainMigrator(postgres, database, migration);
const migrateErrors = await migrator.buildNewColumnsAndFunctions();
if (this.needCloseConnect) {
postgres.end();
}
if (migrateErrors.length) {
throw migrateErrors[0];
}
}
// Drop-only migration.
async dropOld() {
const { migration, database, postgres } = await this.compareDbAndFs();
const migrator = new MainMigrator_1.MainMigrator(postgres, database, migration);
const migrateErrors = await migrator.dropOld();
this.onMigrate(postgres, migration, migrateErrors);
}
// TODO: need tests and refactor
// Wrap every function with call-logging, run each scenario from disk,
// write a timeline file per scenario, then restore the original funcs.
async timeline(params) {
console.log("reading scenarios");
let scenarios = readScenarios(params.scenariosPath);
if (params.runOnlyScenario) {
console.log("run only " + params.runOnlyScenario);
scenarios = scenarios.filter(scenario => scenario.name === params.runOnlyScenario);
}
if (!scenarios.length) {
throw new Error("scenarios not found");
}
const db = await getDbClient_1.getDbClient(this.dbConfig);
const filesState = await this.readFS();
const postgres = await this.postgres();
let database = await postgres.load();
// migration that replaces all funcs with logging versions
const migration = await MainComparator_1.MainComparator.logAllFuncsMigration(postgres, database, filesState);
await callsTable_1.createCallsTable(db);
console.log("logging all funcs");
const migrationErrors = [];
const functionsMigrator = new FunctionsMigrator_1.FunctionsMigrator(postgres, migration, database, migrationErrors);
await functionsMigrator.createLogFuncs();
for (const scenario of scenarios) {
console.log("try run scenario " + scenario.name);
try {
await testTimelineScenario(params.outputPath, db, scenario);
}
catch (err) {
console.error(err);
throw new Error("failed scenario " + scenario.name + " with error: " + err.message);
}
}
// reload DB state and migrate back to the plain (non-logging) funcs
console.log("unlogging all funcs");
database = await postgres.load();
const unlogMigration = await MainComparator_1.MainComparator.compare(postgres, database, filesState);
await MainMigrator_1.MainMigrator.migrate(postgres, database, unlogMigration);
console.log("success");
}
// Recalculate cache column values, optionally limited to concrete
// tables/columns, with tunable timeouts/package size and file logging.
async refreshCache(params) {
var _a, _b, _c;
if (params.logToFile) {
fs_1.default.writeFileSync(params.logToFile, `[${new Date().toISOString()}] started\n`);
}
const filesState = await this.readFS();
const postgres = await this.postgres();
const database = await postgres.load();
// compiled form of:
//   params.concreteTables?.toString() ?? params.concreteColumns?.toString()
const migration = await MainComparator_1.MainComparator.refreshCache(postgres, database, filesState, (_b = (_a = params.concreteTables) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : (_c = params.concreteColumns) === null || _c === void 0 ? void 0 : _c.toString());
// timeoutOnUpdate is the legacy name for timeoutBetweenUpdates
const timeoutBetweenUpdates = params.timeoutBetweenUpdates || params.timeoutOnUpdate;
if (timeoutBetweenUpdates) {
migration.setTimeoutBetweenUpdates(timeoutBetweenUpdates);
}
if (params.timeoutPerUpdate) {
migration.setTimeoutPerUpdate(params.timeoutPerUpdate);
}
if (params.updatePackageSize) {
migration.setUpdatePackageSize(params.updatePackageSize);
}
if (params.logToFile) {
migration.logToFile(params.logToFile);
}
migration.setUpdateHooks(params);
const migrateErrors = await MainMigrator_1.MainMigrator.migrate(postgres, database, migration);
this.onMigrate(postgres, migration, migrateErrors);
if (params.logToFile) {
fs_1.default.appendFileSync(params.logToFile, `\n[${new Date().toISOString()}] finished`);
}
}
// Audit cache columns: build the dependency graph from the loaded
// database's aggregators and the cache definitions on disk, then scan.
async audit(params) {
const filesState = await this.readFS();
const postgres = await this.postgres();
const database = await postgres.load();
const graph = CacheColumnGraph_1.CacheColumnGraph.build(database.aggregators, filesState.allCache());
const scanner = new Auditor_1.CacheScanner(postgres, database, graph);
const auditor = new Auditor_1.CacheAuditor(postgres, database, filesState, graph, scanner);
await auditor.audit(params);
}
// Return cache columns whose definitions differ between FS and DB.
async compareCache() {
const filesState = await this.readFS();
const postgres = await this.postgres();
const database = await postgres.load();
const cacheComparator = new CacheComparator_1.CacheComparator(postgres, database, filesState);
const columns = cacheComparator.findChangedColumns();
return columns;
}
// Scan stored cache values for divergence from their definitions.
// NOTE(review): the graph here is built from `new Database().aggregators`
// (a fresh Database's aggregators) while audit() uses the loaded
// `database.aggregators` — looks inconsistent; confirm this is intended.
async scanBrokenColumns(params) {
const filesState = await this.readFS();
const postgres = await this.postgres();
const database = await postgres.load();
const scanner = new Auditor_1.CacheScanner(postgres, database, CacheColumnGraph_1.CacheColumnGraph.build(new Database_1.Database().aggregators, filesState.allCache()));
const columns = await scanner.scan(params);
return columns;
}
// Shared first phase of the build/compare commands: read FS, load DB,
// compute the migration between them.
async compareDbAndFs() {
const filesState = await this.readFS();
const postgres = await this.postgres();
const database = await postgres.load();
const migration = await MainComparator_1.MainComparator.compare(postgres, database, filesState);
if (this.needLogs === false) {
migration.silent();
}
return { migration, postgres, database };
}
// Read all configured folders into a FilesState.
// NOTE(review): FileReader.read reports per-file errors through the
// callback; when needThrowError is set, an error delivered synchronously
// rejects before the resolve() below (first settle wins) — otherwise
// errors are only logged and the (possibly partial) state is returned.
readFS() {
return new Promise((resolve, reject) => {
const filesState = FileReader_1.FileReader.read(this.folders, (err) => {
if (this.needThrowError) {
reject(err);
}
else {
console.log(err);
}
});
resolve(filesState);
});
}
// Common post-migration handling: close an owned connection, print logs,
// and (when configured) rethrow the first migration error.
onMigrate(postgres, migration, migrateErrors) {
if (this.needCloseConnect) {
postgres.end();
}
if (this.needLogs) {
migration.log();
if (!migrateErrors.length) {
// tslint:disable-next-line: no-console
console.log(`ddl-manager v${packageJson.version} build success`);
}
}
if (this.needThrowError && migrateErrors.length) {
throw migrateErrors[0];
}
}
// Watch the folders and re-migrate on every FS change.
// Returns the FileWatcher (also registered for stopWatch()).
async watch() {
const postgres = await this.postgres();
const database = await postgres.load();
const fs = await FileWatcher_1.FileWatcher.watch(this.folders);
// mutable state shared with onChangeFs(): last known DB + FS snapshot
const state = {
database, fs
};
fs.on("change", () => {
// fire-and-forget: onChangeFs handles its own abort/overlap logic
void this.onChangeFs(postgres, state);
});
fs.on("error", (err) => {
console.error(err.message);
});
watchers.push(fs);
return fs;
}
// Handle one FS change: compare against the cached DB state, abort any
// in-flight migrations, run the new one, and on success fold the
// migration back into the cached state.
async onChangeFs(postgres, state) {
// if another migration is running, its partial effects make the cached
// database state stale — reload it before comparing
const needAbort = this.activeMigrators.length > 0;
if (needAbort) {
state.database = await postgres.load();
}
const migration = await MainComparator_1.MainComparator.compare(postgres, state.database, state.fs.state);
const migrator = new MainMigrator_1.MainMigrator(postgres, state.database, migration);
for (const migrator of this.activeMigrators) {
console.log("-- aborted migration --");
migrator.abort();
}
this.activeMigrators = [migrator];
const migrateErrors = await migrator.migrate();
if (!migrator.isAborted()) {
state.database.applyMigration(migration);
migration.log();
if (migrateErrors.length) {
console.error(migrateErrors);
}
}
this.activeMigrators = this.activeMigrators.filter(activeMigrator => activeMigrator !== migrator);
}
// Write every DB function (plus its triggers) into .sql files under the
// first configured folder, one file per function name, grouped by schema
// (or by the trigger's table schema/name when the function is a trigger).
async dump(unfreeze = false) {
const folder = this.folders[0];
if (!fs_1.default.existsSync(folder)) {
throw new Error(`folder "${folder}" not found`);
}
const postgres = await this.postgres();
const dbState = await postgres.load();
// cache of sub-folders already created this run
const existsFolders = {};
// functions from database
const functions = dbState.functions.slice();
for (let i = 0, n = functions.length; i < n; i++) {
const func = functions[i];
const sameFuncs = [func];
// find functions with same name
// expected sorted array by schema/name
for (let j = i + 1; j < n; j++) {
const nextFunc = functions[j];
const isSame = (nextFunc.schema === func.schema &&
nextFunc.name === func.name);
if (isSame) {
sameFuncs.push(nextFunc);
// remove from stack
functions.splice(j, 1);
// after splice, length is less
j--;
n--;
}
else {
break;
}
}
// generate sql
let sql = "";
let firstTrigger = false;
sameFuncs.forEach((sameFunc, j) => {
if (j > 0) {
sql += ";\n";
sql += "\n";
}
sql += sameFunc.toSQL();
});
// file can contain triggers
// find triggers, who call this func
const isTrigger = sameFuncs.some(sameFunc => sameFunc.returns.type === "trigger");
if (isTrigger) {
const triggers = dbState.getTriggersByProcedure({
schema: func.schema,
name: func.name
});
if (triggers.length) {
// remember the first trigger: it decides the output sub-folder
if (!firstTrigger) {
firstTrigger = triggers[0];
}
triggers.forEach(trigger => {
sql += ";\n";
sql += "\n";
sql += trigger.toSQL();
});
}
}
// create dirs and file
const fileName = func.name + ".sql";
// create folder public or some schema
let subFolder = func.schema;
if (firstTrigger) {
subFolder = firstTrigger.table.schema;
}
let dirPath = folder + "/" + subFolder;
if (!existsFolders[subFolder]) {
if (!fs_1.default.existsSync(dirPath)) {
fs_1.default.mkdirSync(dirPath);
}
existsFolders[subFolder] = true;
}
// trigger functions are nested one level deeper: schema/table/
if (firstTrigger) {
subFolder = firstTrigger.table.schema + "/" + firstTrigger.table.name;
dirPath = folder + "/" + subFolder;
if (!existsFolders[subFolder]) {
if (!fs_1.default.existsSync(dirPath)) {
fs_1.default.mkdirSync(dirPath);
}
existsFolders[subFolder] = true;
}
}
// save sql
fs_1.default.writeFileSync(dirPath + "/" + fileName, sql + ";");
}
if (unfreeze) {
await postgres.unfreezeAll(dbState);
}
if (this.needCloseConnect) {
postgres.end();
}
}
// Create a PostgresDriver over a (possibly newly opened) client.
async postgres() {
const db = await getDbClient_1.getDbClient(this.dbConfig);
const postgres = new PostgresDriver_1.PostgresDriver(db);
return postgres;
}
}
exports.DDLManager = DDLManager;
/**
 * Read every timeline scenario from the sub-directories of scenariosPath.
 *
 * @param {string} scenariosPath - directory whose children are scenario folders
 * @returns {Array<{name: string, beforeSQL: string, sql: string}>}
 */
function readScenarios(scenariosPath) {
    const scenarioDirNames = fs_1.default.readdirSync(scenariosPath);
    return scenarioDirNames.map(
        (dirName) => readScenario(scenariosPath, dirName)
    );
}
/**
 * Load one timeline scenario from disk.
 * Expects the scenario folder to contain "before.sql" (setup) and
 * "test.sql" (the statements to trace).
 *
 * @param {string} scenariosPath - root folder of all scenarios
 * @param {string} scenarioDirName - scenario folder name, used as the scenario name
 * @returns {{name: string, beforeSQL: string, sql: string}}
 */
function readScenario(scenariosPath, scenarioDirName) {
    const readText = (fileName) => fs_1.default.readFileSync(
        path_1.default.join(scenariosPath, scenarioDirName, fileName)
    ).toString();
    return {
        name: scenarioDirName,
        beforeSQL: readText("before.sql"),
        sql: readText("test.sql")
    };
}
/**
 * Run a single timeline scenario against the database and write the
 * resulting call-tree timeline file to outputPath.
 *
 * @param {string} outputPath - folder for the generated timeline file
 * @param db - database client with a query() method
 * @param scenario - scenario to execute (name, setup SQL, traced SQL)
 */
async function testTimelineScenario(outputPath, db, scenario = {
    name: "test",
    beforeSQL: "select 1",
    sql: "select 1"
}) {
    // run the setup SQL, then start from an empty calls log
    await db.query(scenario.beforeSQL);
    await callsTable_1.clearCallsLogs(db);
    // execute the traced statements; logged funcs record their calls
    await db.query(scenario.sql);
    const callLogs = await callsTable_1.downloadLogs(db);
    const rootCalls = Coach_1.parseCalls(callLogs);
    createTimelineFile_1.createTimelineFile({
        rootCalls,
        outputPath,
        name: scenario.name
    });
}
//# sourceMappingURL=DDLManager.js.map