// qforce — Commands to help with Salesforce development.
// Compiled output (JavaScript) of the dev:migrate command.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = require("@oclif/command");
const cli_ux_1 = require("cli-ux");
const moment = require("moment");
const utility_1 = require("../../helper/utility");
const migPlanSamples_1 = require("../../helper/migPlanSamples");
const random_1 = require("../../helper/random");
const sfdx = require('sfdx-node');
const path = require('path');
const fs = require('fs');
const csvjson = require('csvjson');
const lodash = require('lodash');
const sha1 = require('js-sha1');
/**
 * qforce dev:migrate — migrate data between Salesforce orgs, driven by a
 * migration plan file (default: `migrationPlan.js`).
 *
 * For each step in the plan this command can: execute anonymous Apex,
 * generate data locally, query the source (or destination) org to CSV/JSON,
 * and bulk upsert/delete the resulting CSV into the destination org,
 * polling the bulk job until completion.
 */
class Migrate extends command_1.Command {
    async run() {
        // Optional workspace settings. Default to {} so the property reads
        // below (settings.migrateBasePath, settings.ignoreError) are safe
        // when .qforce/settings.json does not exist — previously `settings`
        // stayed undefined and those reads threw a TypeError.
        let settings = {};
        if (fs.existsSync(utility_1.getAbsolutePath('.qforce/settings.json'))) {
            settings = JSON.parse(fs.readFileSync(utility_1.getAbsolutePath('.qforce/settings.json')));
        }
        const { flags } = this.parse(Migrate);
        // --sample: copy the bundled sample plan files into the cwd and exit.
        if (flags.sample) {
            for (let key in migPlanSamples_1.allSamples) {
                fs.writeFileSync(utility_1.getAbsolutePath(key + '.js'), migPlanSamples_1.allSamples[key], { encoding: 'utf-8' });
            }
            return;
        }
        // Resolve the plan file: try relative to cwd first, then fall back
        // to settings.migrateBasePath.
        let file = flags.file || 'migrationPlan.js';
        if (!fs.existsSync(utility_1.getAbsolutePath(file)) && settings.migrateBasePath) {
            file = settings.migrateBasePath + '/' + file;
        }
        if (!fs.existsSync(utility_1.getAbsolutePath(file))) {
            this.log('No plan file provided. Run "qforce dev:migrate --sample" to get a sample.');
            // Fix: stop here. Previously execution fell through to the
            // require() below and crashed with MODULE_NOT_FOUND.
            return;
        }
        // data/ and reference/ folders live beside the plan file.
        let basePath = file.split('/');
        basePath.pop();
        let dataPath = basePath.slice();
        dataPath.push('data');
        let refPath = basePath.slice();
        refPath.push('reference');
        // CLI flags may request wiping the output folders before running.
        if (flags.clearDataFolder) {
            if (fs.existsSync(path.join(process.cwd(), ...dataPath))) {
                utility_1.deleteFolderRecursive(dataPath.join('/'));
            }
        }
        if (flags.clearRefFolder) {
            if (fs.existsSync(path.join(process.cwd(), ...refPath))) {
                utility_1.deleteFolderRecursive(refPath.join('/'));
            }
        }
        const migrationPlan = await Promise.resolve().then(() => require(utility_1.getAbsolutePath(file)));
        // Helpers exposed to plan/step hook functions (calculateFlags,
        // transform, transformAll, generateData) via `this`.
        const globalVars = {
            moment: moment,
            random: random_1.random,
            lodash: lodash,
            sha1: sha1,
            getProp: utility_1.getProp,
            plan: migrationPlan
        };
        if (migrationPlan.calculateFlags) {
            // Attach helpers (except the plan itself) before invoking the
            // plan-level hook so it can compute dynamic flags.
            for (let key in globalVars) {
                if (key != 'plan')
                    migrationPlan[key] = globalVars[key];
            }
            migrationPlan.calculateFlags.call(migrationPlan);
        }
        // The plan itself may also request clearing the output folders
        // (possibly set just above by calculateFlags).
        if (migrationPlan.clearDataFolder) {
            if (fs.existsSync(path.join(process.cwd(), ...dataPath))) {
                utility_1.deleteFolderRecursive(dataPath.join('/'));
            }
        }
        if (migrationPlan.clearRefFolder) {
            if (fs.existsSync(path.join(process.cwd(), ...refPath))) {
                utility_1.deleteFolderRecursive(refPath.join('/'));
            }
        }
        // Optional plan-level window over the step list.
        const startIndex = migrationPlan.startIndex || 0;
        const stopIndex = migrationPlan.stopIndex || migrationPlan.steps.length;
        for (let i = startIndex; i < stopIndex; i++) {
            let step = migrationPlan.steps[i];
            if (!step.name)
                continue;
            // --name runs a single named step only.
            if (flags.name) {
                if (step.name != flags.name)
                    continue;
            }
            this.log(i + ' - Step ' + step.name + ' - Started');
            // Expose the shared helpers on the step for its hook functions.
            for (let key in globalVars) {
                step[key] = globalVars[key];
            }
            if (step.references) {
                step = utility_1.setStepReferences(step, basePath.join('/'));
            }
            if (step.calculateFlags) {
                step.calculateFlags.call(step);
            }
            if (step.skip) {
                this.log(i + ' - Step ' + step.name + ' - Skipped');
                continue;
            }
            // Apex step: execute an anonymous Apex file against the
            // destination org, then move on to the next step.
            if (step.apexCodeFile && (flags.destination || migrationPlan.destination)) {
                let apexCodePath = utility_1.getAbsolutePath(step.apexCodeFile);
                if (!fs.existsSync(apexCodePath)) {
                    this.log(apexCodePath + ' does not exist');
                    // Second chance: resolve relative to the plan file.
                    apexCodePath = utility_1.getAbsolutePath(basePath.join('/') + '/' + step.apexCodeFile);
                    this.log('Checking at ' + apexCodePath);
                }
                if (!fs.existsSync(apexCodePath)) {
                    this.log(apexCodePath + ' does not exist');
                    this.log('Path must be relative to project base or migration plan file.');
                    continue;
                }
                let options = {};
                options.apexcodefile = apexCodePath;
                options.targetusername = flags.destination || migrationPlan.destination;
                let exeResults = await sfdx.apex.execute(options);
                if (exeResults && exeResults.logs)
                    this.log(exeResults.logs);
                continue;
            }
            // Local data generation step (no query): write generated rows
            // to data/<step>.csv for the upload phase below.
            if (step.generateData && !step.query) {
                let generatedData = step.generateData.call(step);
                if (generatedData.length < 1) {
                    const manualCheck = await cli_ux_1.default.confirm('No data generated. Continue?');
                    if (!manualCheck)
                        break;
                }
                generatedData.map(utility_1.prepJsonForCsv);
                if (!fs.existsSync(path.join(process.cwd(), ...dataPath))) {
                    fs.mkdirSync(path.join(process.cwd(), ...dataPath), { recursive: true });
                }
                fs.writeFileSync(path.join(process.cwd(), ...dataPath, `${step.name}.csv`), csvjson.toCSV(generatedData, { headers: 'relative', wrap: true }), { encoding: 'utf-8' });
            }
            // Query step: pull records from the source org (or from the
            // destination for delete/queryDestination steps) into CSV/JSON.
            if (step.query &&
                (step.queryDestination
                    || step.isDelete
                    || flags.source
                    || migrationPlan.source
                    || step.source)) {
                cli_ux_1.default.action.start(i + ' - Step ' + step.name + ' querying data');
                let targetusername;
                if (step.queryDestination || step.isDelete) {
                    targetusername = flags.destination || migrationPlan.destination || step.destination;
                }
                else {
                    targetusername = flags.source || migrationPlan.source || step.source;
                }
                let queryString = step.query;
                // SELECT * support: expand to the full field list.
                if (queryString.includes('*')) {
                    queryString = await utility_1.getQueryAll(queryString, targetusername, true);
                }
                let options = {};
                options.query = queryString;
                options.targetusername = targetusername;
                let queryResult;
                try {
                    queryResult = await sfdx.data.soqlQuery(options);
                }
                catch (err) {
                    cli_ux_1.default.action.stop('Error in querying the data: ' + JSON.stringify(err, null, 2));
                    if (settings.ignoreError)
                        continue;
                    else
                        break;
                }
                queryResult.records.map(utility_1.handleNullValues);
                // Per-record then whole-result-set transform hooks.
                if (step.transform)
                    queryResult.records.map(step.transform.bind(step));
                if (step.transformAll) {
                    queryResult.records = step.transformAll.call(step, queryResult.records);
                }
                // Reference steps persist raw JSON for later lookup by
                // other steps (setStepReferences).
                if (step.referenceOnly || step.isReference) {
                    if (!fs.existsSync(path.join(process.cwd(), ...refPath))) {
                        fs.mkdirSync(path.join(process.cwd(), ...refPath), { recursive: true });
                    }
                    fs.writeFileSync(path.join(process.cwd(), ...refPath, `${step.name}.json`), JSON.stringify(queryResult.records), { encoding: 'utf-8' });
                }
                // remove attributes property and csv cleanup
                queryResult.records.map(utility_1.prepJsonForCsv);
                if (!step.referenceOnly) {
                    if (!fs.existsSync(path.join(process.cwd(), ...dataPath))) {
                        fs.mkdirSync(path.join(process.cwd(), ...dataPath), { recursive: true });
                    }
                    fs.writeFileSync(path.join(process.cwd(), ...dataPath, `${step.name}.csv`), csvjson.toCSV(queryResult.records, { headers: 'relative', wrap: true }), { encoding: 'utf-8' });
                }
                cli_ux_1.default.action.stop();
            }
            // referenceOnly steps never upload.
            if (step.referenceOnly)
                continue;
            let loadResults;
            if (step.isDelete) {
                // Delete step: bulk-delete the queried rows from the destination.
                cli_ux_1.default.action.start(i + ' - Step ' + step.name + ' deleting data');
                let options = {};
                options.targetusername = flags.destination || migrationPlan.destination || step.destination;
                options.csvfile = path.join(process.cwd(), ...dataPath, `${step.name}.csv`);
                options.sobjecttype = step.sobjecttype || step.sObjectType;
                try {
                    loadResults = await sfdx.data.bulkDelete(options);
                    this.log(loadResults);
                }
                catch (err) {
                    cli_ux_1.default.action.stop();
                    this.log('Error uploading data: ' + JSON.stringify(err, null, 2));
                    if (migrationPlan.ignoreError)
                        continue;
                    const manualCheck = await cli_ux_1.default.confirm('Check status in your org. Continue?');
                    if (manualCheck)
                        continue;
                    else
                        break;
                }
            }
            else if (flags.destination || migrationPlan.destination || step.destination) {
                // Upload step: bulk-upsert the step's CSV into the destination.
                cli_ux_1.default.action.start(i + ' - Step ' + step.name + ' uploading data');
                let options = {};
                options.targetusername = flags.destination || migrationPlan.destination || step.destination;
                options.csvfile = path.join(process.cwd(), ...dataPath, `${step.name}.csv`);
                // Fix: accept either spelling. The old guard only checked the
                // lowercase `externalid`, so a plan using `externalId` never had
                // its external id passed to bulkUpsert.
                if (step.externalid || step.externalId)
                    options.externalid = step.externalid || step.externalId;
                options.sobjecttype = step.sobjecttype || step.sObjectType;
                try {
                    loadResults = await sfdx.data.bulkUpsert(options);
                    this.log('Load Results: ' + JSON.stringify(loadResults, null, 4));
                }
                catch (err) {
                    cli_ux_1.default.action.stop();
                    this.log('Error uploading data: ' + JSON.stringify(err, null, 4));
                    if (migrationPlan.ignoreError)
                        continue;
                    else
                        break;
                }
                if (!loadResults) {
                    const manualCheck = await cli_ux_1.default.confirm('Check status in your org. Continue?');
                    if (manualCheck)
                        continue;
                }
            }
            if (!loadResults)
                continue;
            // Poll the bulk job until it finishes (retries/interval are
            // configurable on the plan).
            let options = {};
            let pollResults;
            try {
                options.targetusername = flags.destination || migrationPlan.destination;
                options.jobid = loadResults[0].jobId;
                options.batchid = loadResults[0].id;
                pollResults = await utility_1.pollBulkStatus(options, migrationPlan.bulkStatusRetries, migrationPlan.bulkStatusInterval);
                this.log('Poll Results: ' + JSON.stringify(pollResults, null, 4));
            }
            catch (err) {
                cli_ux_1.default.action.stop();
                this.log('Error in getting bulk status: ' + JSON.stringify(err, null, 4));
                const manualCheck = await cli_ux_1.default.confirm('Check status in your org. Continue?');
                if (manualCheck)
                    continue;
                else
                    break;
            }
            if (pollResults && pollResults.numberRecordsFailed > 0) {
                cli_ux_1.default.action.stop();
                this.log('Some records did not get uploaded:\n' + JSON.stringify(pollResults, null, 4));
                if (migrationPlan.ignoreError)
                    continue;
                const manualCheck = await cli_ux_1.default.confirm('Continue?');
                if (manualCheck)
                    continue;
                else
                    break;
            }
            cli_ux_1.default.action.stop();
        }
    }
}
// Register the command class as the module's default export for oclif discovery.
exports.default = Migrate;
// Shown in `qforce --help` output.
Migrate.description = 'Migrate data from one org to another based on a migration plan.';
// Also invocable as `qforce migrate` or `qforce m`.
Migrate.aliases = ['migrate', 'm'];
// CLI flag definitions (oclif). Flags override the corresponding
// plan-file settings where both are provided.
Migrate.flags = {
help: command_1.flags.help({ char: 'h' }),
destination: command_1.flags.string({ char: 'd', description: 'destination org username or alias' }),
file: command_1.flags.string({ char: 'f', description: 'Path of migration plan file. Must be relative to cwd and in unix format.' }),
sample: command_1.flags.boolean({ description: 'Copy sample migration plan files to current directory.' }),
source: command_1.flags.string({ char: 's', description: 'source org username or alias.' }),
name: command_1.flags.string({ char: 'n', description: 'Name of the step to execute.' }),
// Wipe the data/ folder (next to the plan file) before running.
clearDataFolder: command_1.flags.boolean(),
// Wipe the reference/ folder (next to the plan file) before running.
clearRefFolder: command_1.flags.boolean(),
};