UNPKG

@salesforce/core

Version:

Core libraries to interact with SFDX projects, orgs, and APIs.

166 lines 7.12 kB
"use strict";
/*
 * Copyright (c) 2020, salesforce.com, inc.
 * All rights reserved.
 * Licensed under the BSD 3-Clause license.
 * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.AuthHandler = exports.SfdxDataHandler = void 0;
const path_1 = require("path");
const kit_1 = require("@salesforce/kit");
const ts_types_1 = require("@salesforce/ts-types");
const global_1 = require("../global");
const fs_1 = require("../util/fs");
const configFile_1 = require("./configFile");
const globalInfoConfig_1 = require("./globalInfoConfig");

/**
 * Shallow equality check between two flat records, ignoring the `timestamp`
 * key (timestamps are bookkeeping added by the migration, not user data).
 *
 * @param {object} object1 - First record.
 * @param {object} object2 - Second record.
 * @returns {boolean} True when both records have the same non-timestamp keys
 *   with strictly-equal values.
 */
function isEqual(object1, object2) {
  const keys1 = Object.keys(object1).filter((k) => k !== 'timestamp');
  const keys2 = Object.keys(object2).filter((k) => k !== 'timestamp');
  if (keys1.length !== keys2.length) return false;
  for (const key of keys1) {
    if (object1[key] !== object2[key]) return false;
  }
  return true;
}

/**
 * Coordinates two-way synchronization between the new `.sf` global data model
 * and the legacy `.sfdx` on-disk state, delegating per-key work to handlers.
 */
class SfdxDataHandler {
  constructor() {
    // One handler per data section; currently only org authorizations.
    this.handlers = [new AuthHandler()];
  }

  /**
   * Writes the latest `.sf` data back to `.sfdx` via each handler, diffing
   * against the snapshot captured by the last merge/write.
   *
   * @param {object} [latest] - The current full data model.
   */
  async write(latest = globalInfoConfig_1.GlobalInfo.emptyDataModel) {
    for (const handler of this.handlers) {
      await handler.write(latest, this.original);
    }
    // BUGFIX: snapshot once, AFTER all handlers have run. The original code
    // called setOriginal(latest) inside the loop, so every handler after the
    // first diffed `latest` against itself and silently found no changes.
    this.setOriginal(latest);
  }

  /**
   * Merges legacy `.sfdx` state into the given `.sf` data via each handler and
   * records the result as the new baseline snapshot.
   *
   * @param {object} [sfData] - The current `.sf` data model.
   * @returns {Promise<object>} The merged data model.
   */
  async merge(sfData = globalInfoConfig_1.GlobalInfo.emptyDataModel) {
    const merged = globalInfoConfig_1.deepCopy(sfData);
    for (const handler of this.handlers) {
      Object.assign(merged, await handler.merge(merged));
    }
    this.setOriginal(merged);
    return merged;
  }

  /**
   * Stores a deep copy of `data` as the baseline for future change detection.
   *
   * @param {object} data - The data model to snapshot.
   */
  setOriginal(data) {
    this.original = globalInfoConfig_1.deepCopy(data);
  }
}
exports.SfdxDataHandler = SfdxDataHandler;

/**
 * Base class for per-section handlers. Subclasses define `sfKey` (the data
 * section they own) and `migrate()` (reading that section from `.sfdx`).
 */
class BaseHandler {
  /**
   * Merges the `.sfdx` view of this handler's section into the `.sf` data.
   * Conflicts on a shared key are resolved by timestamp: the newer record's
   * fields win, layered over the older record's fields.
   *
   * @param {object} [sfData] - The current `.sf` data model.
   * @returns {Promise<object>} A deep-copied, merged data model.
   */
  async merge(sfData = globalInfoConfig_1.GlobalInfo.emptyDataModel) {
    const sfdxData = await this.migrate();
    const merged = globalInfoConfig_1.deepCopy(sfData);
    // Only merge the key this handler is responsible for.
    const key = this.sfKey;
    const sfKeys = Object.keys(sfData[key] ?? {});
    const sfdxKeys = Object.keys(sfdxData[key] ?? {});

    // Keys present on both sides: newer timestamp wins field-by-field.
    const commonKeys = sfKeys.filter((k) => sfdxKeys.includes(k));
    for (const k of commonKeys) {
      const [newer, older] = [sfData[key][k], sfdxData[key][k]].sort((a, b) => {
        return new Date(a.timestamp) < new Date(b.timestamp) ? 1 : -1;
      });
      kit_1.set(merged, `${key}["${k}"]`, Object.assign({}, older, newer));
    }

    // Keys that exist in .sfdx but not .sf are added because we assume
    // that this means the key was created using sfdx.
    // However, this is not always a valid assumption because it could
    // also mean that the key was deleted using sf, in which case we
    // do not want to migrate the sfdx key to sf.
    // Programmatically differentiating between a new key and a deleted key
    // would be nearly impossible. Instead we should ensure that whenever
    // sf deletes a key it also deletes it in sfdx. This way, we can safely
    // assume that we should migrate any keys that exist in .sfdx
    const unhandledSfdxKeys = sfdxKeys.filter((k) => !sfKeys.includes(k));
    for (const k of unhandledSfdxKeys) {
      kit_1.set(merged, `${key}["${k}"]`, sfdxData[key][k]);
    }

    // Keys that exist in .sf but not .sfdx are deleted because we assume
    // that this means the key was deleted while using sfdx.
    // We can make this assumption because keys that are created by sf will
    // always be migrated back to sfdx
    const unhandledSfKeys = sfKeys.filter((k) => !sfdxKeys.includes(k));
    for (const k of unhandledSfKeys) {
      delete merged[key][k];
    }
    return merged;
  }
}

/**
 * Handler for org authorizations: migrates `$HOME/.sfdx/<username>.json` auth
 * files into the `.sf` data model and writes `.sf` changes back out.
 */
class AuthHandler extends BaseHandler {
  constructor() {
    super(...arguments);
    // Section of the data model this handler owns.
    this.sfKey = globalInfoConfig_1.SfInfoKeys.ORGS;
  }

  /**
   * Reads all legacy auth files and keys them by username.
   *
   * @returns {Promise<object>} `{ [sfKey]: { [username]: auth } }`.
   */
  async migrate() {
    const oldAuths = await this.listAllAuthorizations();
    const newAuths = oldAuths.reduce(
      (x, y) => Object.assign(x, { [ts_types_1.ensureString(y.username)]: y }),
      {}
    );
    return { [this.sfKey]: newAuths };
  }

  /**
   * Persists changed auths to their `.sfdx` files and unlinks files for
   * auths deleted since the `original` snapshot.
   *
   * @param {object} latest - The current data model.
   * @param {object} original - The snapshot to diff against.
   */
  async write(latest, original) {
    const { changed, deleted } = await this.findChanges(latest, original);
    for (const [username, authData] of Object.entries(changed)) {
      if (authData) {
        const config = await this.createAuthFileConfig(username);
        config.setContentsFromObject(authData);
        await config.write();
      }
    }
    for (const username of deleted) {
      const config = await this.createAuthFileConfig(username);
      await config.unlink();
    }
  }

  /**
   * Diffs two data models' org sections.
   *
   * @param {object} latest - The current data model.
   * @param {object} original - The previous snapshot.
   * @returns {Promise<{changed: object, deleted: string[]}>} Auths whose
   *   non-timestamp fields changed, and usernames removed entirely.
   */
  async findChanges(latest, original) {
    const latestAuths = latest.orgs;
    const originalAuths = original.orgs;
    const changed = {};
    for (const [username, auth] of Object.entries(latestAuths)) {
      const originalAuth = originalAuths[username] ?? {};
      if (!isEqual(auth, originalAuth)) {
        changed[username] = auth;
      }
    }
    const deleted = [];
    for (const username of Object.keys(originalAuths)) {
      if (!latestAuths[username]) {
        deleted.push(username);
      }
    }
    return { changed, deleted };
  }

  /**
   * Builds a ConfigFile pointed at `$HOME/.sfdx/<username>.json` with the
   * standard secret fields marked for encryption.
   *
   * @param {string} username - The org username (also the file basename).
   * @returns {Promise<object>} The ConfigFile instance.
   */
  async createAuthFileConfig(username) {
    const config = await configFile_1.ConfigFile.create({
      filename: `${username}.json`,
      isGlobal: true,
      isState: true,
      stateFolder: global_1.Global.SFDX_STATE_FOLDER,
      throwOnNotFound: false,
      encryptedKeys: ['accessToken', 'refreshToken', 'password', 'clientSecret'],
    });
    return config;
  }

  /**
   * Lists filenames in the global `.sfdx` directory that look like auth files.
   *
   * @returns {Promise<string[]>} Matching filenames (not full paths).
   */
  async listAllAuthFiles() {
    const globalFiles = await fs_1.fs.readdir(global_1.Global.SFDX_DIR);
    return globalFiles.filter((file) => file.match(AuthHandler.authFilenameFilterRegEx));
  }

  /**
   * Loads every legacy auth file, stamping each record with the file's mtime
   * so merge() can resolve conflicts by recency.
   *
   * @returns {Promise<object[]>} Auth records with an ISO `timestamp` field.
   */
  async listAllAuthorizations() {
    const filenames = await this.listAllAuthFiles();
    const auths = [];
    for (const filename of filenames) {
      const username = path_1.basename(filename, path_1.extname(filename));
      const configFile = await this.createAuthFileConfig(username);
      const contents = configFile.getContents();
      const stat = await configFile.stat();
      const auth = Object.assign(contents, { timestamp: stat.mtime.toISOString() });
      auths.push(auth);
    }
    return auths;
  }
}
exports.AuthHandler = AuthHandler;
// The regular expression that filters files stored in $HOME/.sfdx
AuthHandler.authFilenameFilterRegEx = /^[^.][^@]*@[^.]+(\.[^.\s]+)+\.json$/;
//# sourceMappingURL=sfdxDataHandler.js.map