salesforce-alm
Version:
This package contains tools and APIs for an improved salesforce.com developer experience.
474 lines (472 loc) • 22.7 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SourcePathStatusManager = exports.SourcePathInfo = void 0;
/*
* Copyright (c) 2020, salesforce.com, inc.
* All rights reserved.
* Licensed under the BSD 3-Clause license.
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
const path = require("path");
const core_1 = require("@salesforce/core");
const source_deploy_retrieve_1 = require("@salesforce/source-deploy-retrieve");
const srcDevUtil = require("../core/srcDevUtil");
const Messages = require("../messages");
const workspaceFileState_1 = require("./workspaceFileState");
const MetadataRegistry = require("./metadataRegistry");
const messages = Messages();
const kit_1 = require("@salesforce/kit");
const workspace_1 = require("./workspace");
const metadataTypeFactory_1 = require("./metadataTypeFactory");
/**
 * Tracks the last-known state of a single workspace source path (file or
 * directory): size, modified/change timestamps, content hash, owning package,
 * and metadata type. Instances are compared against fresh file-system stats
 * to detect adds, changes, and deletes (see getPendingPathInfo).
 *
 * Instantiated via the inherited AsyncCreatable `create(options)` factory;
 * all option properties are copied directly onto the instance.
 */
// eslint-disable-next-line no-redeclare
class SourcePathInfo extends kit_1.AsyncCreatable {
constructor(options) {
super(options);
// When true, initFromPath() skips hashing; callers invoke computeContentHash() later.
this.deferContentHash = false;
Object.assign(this, options);
}
async init() {
// if modifiedTime and state already exist, we assume that there's no need to reprocess
// the sourcePathInfo and that the other properties already exist. Technically, not
// a safe assumption... but it seems to work out
if (!this.modifiedTime || !this.state) {
await this.initFromPath(this.sourcePath, this.deferContentHash);
}
}
/**
 * Return a clone of this SourcePathInfo, overriding specified properties.
 *
 * NOTE(review): toJson() drops every falsy-valued property (see toJson), so a
 * clone relies on constructor/option defaults for values such as isDirectory=false.
 *
 * @param overrides SourcePathInfo properties that should override the cloned properties
 */
async clone(overrides = {}) {
const entry = Object.assign({}, this.toJson(), overrides);
return SourcePathInfo.create(entry);
}
/**
 * Initialize path info based on a path in the workspace.
 *
 * Stats the path to capture size/mtime/ctime, resolves the owning package and
 * (for files) the metadata type, then hashes the content unless deferContentHash
 * is set. If the stat fails the path is marked DELETED and nothing else is set.
 */
async initFromPath(sourcePath, deferContentHash) {
// If we are initializing from path then the path is new
this.state = workspaceFileState_1.WorkspaceFileState.NEW;
this.sourcePath = sourcePath;
const packageDir = core_1.SfdxProject.getInstance().getPackageNameFromPath(sourcePath);
if (packageDir) {
this.package = packageDir;
}
let filestat;
try {
filestat = await core_1.fs.stat(sourcePath);
}
catch (e) {
// If there is an error with filestat then the path is deleted
this.state = workspaceFileState_1.WorkspaceFileState.DELETED;
return;
}
this.isDirectory = filestat.isDirectory();
this.isMetadataFile = !this.isDirectory && this.sourcePath.endsWith(MetadataRegistry.getMetadataFileExt());
if (!this.metadataType && !this.isDirectory) {
// Relies on the static registry assigned in the SourcePathStatusManager constructor.
const metadataType = metadataTypeFactory_1.MetadataTypeFactory.getMetadataTypeFromSourcePath(sourcePath, SourcePathStatusManager.metadataRegistry);
if (metadataType) {
this.metadataType = metadataType.getMetadataName();
}
}
this.size = filestat.size;
this.modifiedTime = filestat.mtime.getTime();
this.changeTime = filestat.ctime.getTime();
if (!deferContentHash) {
await this.computeContentHash();
}
}
// Hash the file contents; for a directory, hash the stringified entry list so
// that adding or removing a direct child changes the directory's hash.
async computeContentHash() {
const contents = this.isDirectory
? (await core_1.fs.readdir(this.sourcePath)).toString()
: await core_1.fs.readFile(this.sourcePath);
this.contentHash = srcDevUtil.getContentHash(contents);
}
/**
 * If the source has been modified, return the path info for the change.
 *
 * Returns null when nothing changed. Side effect: when only the timestamps
 * drifted but the content hash is identical, this instance's size/time fields
 * are refreshed in place (persisted with the next commit).
 */
async getPendingPathInfo() {
// Re-stat the path with hashing deferred; the hash is computed only if the
// cheap stat-based checks below indicate a possible change.
const pendingPathInfo = await SourcePathInfo.create({
sourcePath: this.sourcePath,
metadataType: this.metadataType,
isWorkspace: this.isWorkspace,
package: this.package,
deferContentHash: true,
});
// See if the referenced path has been deleted
if (pendingPathInfo.isDeleted()) {
// Force setting isDirectory and isMetadataFile for deleted paths
pendingPathInfo.isDirectory = this.isDirectory;
pendingPathInfo.isMetadataFile = this.isMetadataFile;
pendingPathInfo.size = this.size;
return pendingPathInfo;
}
// Unless deleted, new paths always return true. no need for further checks
if (this.state === workspaceFileState_1.WorkspaceFileState.NEW) {
return this;
}
// Next we'll check if the path infos are different
if (pendingPathInfo.isDirectory || // Always need to compare the hash on directories
pendingPathInfo.size !== this.size ||
pendingPathInfo.modifiedTime !== this.modifiedTime ||
pendingPathInfo.changeTime !== this.changeTime) {
// Now we will compare the content hashes
await pendingPathInfo.computeContentHash();
if (pendingPathInfo.contentHash !== this.contentHash) {
pendingPathInfo.state = workspaceFileState_1.WorkspaceFileState.CHANGED;
return pendingPathInfo;
}
else {
// The hashes are the same, so the file hasn't really changed. Update our info.
// These will automatically get saved when other pending changes are committed
this.size = pendingPathInfo.size;
this.modifiedTime = pendingPathInfo.modifiedTime;
this.changeTime = pendingPathInfo.changeTime;
}
}
return null;
}
isDeleted() {
return this.state === workspaceFileState_1.WorkspaceFileState.DELETED;
}
isNew() {
return this.state === workspaceFileState_1.WorkspaceFileState.NEW;
}
isChanged() {
return this.state === workspaceFileState_1.WorkspaceFileState.CHANGED;
}
// Human-readable form of the state constant, for display/logging.
getState() {
return workspaceFileState_1.toReadableState(this.state);
}
// Serialize for persistence (e.g. in the workspace's sourcePathInfos store).
toJson() {
const entry = {
sourcePath: this.sourcePath,
isDirectory: this.isDirectory,
size: this.size,
modifiedTime: this.modifiedTime,
changeTime: this.changeTime,
contentHash: this.contentHash,
isMetadataFile: this.isMetadataFile,
state: this.state,
isWorkspace: this.isWorkspace,
isArtifactRoot: this.isArtifactRoot,
package: this.package,
metadataType: this.metadataType,
deferContentHash: this.deferContentHash,
};
// Drop all falsy-valued properties. NOTE(review): this is broader than
// "null value" — false/0/'' are removed too; consumers rely on defaults.
return Object.keys(entry)
.filter((k) => !!entry[k])
.reduce((a, k) => ({ ...a, [k]: entry[k] }), {});
}
}
exports.SourcePathInfo = SourcePathInfo;
/**
* Manages a data model for tracking changes to local workspace paths
*/
// eslint-disable-next-line no-redeclare
class SourcePathStatusManager extends kit_1.AsyncCreatable {
constructor(options) {
super(options);
this.isStateless = false;
this.org = options.org;
this.isStateless = options.isStateless || false;
this.workspacePath = options.org.config.getProjectPath();
this.forceIgnore = source_deploy_retrieve_1.ForceIgnore.findAndCreate(core_1.SfdxProject.resolveProjectPathSync());
SourcePathStatusManager.metadataRegistry = new MetadataRegistry();
}
async init() {
this.logger = await core_1.Logger.child(this.constructor.name);
// A logger just for file moves. Users can enable this debug output to see
// an acknowledgement of their move operations.
this.fileMoveLogger = await core_1.Logger.child('FileMoves');
const workspaceOpts = {
org: this.org,
forceIgnore: this.forceIgnore,
isStateless: this.isStateless,
};
this.workspace = await workspace_1.Workspace.create(workspaceOpts);
}
/**
* Get path infos for the source workspace, applying any filters specified.
*/
async getSourcePathInfos(filter = {}) {
// normalize packageDirectory (if defined) to end with a path separator
filter.packageDirectory = normalizeDirectoryPath(filter.packageDirectory);
const trackedPackages = this.workspace.trackedPackages.map((p) => normalizeDirectoryPath(p));
const allPackages = core_1.SfdxProject.getInstance()
.getUniquePackageDirectories()
.map((p) => normalizeDirectoryPath(p.fullPath));
const untrackedPackages = allPackages.filter((rootDir) => !trackedPackages.includes(rootDir));
// if a root directory is specified, make sure it is a project source directory
if (rootDirectoryIsNotASourceDirectory(filter.packageDirectory, allPackages)) {
throw new Error(messages.getMessage('rootDirectoryNotASourceDirectory', [], 'sourceConvertCommand'));
}
// If a sourcePath was passed in and we are in stateless mode (e.g., changesets)
// add only the specified source path to workspacePathInfos.
if (this.isStateless && filter.sourcePath) {
await this.workspace.handleArtifact(filter.sourcePath);
}
else {
if (untrackedPackages.length > 0) {
await this.workspace.walkDirectories(untrackedPackages);
}
}
// This is a shallow copy of the content in sourcePathInfos.json,
// or walking the file system to build that file.
const sourcePathInfos = await this.workspace.getInitializedValues();
const processedSourcePathInfos = new Map();
// Keep track of adds and deletes to detect moves
const addedSourcePathInfos = [];
const deletedSourcePathInfos = [];
for (const sourcePathInfo of sourcePathInfos) {
const shouldIncludeSourcePathInfo = this.shouldIncludeSourcePathInfo(sourcePathInfo, filter);
// If this is null, that means that the source has NOT changed
const pendingSourcePathInfo = await sourcePathInfo.getPendingPathInfo();
if (!pendingSourcePathInfo) {
if (!filter.changesOnly && shouldIncludeSourcePathInfo) {
// If the source has NOT changed but we're NOT filtering on changesOnly, then add it
processedSourcePathInfos.set(sourcePathInfo.sourcePath, sourcePathInfo);
}
}
else {
if (shouldIncludeSourcePathInfo) {
// The source has changed so add it
if (pendingSourcePathInfo.isDirectory &&
!pendingSourcePathInfo.isDeleted() &&
!pendingSourcePathInfo.isWorkspace) {
// If it's a directory and it isn't deleted then process the directory change.
// This is how new files are added.
const processed = await this.processChangedDirectory(pendingSourcePathInfo.sourcePath);
for (const spi of processed) {
if (spi) {
processedSourcePathInfos.set(spi.sourcePath, spi);
// Keep track of added files to check if they are moves
if (spi.state === workspaceFileState_1.WorkspaceFileState.NEW) {
addedSourcePathInfos.push(spi);
}
}
}
}
processedSourcePathInfos.set(pendingSourcePathInfo.sourcePath, pendingSourcePathInfo);
// Keep track of deleted files to check if they are moves
if (pendingSourcePathInfo.state === workspaceFileState_1.WorkspaceFileState.DELETED) {
deletedSourcePathInfos.push(pendingSourcePathInfo);
}
}
}
}
const finalSourcePathInfos = await this.processFileMoves(addedSourcePathInfos, deletedSourcePathInfos, processedSourcePathInfos);
this.logger.debug(`Found ${finalSourcePathInfos.length} sourcePathInfos`);
finalSourcePathInfos.forEach((key) => this.logger.debug(key));
return finalSourcePathInfos;
}
/**
* Determine if the provided sourcePathInfo should be processed during a source action (deploy, retrieve, push, pull, convert)
* A sourcePathInfo is INCLUDED if any of the following crietria are met:
* 1. If the sourcePathInfo.sourcePath is nested under the package directory
* 2. If the sourcePathInfo.sourcePath is the same or is nested under filter.sourcePath
* 3. If the sourcePathInfo.sourcePath is NOT ignored in the .forceignore file
*/
shouldIncludeSourcePathInfo(sourcePathInfo, filter = {}) {
const { packageDirectory, sourcePath } = filter;
// default to including this sourcePathInfo
let shouldIncludeSourcePathInfo = true;
if (packageDirectory) {
shouldIncludeSourcePathInfo = sourcePathInfo.sourcePath.includes(packageDirectory);
}
if (shouldIncludeSourcePathInfo && sourcePath) {
shouldIncludeSourcePathInfo = sourcePathInfo.sourcePath.includes(sourcePath);
}
if (this.forceIgnore.denies(sourcePathInfo.sourcePath)) {
shouldIncludeSourcePathInfo = false;
}
return shouldIncludeSourcePathInfo;
}
// Detects SourcePathInfo moves by looking for matching partial file
// paths of an add and a delete, then updates sourcePathInfos.json.
async processFileMoves(addedSourcePathInfos, deletedSourcePathInfos, processedSourcePathInfos) {
// Only do move detection if there were both added and deleted files.
if (addedSourcePathInfos.length && deletedSourcePathInfos.length) {
this.logger.debug(`There were ${addedSourcePathInfos.length} adds and ${deletedSourcePathInfos.length} deletes. Checking if these are moves.`);
// The SourcePathInfo updates to commit to sourcePathInfos.json
const spiUpdates = [];
let deletedSpi;
// Iterate over all deleted SourcePathInfos and compare to added SourcePathInfos
while ((deletedSpi = deletedSourcePathInfos.pop())) {
const fullPath = deletedSpi.sourcePath;
const packagePath = core_1.SfdxProject.getInstance().getPackagePath(deletedSpi.package);
const pathAfterPackageDir = fullPath.replace(packagePath, '');
this.logger.debug(`Looking for ${pathAfterPackageDir} in list of added files`);
const matchingAddedSpi = addedSourcePathInfos.find((addedSpi) => {
let found = false;
if (addedSpi.sourcePath.endsWith(pathAfterPackageDir)) {
// it was moved to another package.
found = true;
}
else {
const pathWithinPackage = pathAfterPackageDir.split(path.sep).slice(2).join(path.sep);
if (addedSpi.sourcePath.endsWith(pathWithinPackage)) {
// it was moved within the package (within 2 directories of the package dir)
found = true;
}
}
if (found) {
this.logger.debug(`${fullPath} was moved to ${addedSpi.sourcePath}`);
this.fileMoveLogger.info(`${fullPath} was moved to ${addedSpi.sourcePath}`);
}
return found;
});
if (matchingAddedSpi) {
// Now find out if the file was changed AND moved by comparing sizes.
// NOTE: this is not perfect but should be correct 99.9% of the time.
if (matchingAddedSpi.size !== deletedSpi.size) {
this.logger.debug(`${matchingAddedSpi.sourcePath} was moved and changed`);
// We have to create a different SourcePathInfo instance to use for commit
// in this case because we want to commit the add with some of the data from
// the deleted file but track the changed file state.
const movedSpiBeforeChanges = await matchingAddedSpi.clone({
size: deletedSpi.size,
state: workspaceFileState_1.WorkspaceFileState.CHANGED,
contentHash: deletedSpi.contentHash,
});
spiUpdates.push(movedSpiBeforeChanges);
}
else {
spiUpdates.push(matchingAddedSpi);
}
processedSourcePathInfos.set(matchingAddedSpi.sourcePath, matchingAddedSpi);
processedSourcePathInfos.delete(deletedSpi.sourcePath);
spiUpdates.push(deletedSpi);
}
}
if (spiUpdates.length) {
// Grab the directories for these changes too for the updated directory hashes.
spiUpdates.forEach((spi) => {
const dirSpi = processedSourcePathInfos.get(path.dirname(spi.sourcePath));
dirSpi && spiUpdates.push(dirSpi);
});
this.commitChangedPathInfos(spiUpdates);
}
}
return [...processedSourcePathInfos.values()];
}
/**
* Update the data model with changes
*/
async commitChangedPathInfos(sourcePathInfos) {
for (const sourcePathInfo of sourcePathInfos) {
if (sourcePathInfo.state !== workspaceFileState_1.WorkspaceFileState.UNCHANGED) {
if (sourcePathInfo.isDeleted()) {
this.workspace.unset(sourcePathInfo.sourcePath);
}
else {
sourcePathInfo.state = workspaceFileState_1.WorkspaceFileState.UNCHANGED;
this.workspace.set(sourcePathInfo.sourcePath, sourcePathInfo);
}
}
}
await this.workspace.write();
}
/**
* Update data model for the given paths
*/
async updateInfosForPaths(updatedPaths, deletedPaths) {
// check if the parent paths of updated paths need to be added to workspacePathInfos too
for (const updatedPath of updatedPaths.slice()) {
if (!this.workspace.has(updatedPath)) {
const sourcePath = updatedPath.split(path.sep);
while (sourcePath.length > 1) {
sourcePath.pop();
const parentPath = sourcePath.join(path.sep);
updatedPaths.push(parentPath);
if (this.workspace.has(parentPath)) {
break;
}
}
}
}
for (const deletedPath of deletedPaths) {
this.workspace.unset(deletedPath);
}
const promises = updatedPaths.map(async (updatedPath) => {
let sourcePathInfo;
const existing = this.workspace.get(updatedPath);
if (existing) {
// If a sourcePathInfo exists for the updatedPath, we still want to create a new sourcePathInfo
// but we need to preserve the isWorkspace and isArtifact properties
sourcePathInfo = await SourcePathInfo.create({
sourcePath: updatedPath,
isWorkspace: existing.isWorkspace,
isArtifactRoot: existing.isArtifactRoot,
});
}
else {
sourcePathInfo = await SourcePathInfo.create({ sourcePath: updatedPath });
}
sourcePathInfo.state = workspaceFileState_1.WorkspaceFileState.UNCHANGED;
this.workspace.set(updatedPath, sourcePathInfo);
});
await Promise.all(promises);
await this.workspace.write();
}
async backup() {
await this.workspace.backup();
}
async revert() {
await this.workspace.revert();
}
/**
* Get the path infos for source that has been updated in the given directory
*/
async processChangedDirectory(directoryPath) {
// If the path is a directory and wasn't deleted then we want to process the contents for changes
const files = await core_1.fs.readdir(directoryPath);
const updatedPathInfos = [];
for (const file of files) {
const fullPath = path.join(directoryPath, file);
// We only need to process additions to the directory, any existing ones will get dealt with on their own
if (this.workspace.has(fullPath)) {
continue;
}
const pathInfos = await this.getNewPathInfos(fullPath);
updatedPathInfos.push(...pathInfos);
}
return updatedPathInfos;
}
/**
* Get the path infos for newly added source
*/
async getNewPathInfos(sourcePath) {
let newPathInfos = [];
const newPathInfo = await SourcePathInfo.create({
sourcePath,
deferContentHash: false,
});
if (this.workspace.isValidSourcePath(newPathInfo)) {
newPathInfos.push(newPathInfo);
if (newPathInfo.isDirectory) {
const files = await core_1.fs.readdir(sourcePath);
const promises = files.map(async (file) => await this.getNewPathInfos(path.join(sourcePath, file)));
const infos = await Promise.all(promises);
newPathInfos = newPathInfos.concat(infos.reduce((x, y) => x.concat(y), []));
}
}
return newPathInfos;
}
}
exports.SourcePathStatusManager = SourcePathStatusManager;
/**
 * Ensure that the directory path ends with the platform path separator.
 * Falsy inputs (undefined/null/empty string) are returned unchanged.
 */
function normalizeDirectoryPath(dirPath) {
if (!dirPath) {
return dirPath;
}
if (dirPath.endsWith(path.sep)) {
return dirPath;
}
return `${dirPath}${path.sep}`;
}
/**
 * Determine if the provided directory path is NOT within any project source directory.
 *
 * @param packageDirPath directory path to check (normalized with a trailing separator), or undefined
 * @param trackedPackages normalized root paths of the project's package directories
 * @returns true when packageDirPath is defined and does not start with any tracked package path
 */
function rootDirectoryIsNotASourceDirectory(packageDirPath, trackedPackages) {
// Use some() rather than find(): find() returns the matched element itself,
// so a falsy match (e.g. an empty-string package path) would be misread as
// "not found" and flip the result.
return !!packageDirPath && !trackedPackages.some((pkg) => packageDirPath.startsWith(pkg));
}
//# sourceMappingURL=sourcePathStatusManager.js.map