@salesforce/source-tracking
API for tracking local and remote Salesforce metadata changes
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.toKey = exports.buildMap = exports.getLogMessage = exports.getMatches = exports.filenameMatchesToMap = void 0;
/*
* Copyright (c) 2023, salesforce.com, inc.
* All rights reserved.
* Licensed under the BSD 3-Clause license.
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
const node_path_1 = __importDefault(require("node:path"));
const node_os_1 = require("node:os");
const core_1 = require("@salesforce/core");
const source_deploy_retrieve_1 = require("@salesforce/source-deploy-retrieve");
// @ts-expect-error isogit has both ESM and CJS exports but node16 module/resolution identifies it as ESM
const isomorphic_git_1 = __importDefault(require("isomorphic-git"));
const fs = __importStar(require("graceful-fs"));
const performance_1 = require("@oclif/core/performance");
const guards_1 = require("../guards");
const functions_1 = require("../functions");
const functions_2 = require("./functions");
const JOIN_CHAR = '#__#'; // the __ makes it unlikely to be used in metadata names
/** composed functions to simplify use by the ShadowRepo class */
const filenameMatchesToMap = (registry) => (projectPath) => (gitDir) => async ({ added, deleted }) => {
const resolver = new source_deploy_retrieve_1.MetadataResolver(registry, source_deploy_retrieve_1.VirtualTreeContainer.fromFilePaths((0, functions_1.uniqueArrayConcat)(added, deleted)));
return compareHashes(await buildMaps(addTypes(resolver)(await toFileInfo({
projectPath,
gitDir,
added,
deleted,
}))));
};
exports.filenameMatchesToMap = filenameMatchesToMap;
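/*
 * Illustrative usage only; the registry instance, paths, and file names below are hypothetical,
 * not taken from this module. The curried call order is registry => projectPath => gitDir => { added, deleted }:
 *
 *   const { RegistryAccess } = require('@salesforce/source-deploy-retrieve');
 *   const matches = await exports.filenameMatchesToMap(new RegistryAccess())('/path/to/project')('/path/to/shadow/gitdir')({
 *     added: new Set(['force-app/main/default/classes/Foo.cls']),
 *     deleted: new Set(['force-app/other/classes/Foo.cls']),
 *   });
 *   // => { fullMatches: Map<addedPath, deletedPath>, deleteOnly: Map<addedPath, deletedPath> }
 */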
/** compare deletes and adds from git.status, matching the basenames of the files. Returns early when there's nothing to match */
const getMatches = (status) => {
// We check for moved files in incremental steps and exit as early as we can to avoid any performance degradation
// Deleted files will be more rare than added files, so we'll check them first and exit early if there are none
const emptyResult = { added: new Set(), deleted: new Set() };
const deletedFiles = status.filter(functions_2.isDeleted);
if (!deletedFiles.length)
return emptyResult;
const addedFiles = status.filter(functions_2.isAdded);
if (!addedFiles.length)
return emptyResult;
// Both arrays have contents, look for matching basenames
const addedFilenames = (0, functions_2.toFilenames)(addedFiles);
const deletedFilenames = (0, functions_2.toFilenames)(deletedFiles);
// Build Sets of basenames for added and deleted files for quick lookups
const addedBasenames = new Set(addedFilenames.map((filename) => node_path_1.default.basename(filename)));
const deletedBasenames = new Set(deletedFilenames.map((filename) => node_path_1.default.basename(filename)));
// TODO: when node 22 is everywhere, we can use Set.prototype.intersection
// Again, we filter over the deleted files first and exit early if there are no filename matches
const deletedFilenamesWithMatches = new Set(deletedFilenames.filter((f) => addedBasenames.has(node_path_1.default.basename(f))));
if (!deletedFilenamesWithMatches.size)
return emptyResult;
const addedFilenamesWithMatches = new Set(addedFilenames.filter((f) => deletedBasenames.has(node_path_1.default.basename(f))));
if (!addedFilenamesWithMatches.size)
return emptyResult;
return { added: addedFilenamesWithMatches, deleted: deletedFilenamesWithMatches };
};
exports.getMatches = getMatches;
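/*
 * Illustrative usage only; `status` is assumed here to be rows shaped like isomorphic-git's
 * statusMatrix output ([filepath, HEAD, WORKDIR, STAGE] tuples) for the shadow repo:
 *
 *   const status = await isomorphic_git_1.default.statusMatrix({ fs, dir: projectPath, gitdir: gitDir });
 *   const { added, deleted } = (0, exports.getMatches)(status);
 *   // both are Sets of filepaths whose basenames appear among both the adds and the deletes
 */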
const getLogMessage = (matches) => [
'Files have moved. Committing moved files:',
...[...matches.fullMatches.entries()].map(([add, del]) => `- File ${del} was moved to ${add}`),
...[...matches.deleteOnly.entries()].map(([add, del]) => `- File ${del} was moved to ${add} and modified`),
].join(node_os_1.EOL);
exports.getLogMessage = getLogMessage;
/** build maps of the adds/deletes keyed for matching, returning them. Logs a warning if files can't be matched because buildMap put them in the ignored bucket */
const buildMaps = async ({ addedInfo, deletedInfo }) => {
const [addedMap, addedIgnoredMap] = (0, exports.buildMap)(addedInfo);
const [deletedMap, deletedIgnoredMap] = (0, exports.buildMap)(deletedInfo);
// If we detected any files that have the same basename and hash, emit a warning and send telemetry
// These files will still show up as expected in the `sf project deploy preview` output
// We could add more logic to determine and display filepaths that we ignored...
// but this is likely rare enough to not warrant the added complexity
// Telemetry will help us determine how often this occurs
if (addedIgnoredMap.size || deletedIgnoredMap.size) {
const message = 'Files were found that have the same basename, hash, metadata type, and parent. Skipping the commit of these files';
const logger = core_1.Logger.childFromRoot('ShadowRepo.compareHashes');
logger.warn(message);
const lifecycle = core_1.Lifecycle.getInstance();
await Promise.all([
lifecycle.emitWarning(message),
lifecycle.emitTelemetry({ eventName: 'moveFileHashBasenameCollisionsDetected' }),
]);
}
return { addedMap, deletedMap };
};
/**
 * matches entries from the added and deleted maps by key, building a map of added filename => deleted filename
 * side effect: mutates the passed-in maps (fully matched entries are removed from both)!
 */
const compareHashes = ({ addedMap, deletedMap }) => {
const matches = new Map([...addedMap.entries()]
.map(([addedKey, addedValue]) => {
const deletedValue = deletedMap.get(addedKey);
if (deletedValue) {
// these are an exact basename + hash match + parent + type
deletedMap.delete(addedKey);
addedMap.delete(addedKey);
return [addedValue, deletedValue];
}
})
.filter(guards_1.isDefined));
if (addedMap.size && deletedMap.size) {
// the remaining deletes didn't match the basename+hash of an add, and vice versa.
// They *might* match the basename,type,parent of an add, in which case we *could* have the "move, then edit" case.
const addedMapNoHash = new Map([...addedMap.entries()].map(removeHashFromEntry));
const deletedMapNoHash = new Map([...deletedMap.entries()].map(removeHashFromEntry));
const deleteOnly = new Map(Array.from(deletedMapNoHash.entries())
.filter(([k]) => addedMapNoHash.has(k))
.map(([k, v]) => [addedMapNoHash.get(k), v]));
return { fullMatches: matches, deleteOnly };
}
return { fullMatches: matches, deleteOnly: new Map() };
};
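/*
 * Illustrative result (hypothetical paths): both maps are keyed by the added (new) path, valued by the deleted (old) path:
 *   fullMatches: Map { 'force-app/new/classes/Foo.cls' => 'force-app/old/classes/Foo.cls' }  // moved, content unchanged
 *   deleteOnly:  Map { 'force-app/new/classes/Bar.cls' => 'force-app/old/classes/Bar.cls' }  // moved and also edited
 */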
/** enrich the filenames with basename and oid (hash) */
const toFileInfo = async ({ projectPath, gitDir, added, deleted, }) => {
// Track how long it takes to gather the oid information from the git trees
const getInfoMarker = performance_1.Performance.mark('@salesforce/source-tracking', 'localShadowRepo.detectMovedFiles#toFileInfo', {
addedFiles: added.size,
deletedFiles: deleted.size,
});
const headRef = await isomorphic_git_1.default.resolveRef({ fs, dir: projectPath, gitdir: gitDir, ref: 'HEAD' });
const [addedInfo, deletedInfo] = await Promise.all([
await Promise.all(Array.from(added).map(getHashForAddedFile(projectPath))),
await Promise.all(Array.from(deleted).map(getHashFromActualFileContents(gitDir)(projectPath)(headRef))),
]);
getInfoMarker?.stop();
return { addedInfo, deletedInfo };
};
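// Illustrative shape of each addedInfo/deletedInfo entry (hypothetical values):
// { filename: 'force-app/main/default/classes/Foo.cls', basename: 'Foo.cls', hash: '<40-char git blob oid>' }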
/** returns a map of <key, filepath> where the key is built from the hash, basename, type, and parent info. If two items produce the same key, they're moved to the ignore bucket */
const buildMap = (info) => {
const map = new Map();
const ignore = new Map();
info.map((i) => {
const key = (0, exports.toKey)(i);
// If we find a duplicate key, we need to remove it and ignore it in the future.
// Finding duplicate hash#basename means that we cannot accurately determine where it was moved to or from
if (map.has(key) || ignore.has(key)) {
map.delete(key);
ignore.set(key, i.filename);
}
else {
map.set(key, i.filename);
}
});
return [map, ignore];
};
exports.buildMap = buildMap;
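/*
 * Illustrative only (hypothetical values): two entries sharing the same hash + basename + type + parent
 * end up in the ignore map rather than the match map:
 *
 *   const [map, ignore] = (0, exports.buildMap)([
 *     { filename: 'a/Foo.cls', basename: 'Foo.cls', hash: 'abc', type: 'ApexClass', parentType: '', parentFullName: '' },
 *     { filename: 'b/Foo.cls', basename: 'Foo.cls', hash: 'abc', type: 'ApexClass', parentType: '', parentFullName: '' },
 *   ]);
 *   // map.size === 0; ignore => Map { '<key>' => 'b/Foo.cls' }
 */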
const getHashForAddedFile = (projectPath) => async (filepath) => ({
filename: filepath,
basename: node_path_1.default.basename(filepath),
hash: (await isomorphic_git_1.default.hashBlob({
object: await fs.promises.readFile(node_path_1.default.join(projectPath, filepath)),
})).oid,
});
const resolveType = (resolver) => (filenames) => filenames
.flatMap((filename) => {
try {
return resolver.getComponentsFromPath(filename);
}
catch (e) {
const logger = core_1.Logger.childFromRoot('ShadowRepo.compareTypes');
logger.warn(`unable to resolve ${filename}`);
return undefined;
}
})
.filter(guards_1.isDefined);
/** deleted files no longer exist on disk, so get the hash from the git blob at HEAD rather than from the file contents */
const getHashFromActualFileContents = (gitdir) => (projectPath) => (oid) => async (filepath) => ({
filename: filepath,
basename: node_path_1.default.basename(filepath),
hash: (await isomorphic_git_1.default.readBlob({ fs, dir: projectPath, gitdir, filepath: functions_2.IS_WINDOWS ? (0, functions_2.ensurePosix)(filepath) : filepath, oid })).oid,
});
const toKey = (input) => [input.hash, input.basename, input.type, input.type, input.parentType ?? '', input.parentFullName ?? ''].join(JOIN_CHAR);
exports.toKey = toKey;
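// Illustrative key built by toKey from hypothetical values (the type name is joined twice, as written above):
// '<blob-oid>#__#Foo.cls#__#ApexClass#__#ApexClass#__##__#'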
const removeHashFromEntry = ([k, v]) => [removeHashFromKey(k), v];
const removeHashFromKey = (hash) => hash.split(JOIN_CHAR).splice(1).join(JOIN_CHAR);
/** resolve the metadata types (and possibly parent components) */
const addTypes = (resolver) => (info) => {
// quick exit if either the adds or the deletes are empty (no moves are possible)
if (!info.addedInfo.length || !info.deletedInfo.length)
return { addedInfo: [], deletedInfo: [] };
const applied = getTypesForFileInfo(resolveType(resolver));
return {
addedInfo: info.addedInfo.flatMap(applied),
deletedInfo: info.deletedInfo.flatMap(applied),
};
};
const getTypesForFileInfo = (appliedResolver) => (fileInfo) => appliedResolver([fileInfo.filename]).map((c) => ({
...fileInfo,
type: c.type.name,
parentType: c.parent?.type.name ?? '',
parentFullName: c.parent?.fullName ?? '',
}));
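// Illustrative enriched entry (hypothetical values) for a child component, e.g. a CustomField on the Account object:
// { filename: 'force-app/main/default/objects/Account/fields/Foo__c.field-meta.xml', basename: 'Foo__c.field-meta.xml',
//   hash: '<blob-oid>', type: 'CustomField', parentType: 'CustomObject', parentFullName: 'Account' }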
//# sourceMappingURL=moveDetection.js.map