@salesforce/source-tracking
API for tracking local and remote Salesforce metadata changes
"use strict";
/*
* Copyright 2025, Salesforce, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ShadowRepo = void 0;
const node_path_1 = __importDefault(require("node:path"));
const os = __importStar(require("node:os"));
const fs = __importStar(require("graceful-fs"));
const core_1 = require("@salesforce/core");
const kit_1 = require("@salesforce/kit");
// @ts-expect-error isogit has both ESM and CJS exports but node16 module/resolution identifies it as ESM
const isomorphic_git_1 = __importDefault(require("isomorphic-git"));
const performance_1 = require("@oclif/core/performance");
const functions_1 = require("../functions");
const moveDetection_1 = require("./moveDetection");
const functions_2 = require("./functions");
/** returns the full path to where we store the shadow repo */
const getGitDir = (orgId, projectPath) => node_path_1.default.join(projectPath, '.sf', 'orgs', orgId, 'localSourceTracking');
// catch isogit's `InternalError` so that people don't report CLI issues in the isogit repo.
// See: https://github.com/forcedotcom/cli/issues/2416
const redirectToCliRepoError = (e) => {
if (e instanceof isomorphic_git_1.default.Errors.InternalError) {
const error = new core_1.SfError(`An internal error caused this command to fail. isomorphic-git error:${os.EOL}${e.data.message}`, e.name);
throw error;
}
throw e;
};
/** do not try to add more than this many files at a time through isogit. You'll hit `EMFILE: too many open files` even with graceful-fs */
const MAX_FILE_ADD = kit_1.env.getNumber('SF_SOURCE_TRACKING_BATCH_SIZE', kit_1.env.getNumber('SFDX_SOURCE_TRACKING_BATCH_SIZE', functions_2.IS_WINDOWS ? 8000 : 15_000));
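// Note: if the default batch size exceeds the OS open-file limit, it can be lowered by setting
// SF_SOURCE_TRACKING_BATCH_SIZE (or the older SFDX_SOURCE_TRACKING_BATCH_SIZE), e.g.
// SF_SOURCE_TRACKING_BATCH_SIZE=5000, to a value below the output of `ulimit -Hn`.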
class ShadowRepo {
static instanceMap = new Map();
gitDir;
projectPath;
    /**
     * packageDirs converted to project-relative posix-style paths.
     * iso-git uses relative, posix paths,
     * but packageDirs has already resolved/normalized them,
     * so we need to make them project-relative again and convert the separators on Windows.
     */
packageDirs;
status;
logger;
registry;
constructor(options) {
this.gitDir = getGitDir(options.orgId, options.projectPath);
this.projectPath = options.projectPath;
this.packageDirs = options.packageDirs.map(packageDirToRelativePosixPath(options.projectPath));
this.registry = options.registry;
}
    // singleton-like behavior, but with one instance per projectPath
static async getInstance(options) {
if (!ShadowRepo.instanceMap.has(options.projectPath)) {
const newInstance = new ShadowRepo(options);
await newInstance.init();
ShadowRepo.instanceMap.set(options.projectPath, newInstance);
}
return ShadowRepo.instanceMap.get(options.projectPath);
}
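    /*
     * Example usage sketch (illustrative only; assumes `project` is an SfProject from
     * @salesforce/core and `registryAccess` is a RegistryAccess from
     * @salesforce/source-deploy-retrieve):
     *
     *   const repo = await ShadowRepo.getInstance({
     *     orgId: '00DB0000000XXXXXXX',
     *     projectPath: project.getPath(),
     *     packageDirs: project.getPackageDirectories(),
     *     registry: registryAccess,
     *   });
     *   const changedFiles = await repo.getChangedFilenames();
     */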
async init() {
this.logger = await core_1.Logger.child('ShadowRepo');
// initialize the shadow repo if it doesn't exist
if (!fs.existsSync(this.gitDir)) {
this.logger.debug('initializing git repo');
await this.gitInit();
}
}
    /**
     * Initialize a new source tracking shadow repo. Think of `git init`.
     */
async gitInit() {
this.logger.trace(`initializing git repo at ${this.gitDir}`);
await fs.promises.mkdir(this.gitDir, { recursive: true });
try {
await isomorphic_git_1.default.init({ fs, dir: this.projectPath, gitdir: this.gitDir, defaultBranch: 'main' });
}
catch (e) {
redirectToCliRepoError(e);
}
}
/**
* Delete the local tracking files
*
* @returns the deleted directory
*/
async delete() {
if (typeof fs.promises.rm === 'function') {
await fs.promises.rm(this.gitDir, { recursive: true, force: true });
}
        else {
            // fallback for older fs implementations that don't provide fs.promises.rm
            await fs.promises.rmdir(this.gitDir, { recursive: true });
        }
return this.gitDir;
}
    /**
     * If the status already exists, return it. Otherwise, compute and cache the status before returning it.
     *
     * @param noCache if true, recompute the status from the filesystem even if a cached value exists
     *
     * @returns StatusRow[] (paths are os-specific)
     */
async getStatus(noCache = false) {
this.logger.trace(`start: getStatus (noCache = ${noCache})`);
if (!this.status || noCache) {
const marker = performance_1.Performance.mark('@salesforce/source-tracking', 'localShadowRepo.getStatus#withoutCache');
try {
                // status hasn't been initialized yet, or the caller forced a refresh
this.status = await isomorphic_git_1.default.statusMatrix({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
filepaths: this.packageDirs,
ignored: true,
filter: fileFilter(this.packageDirs),
});
                // isomorphic-git stores paths in unix (posix) style. Convert to windows-style if necessary
if (functions_2.IS_WINDOWS) {
this.status = this.status.map((row) => [node_path_1.default.normalize(row[functions_2.FILE]), row[functions_2.HEAD], row[functions_2.WORKDIR], row[3]]);
}
if (kit_1.env.getBoolean('SF_DISABLE_SOURCE_MOBILITY') === true) {
await core_1.Lifecycle.getInstance().emitTelemetry({ eventName: 'moveFileDetectionDisabled' });
}
else {
// Check for moved files and update local git status accordingly
await core_1.Lifecycle.getInstance().emitTelemetry({ eventName: 'moveFileDetectionEnabled' });
await this.detectMovedFiles();
}
}
catch (e) {
redirectToCliRepoError(e);
}
marker?.stop();
}
this.logger.trace(`done: getStatus (noCache = ${noCache})`);
return this.status;
}
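    /*
     * For reference when reading the filters below: each status row from isomorphic-git's
     * statusMatrix is [filepath, HEAD, WORKDIR, STAGE], where HEAD is 0 (absent from HEAD) or
     * 1 (present in HEAD), and WORKDIR is 0 (absent), 1 (identical to HEAD), or 2 (present but
     * different from HEAD). So HEAD === 0 && WORKDIR === 2 is an add, HEAD === 1 && WORKDIR === 0
     * is a delete, and HEAD === 1 && WORKDIR === 2 is a modify.
     */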
/**
* returns any change (add, modify, delete)
*/
async getChangedRows() {
return (await this.getStatus()).filter((file) => file[functions_2.HEAD] !== file[functions_2.WORKDIR]);
}
/**
* returns any change (add, modify, delete)
*/
async getChangedFilenames() {
return (0, functions_2.toFilenames)(await this.getChangedRows());
}
async getDeletes() {
return (await this.getStatus()).filter(functions_2.isDeleted);
}
async getDeleteFilenames() {
return (0, functions_2.toFilenames)(await this.getDeletes());
}
/**
* returns adds and modifies but not deletes
*/
async getNonDeletes() {
return (await this.getStatus()).filter((file) => file[functions_2.WORKDIR] === 2);
}
/**
* returns adds and modifies but not deletes
*/
async getNonDeleteFilenames() {
return (0, functions_2.toFilenames)(await this.getNonDeletes());
}
async getAdds() {
return (await this.getStatus()).filter(functions_2.isAdded);
}
async getAddFilenames() {
return (0, functions_2.toFilenames)(await this.getAdds());
}
/**
* returns files that were not added or deleted, but changed locally
*/
async getModifies() {
return (await this.getStatus()).filter((file) => file[functions_2.HEAD] === 1 && file[functions_2.WORKDIR] === 2);
}
async getModifyFilenames() {
return (0, functions_2.toFilenames)(await this.getModifies());
}
    /**
     * Stage the provided files, then commit them to the shadow repo
     *
     * @param deployedFiles list of added/changed files to stage and commit (full paths)
     * @param deletedFiles list of deleted files to stage and commit (full paths)
     * @param message commit message (include org username and id)
     * @param needsUpdatedStatus if true, refresh the cached status after committing
     *
     * @returns sha (string)
     */
async commitChanges({ deployedFiles = [], deletedFiles = [], message = 'sfdx source tracking', needsUpdatedStatus = true, } = {}) {
        // if no files are specified, there's nothing to commit
        if (deployedFiles.length === 0 && deletedFiles.length === 0) {
            // this is a valid situation, not necessarily an error
return 'no files to commit';
}
const marker = performance_1.Performance.mark('@salesforce/source-tracking', 'localShadowRepo.commitChanges', {
deployedFiles: deployedFiles.length,
deletedFiles: deletedFiles.length,
});
if (deployedFiles.length) {
const chunks = (0, functions_1.chunkArray)(
// these are stored in posix/style/path format. We have to convert inbound stuff from windows
[...new Set(functions_2.IS_WINDOWS ? deployedFiles.map(normalize).map(functions_2.ensurePosix) : deployedFiles)], MAX_FILE_ADD);
for (const chunk of chunks) {
try {
this.logger.debug(`adding ${chunk.length} files of ${deployedFiles.length} deployedFiles to git`);
// these need to be done sequentially (it's already batched) because isogit manages file locking
// eslint-disable-next-line no-await-in-loop
await isomorphic_git_1.default.add({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
filepath: chunk,
force: true,
});
}
catch (e) {
if (e instanceof isomorphic_git_1.default.Errors.MultipleGitError) {
this.logger.error(`${e.errors.length} errors on git.add, showing the first 5:`, e.errors.slice(0, 5));
throw core_1.SfError.create({
message: e.message,
name: e.name,
data: e.errors.map((err) => err.message),
cause: e,
actions: [
`One potential reason you're getting this error is that the number of files that source tracking is batching exceeds your user-specific file limits. Increase your hard file limit in the same session by executing 'ulimit -Hn ${MAX_FILE_ADD}'. Or set the 'SFDX_SOURCE_TRACKING_BATCH_SIZE' environment variable to a value lower than the output of 'ulimit -Hn'.\nNote: Don't set this environment variable too close to the upper limit or your system will still hit it. If you continue to get the error, lower the value of the environment variable even more.`,
],
});
}
redirectToCliRepoError(e);
}
}
}
if (deletedFiles.length) {
// Using a cache here speeds up the performance by ~24.4%
let cache = {};
const deleteMarker = performance_1.Performance.mark('@salesforce/source-tracking', 'localShadowRepo.commitChanges#delete', {
deletedFiles: deletedFiles.length,
});
for (const filepath of [...new Set(functions_2.IS_WINDOWS ? deletedFiles.map(normalize).map(functions_2.ensurePosix) : deletedFiles)]) {
try {
// these need to be done sequentially because isogit manages file locking. Isogit remove does not support multiple files at once
// eslint-disable-next-line no-await-in-loop
await isomorphic_git_1.default.remove({ fs, dir: this.projectPath, gitdir: this.gitDir, filepath, cache });
}
catch (e) {
redirectToCliRepoError(e);
}
}
// clear cache
cache = {};
deleteMarker?.stop();
}
try {
this.logger.trace('start: commitChanges git.commit');
const sha = await isomorphic_git_1.default.commit({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
message,
author: { name: 'sfdx source tracking' },
});
            // status changed as a result of the commit. Refreshing it here saves callers from running getStatus(true) to bypass the stale cache
if (needsUpdatedStatus) {
await this.getStatus(true);
}
this.logger.trace('done: commitChanges git.commit');
return sha;
}
catch (e) {
redirectToCliRepoError(e);
}
marker?.stop();
}
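    /*
     * Example usage sketch (illustrative only; `filesFromDeploy` is assumed to be the list of
     * file paths returned by a deploy operation):
     *
     *   const sha = await repo.commitChanges({
     *     deployedFiles: filesFromDeploy,
     *     deletedFiles: [],
     *     message: 'deploy: user@example.com 00DB0000000XXXXXXX',
     *   });
     */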
async detectMovedFiles() {
        // getStatus will return os-specific paths
const matchingFiles = (0, moveDetection_1.getMatches)(await this.getStatus());
if (!matchingFiles.added.size || !matchingFiles.deleted.size)
return;
const movedFilesMarker = performance_1.Performance.mark('@salesforce/source-tracking', 'localShadowRepo.detectMovedFiles');
const matches = await (0, moveDetection_1.filenameMatchesToMap)(this.registry)(this.projectPath)(this.gitDir)(matchingFiles);
if (matches.deleteOnly.size === 0 && matches.fullMatches.size === 0)
return movedFilesMarker?.stop();
this.logger.debug((0, moveDetection_1.getLogMessage)(matches));
movedFilesMarker?.addDetails({
filesMoved: matches.fullMatches.size,
filesMovedAndEdited: matches.deleteOnly.size,
});
// Commit the moved files and refresh the status
await this.commitChanges({
deletedFiles: [...matches.fullMatches.values(), ...matches.deleteOnly.values()],
deployedFiles: [...matches.fullMatches.keys()],
message: 'Committing moved files',
});
movedFilesMarker?.stop();
}
}
exports.ShadowRepo = ShadowRepo;
const packageDirToRelativePosixPath = (projectPath) => (packageDir) => functions_2.IS_WINDOWS
? (0, functions_2.ensurePosix)(node_path_1.default.relative(projectPath, packageDir.fullPath))
: node_path_1.default.relative(projectPath, packageDir.fullPath);
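// e.g. on Windows, packageDirToRelativePosixPath('C:\myProject')({ fullPath: 'C:\myProject\src\force-app' })
// returns 'src/force-app'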
const normalize = (filepath) => node_path_1.default.normalize(filepath);
const fileFilter = (packageDirs) => (f) =>
// no hidden files
!f.includes(`${node_path_1.default.sep}.`) &&
// no lwc tests
(0, functions_1.excludeLwcLocalOnlyTest)(f) &&
// no gitignore files
!f.endsWith('.gitignore') &&
// isogit uses `startsWith` for filepaths so it's possible to get a false positive
packageDirs.some((0, functions_1.folderContainsPath)(f));
//# sourceMappingURL=localShadowRepo.js.map