@salesforce/source-deploy-retrieve
JavaScript library to run Salesforce metadata deploys and retrieves
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsToXml = exports.ZipWriter = exports.StandardWriter = exports.ComponentWriter = exports.ComponentConverter = exports.stream2buffer = exports.getPipeline = void 0;
/*
* Copyright 2025, Salesforce, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const node_path_1 = require("node:path");
const node_stream_1 = require("node:stream");
const node_util_1 = require("node:util");
const jszip_1 = __importDefault(require("jszip"));
const graceful_fs_1 = require("graceful-fs");
const fast_xml_parser_1 = require("fast-xml-parser");
const logger_1 = require("@salesforce/core/logger");
const constants_1 = require("../common/constants");
const fileSystemHandler_1 = require("../utils/fileSystemHandler");
const types_1 = require("../client/types");
const resolve_1 = require("../resolve");
const metadataTransformerFactory_1 = require("./transformers/metadataTransformerFactory");
const convertContext_1 = require("./convertContext/convertContext");
let promisifiedPipeline; // store it so we don't have to promisify every time
const getPipeline = () => {
promisifiedPipeline ??= (0, node_util_1.promisify)(node_stream_1.pipeline);
return promisifiedPipeline;
};
exports.getPipeline = getPipeline;
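// Usage sketch (illustrative, not part of this module); the file paths are hypothetical:
//   const { createReadStream, createWriteStream } = require('node:fs');
//   await (0, exports.getPipeline)()(createReadStream('in.xml'), createWriteStream('out.xml'));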
const stream2buffer = async (stream) => new Promise((resolve, reject) => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const buf = Array();
stream.on('data', (chunk) => buf.push(chunk));
stream.on('end', () => resolve(Buffer.concat(buf)));
stream.on('error', (err) => reject(`error converting stream - ${err}`));
});
exports.stream2buffer = stream2buffer;
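// Usage sketch (illustrative, not part of this module); the file path is hypothetical:
//   const { createReadStream } = require('node:fs');
//   const contents = await (0, exports.stream2buffer)(createReadStream('classes/MyClass.cls'));
/**
 * Transform stream that converts components between source and metadata formats.
 * Each output chunk pairs the incoming component with the WriteInfos produced by its
 * metadata transformer; components marked for delete pass through with no WriteInfos.
 */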
class ComponentConverter extends node_stream_1.Transform {
targetFormat;
mergeSet;
defaultDirectory;
context = new convertContext_1.ConvertContext();
transformerFactory;
constructor(targetFormat, registry, mergeSet, defaultDirectory) {
super({ objectMode: true });
this.targetFormat = targetFormat;
this.mergeSet = mergeSet;
this.defaultDirectory = defaultDirectory;
this.transformerFactory = new metadataTransformerFactory_1.MetadataTransformerFactory(registry, this.context);
}
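/**
 * Converts a single component. For a 'source' target the component is merged with any
 * matching components in the merge set; for a 'metadata' target it is converted directly.
 * Emits `{ component, writeInfos }` for the downstream writer.
 */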
async _transform(chunk, encoding, callback) {
let err;
const writeInfos = [];
// Only transform components not marked for delete.
if (!chunk.isMarkedForDelete()) {
try {
const converts = [];
const transformer = this.transformerFactory.getTransformer(chunk);
transformer.defaultDirectory = this.defaultDirectory;
if (this.targetFormat === 'source') {
const mergeWith = this.mergeSet?.getSourceComponents(chunk);
if (mergeWith) {
for (const mergeComponent of mergeWith) {
converts.push(transformer.toSourceFormat({ component: chunk, mergeWith: mergeComponent, mergeSet: this.mergeSet }));
}
}
if (converts.length === 0) {
converts.push(transformer.toSourceFormat({ component: chunk, mergeSet: this.mergeSet }));
}
}
else if (this.targetFormat === 'metadata') {
converts.push(transformer.toMetadataFormat(chunk));
}
// could maybe improve all this with lazy async collections...
(await Promise.all(converts)).forEach((infos) => writeInfos.push(...infos));
}
catch (e) {
err = e;
}
}
callback(err, { component: chunk, writeInfos });
}
/**
* Called at the end when all components have passed through the pipeline. Finalizers
* take care of any additional work to be done at this stage e.g. recomposing child components.
*/
async _flush(callback) {
let err;
try {
for await (const finalizerResult of this.context.executeFinalizers(this.defaultDirectory)) {
finalizerResult.forEach((result) => this.push(result));
}
}
catch (e) {
err = e;
}
callback(err);
}
}
exports.ComponentConverter = ComponentConverter;
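/**
 * Base class for Writable streams that consume converted components
 * (`{ component, writeInfos }` chunks) and persist them somewhere.
 */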
class ComponentWriter extends node_stream_1.Writable {
rootDestination;
logger;
constructor(rootDestination) {
super({ objectMode: true });
this.rootDestination = rootDestination;
this.logger = logger_1.Logger.childFromRoot(this.constructor.name);
}
}
exports.ComponentWriter = ComponentWriter;
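/**
 * Writes converted components to the file system under `rootDestination`,
 * honoring .forceignore and tracking converted and deleted file paths.
 */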
class StandardWriter extends ComponentWriter {
/** filepaths that converted files were written to */
converted = [];
deleted = [];
forceignore;
constructor(rootDestination) {
super(rootDestination);
this.forceignore = resolve_1.ForceIgnore.findAndCreate(rootDestination);
}
async _write(chunk, encoding, callback) {
let err;
if (chunk.writeInfos.length !== 0) {
try {
const toResolve = new Set();
// it is a reasonable expectation that when a conversion call exits, the files of
// every component have been written to the destination. This await ensures the microtask
// queue is empty when that call exits and that less memory is consumed overall.
await Promise.all(chunk.writeInfos
.map(makeWriteInfoAbsolute(this.rootDestination))
.filter(existsOrDoesntMatchIgnored(this.forceignore, this.logger)) // skip files that do not exist and are excluded by .forceignore
.map(async (info) => {
if (info.shouldDelete) {
this.deleted.push({
filePath: info.output,
state: types_1.ComponentStatus.Deleted,
type: info.type,
fullName: info.fullName,
});
return graceful_fs_1.promises.rm(info.output, { force: true, recursive: true });
}
// if there are children, resolve each file; otherwise just pick one of the files to resolve
// "resolve" means "make these show up in the FileResponses"
if (toResolve.size === 0 ||
chunk.component.type.children !== undefined ||
// make each decomposed label show up in the fileResponses
chunk.component.type.strategies?.transformer === 'decomposedLabels') {
// This is a workaround for a server side ListViews bug where
// duplicate components are sent. W-9614275
if (toResolve.has(info.output)) {
this.logger.debug(`Ignoring duplicate metadata for: ${info.output}`);
return;
}
toResolve.add(info.output);
}
await (0, fileSystemHandler_1.ensureFileExists)(info.output);
return (0, exports.getPipeline)()(info.source, (0, graceful_fs_1.createWriteStream)(info.output));
}));
this.converted.push(...toResolve);
}
catch (e) {
err = e;
}
}
callback(err);
}
}
exports.StandardWriter = StandardWriter;
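// Usage sketch (illustrative; `components` as an iterable of resolved SourceComponents and
// `registry` as a RegistryAccess from this library are assumptions, not this file's API surface):
//   const { Readable } = require('node:stream');
//   const converter = new ComponentConverter('metadata', registry);
//   const writer = new StandardWriter('/tmp/out');
//   await (0, exports.getPipeline)()(Readable.from(components), converter, writer);
//   console.log(writer.converted); // file paths written under /tmp/out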
class ZipWriter extends ComponentWriter {
/**
* Count of files (not directories) added to the zip file.
*/
fileCount = 0;
zip = (0, jszip_1.default)();
zipBuffer;
constructor(rootDestination) {
super(rootDestination);
const destination = rootDestination ? `for: ${rootDestination}` : 'in memory';
this.logger.debug(`generating zip ${destination}`);
}
get buffer() {
return this.zipBuffer;
}
async _write(chunk, encoding, callback) {
let err;
try {
await Promise.all(chunk.writeInfos.filter(isWriteInfoWithSource).map(async (writeInfo) => {
// we don't want to prematurely zip folder types when their children might not yet be in the zip;
// those files we'll leave open as ReadableStreams until the zip finalizes
if (Boolean(chunk.component.type.folderType) || Boolean(chunk.component.type.folderContentType)) {
return this.addToZip(writeInfo.source, writeInfo.output);
}
// everything else can be zipped immediately to reduce the number of open files (windows has a low limit!) and help perf
const streamAsBuffer = await (0, exports.stream2buffer)(writeInfo.source);
return this.addToZip(streamAsBuffer, writeInfo.output);
}));
}
catch (e) {
err = e;
}
callback(err);
}
async _final(callback) {
let err;
try {
this.zipBuffer = await this.zip.generateAsync({
type: 'nodebuffer',
compression: 'DEFLATE',
compressionOptions: { level: 3 },
});
this.logger.debug('Generated zip complete');
}
catch (e) {
err = e;
}
callback(err);
}
addToZip(contents, path) {
// Ensure only posix paths are added to zip files
const posixPath = path.replace(/\\/g, '/');
this.zip.file(posixPath, contents);
this.fileCount++;
}
}
exports.ZipWriter = ZipWriter;
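// Usage sketch (illustrative; assumes `converter` is a ComponentConverter producing
// metadata-format output and `components` is an iterable of resolved SourceComponents):
//   const { Readable } = require('node:stream');
//   const writer = new ZipWriter(); // no rootDestination -> zip stays in memory
//   await (0, exports.getPipeline)()(Readable.from(components), converter, writer);
//   const zipBuffer = writer.buffer; // deployable zip as a Buffer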
/**
 * Convenient wrapper to serialize a JS object to XML content. Implemented as a stream
 * so it can be used as a valid source for ComponentWriters in the conversion pipeline,
 * even though it does not provide the usual benefits of streaming.
*/
class JsToXml extends node_stream_1.Readable {
xmlObject;
constructor(xmlObject) {
super();
this.xmlObject = xmlObject;
}
_read() {
const builder = new fast_xml_parser_1.XMLBuilder({
format: true,
indentBy: '    ',
ignoreAttributes: false,
cdataPropName: '__cdata',
commentPropName: constants_1.XML_COMMENT_PROP_NAME,
});
const builtXml = String(builder.build(this.xmlObject));
const xmlContent = correctComments(constants_1.XML_DECL.concat(handleSpecialEntities(builtXml)));
this.push(xmlContent);
this.push(null);
}
}
exports.JsToXml = JsToXml;
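// Usage sketch (illustrative; the object shape, member names, and API version are hypothetical):
//   const { createWriteStream } = require('node:fs');
//   const source = new JsToXml({ Package: { types: [{ name: 'ApexClass', members: ['MyClass'] }], version: '61.0' } });
//   await (0, exports.getPipeline)()(source, createWriteStream('package.xml'));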
/** xmlBuilder likes to add newline and indent before/after the comment (hypothesis: it uses `<` as a hint to newline/indent) */
const correctComments = (xml) => xml.includes('<!--') ? xml.replace(/\s+<!--(.*?)-->\s+/g, '<!--$1-->') : xml;
/**
* use this function to handle special html entities.
 * XmlBuilder will otherwise replace ex: `&#160;` with `'&amp;#160;'` (escape the &)
* This is a separate function to allow for future handling of other special entities
*
* See https://github.com/NaturalIntelligence/fast-xml-parser/blob/fa5a7339a5ae2ca4aea8a256179b82464dbf510e/docs/v4/5.Entities.md
* The parser can call addEntities to support more, but the Builder does not have that option.
* You also can't use Builder.tagValueProcessor to use this function
* because the escaping of `&` happens AFTER that is called.
* */
const handleSpecialEntities = (xml) => xml.replaceAll('&amp;#160;', '&#160;');
/** discriminate between the shouldDelete and the regular WriteInfo */
const isWriteInfoWithSource = (writeInfo) => writeInfo.source !== undefined;
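/** resolve a WriteInfo's output path against rootDestination when it isn't already absolute */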
const makeWriteInfoAbsolute = (rootDestination = '') => (writeInfo) => ({
...writeInfo,
output: (0, node_path_1.isAbsolute)(writeInfo.output) ? writeInfo.output : (0, node_path_1.join)(rootDestination, writeInfo.output),
});
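/** keep WriteInfos whose output already exists or is accepted by .forceignore; log the ones that are skipped */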
const existsOrDoesntMatchIgnored = (forceignore, logger) => (writeInfo) => {
const result = (0, graceful_fs_1.existsSync)(writeInfo.output) || forceignore.accepts(writeInfo.output);
// Detect if file was ignored by .forceignore patterns
if (!result) {
logger.debug(`File ${writeInfo.output} was ignored or does not exist`);
}
return result;
};
//# sourceMappingURL=streams.js.map