UNPKG

@salesforce/source-deploy-retrieve

Version:

JavaScript library to run Salesforce metadata deploys and retrieves

183 lines 14.7 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MetadataConverter = void 0;
/*
 * Copyright (c) 2020, salesforce.com, inc.
 * All rights reserved.
 * Licensed under the BSD 3-Clause license.
 * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
 */
const node_stream_1 = require("node:stream");
const node_path_1 = require("node:path");
const core_1 = require("@salesforce/core");
const graceful_fs_1 = require("graceful-fs");
const ts_types_1 = require("@salesforce/ts-types");
const metadataResolver_1 = require("../resolve/metadataResolver");
const fileSystemHandler_1 = require("../utils/fileSystemHandler");
const componentSet_1 = require("../collections/componentSet");
const types_1 = require("../collections/types");
const registryAccess_1 = require("../registry/registryAccess");
const streams_1 = require("./streams");
const replacements_1 = require("./replacements");
// Localized (English) messages for this package, keyed by message id.
// NOTE(review): this Map appears to be inlined by the build from a messages
// bundle; the strings below are runtime output and must not be edited here.
const messages = new core_1.Messages('@salesforce/source-deploy-retrieve', 'sdr', new Map([
    ["md_request_fail", "Metadata API request failed: %s"],
    ["error_convert_invalid_format", "Invalid conversion format '%s'"],
    ["error_could_not_infer_type", "%s: Could not infer a metadata type"],
    ["error_unexpected_child_type", "Unexpected child metadata [%s] found for parent type [%s]"],
    ["noParent", "Could not find parent type for %s (%s)"],
    ["error_expected_source_files", "%s: Expected source files for type '%s'"],
    ["error_failed_convert", "Component conversion failed: %s"],
    ["error_merge_metadata_target_unsupported", "Merge convert for metadata target format currently unsupported"],
    ["error_missing_adapter", "Missing adapter '%s' for metadata type '%s'"],
    ["error_missing_transformer", "Missing transformer '%s' for metadata type '%s'"],
    ["error_missing_type_definition", "Missing metadata type definition in registry for id '%s'."],
    ["error_missing_child_type_definition", "Type %s does not have a child type definition %s."],
    ["noChildTypes", "No child types found in registry for %s (reading %s at %s)"],
    ["error_no_metadata_xml_ignore", "Metadata xml file %s is forceignored but is required for %s."],
    ["noSourceIgnore", "%s metadata types require source files, but %s is forceignored."],
    ["noSourceIgnore.actions", "- Metadata types with content are composed of two files: a content file (ie MyApexClass.cls) and a -meta.xml file (i.e MyApexClass.cls-meta.xml). You must include both files in your .forceignore file. Or try appending \u201C\\*\u201D to your existing .forceignore entry.\n\nSee <https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_exclude_source.htm> for examples"],
    ["error_path_not_found", "%s: File or folder not found"],
    ["noContentFound", "SourceComponent %s (metadata type = %s) is missing its content file."],
    ["noContentFound.actions", ["Ensure the content file exists in the expected location.", "If the content file is in your .forceignore file, ensure the meta-xml file is also ignored to completely exclude it."]],
    ["error_parsing_xml", "SourceComponent %s (metadata type = %s) does not have an associated metadata xml to parse"],
    ["error_expected_file_path", "%s: path is to a directory, expected a file"],
    ["error_expected_directory_path", "%s: path is to a file, expected a directory"],
    ["error_directory_not_found_or_not_directory", "%s: path is not a directory"],
    ["error_no_directory_stream", "%s doesn't support readable streams on directories."],
    ["error_no_source_to_deploy", "No source-backed components present in the package."],
    ["error_no_components_to_retrieve", "No components in the package to retrieve."],
    ["error_static_resource_expected_archive_type", "A StaticResource directory must have a content type of application/zip or application/jar - found %s for %s."],
    ["error_static_resource_missing_resource_file", "A StaticResource must have an associated .resource file, missing %s.resource-meta.xml"],
    ["error_no_job_id", "The %s operation is missing a job ID. Initialize an operation with an ID, or start a new job."],
    ["missingApiVersion", "Could not determine an API version to use for the generated manifest. Tried looking for sourceApiVersion in sfdx-project.json, apiVersion from config vars, and the highest apiVersion from the APEX REST endpoint. Using API version 58.0 as a last resort."],
    ["invalid_xml_parsing", "error parsing %s due to:\\n message: %s\\n line: %s\\n code: %s"],
    ["zipBufferError", "Zip buffer was not created during conversion"],
    ["undefinedComponentSet", "Unable to construct a componentSet. Check the logs for more information."],
    ["replacementsFileNotRead", "The file \"%s\" specified in the \"replacements\" property of sfdx-project.json could not be read."],
    ["unsupportedBundleType", "Unsupported Bundle Type: %s"],
    ["filePathGeneratorNoTypeSupport", "Type not supported for filepath generation: %s"],
    ["missingFolderType", "The registry has %s as is inFolder but it does not have a folderType"],
    ["tooManyFiles", "Multiple files found for path: %s."],
    ["cantGetName", "Unable to calculate fullName from path: %s (%s)"],
    ["missingMetaFileSuffix", "The metadata registry is configured incorrectly for %s. Expected a metaFileSuffix."],
    ["uniqueIdElementNotInRegistry", "No uniqueIdElement found in registry for %s (reading %s at %s)."],
    ["uniqueIdElementNotInChild", "The uniqueIdElement %s was not found the child (reading %s at %s)."],
    ["suggest_type_header", "A metadata type lookup for \"%s\" found the following close matches:"],
    ["suggest_type_did_you_mean", "-- Did you mean \".%s%s\" instead for the \"%s\" metadata type?"],
    ["suggest_type_more_suggestions", "Additional suggestions:\nConfirm the file name, extension, and directory names are correct. Validate against the registry at:\n<https://github.com/forcedotcom/source-deploy-retrieve/blob/main/src/registry/metadataRegistry.json>\n\nIf the type is not listed in the registry, check that it has Metadata API support via the Metadata Coverage Report:\n<https://developer.salesforce.com/docs/metadata-coverage>\n\nIf the type is available via Metadata API but not in the registry\n\n- Open an issue <https://github.com/forcedotcom/cli/issues>\n- Add the type via PR. Instructions: <https://github.com/forcedotcom/source-deploy-retrieve/blob/main/contributing/metadata.md>"],
    ["type_name_suggestions", "Confirm the metadata type name is correct. Validate against the registry at:\n<https://github.com/forcedotcom/source-deploy-retrieve/blob/main/src/registry/metadataRegistry.json>\n\nIf the type is not listed in the registry, check that it has Metadata API support via the Metadata Coverage Report:\n<https://developer.salesforce.com/docs/metadata-coverage>\n\nIf the type is available via Metadata API but not in the registry\n\n- Open an issue <https://github.com/forcedotcom/cli/issues>\n- Add the type via PR. Instructions: <https://github.com/forcedotcom/source-deploy-retrieve/blob/main/contributing/metadata.md>"],
]));
/**
 * Converts metadata components between source and metadata (mdapi) formats,
 * writing the result to a directory, a zip, or merging into existing source.
 */
class MetadataConverter {
    static PACKAGE_XML_FILE = 'package.xml';
    static DESTRUCTIVE_CHANGES_POST_XML_FILE = 'destructiveChangesPost.xml';
    static DESTRUCTIVE_CHANGES_PRE_XML_FILE = 'destructiveChangesPre.xml';
    static DEFAULT_PACKAGE_PREFIX = 'metadataPackage';
    registry;
    constructor(registry = new registryAccess_1.RegistryAccess()) {
        this.registry = registry;
    }
    /**
     * Convert the given components to `targetFormat` ('source' or metadata),
     * directing output per the `output` config ('directory' | 'zip' | 'merge').
     *
     * Wraps unexpected failures in an SfError named 'ConversionError'; SfErrors
     * that already carry a useful name or actions are rethrown untouched.
     */
    async convert(comps, targetFormat, output) {
        try {
            const cs = comps instanceof componentSet_1.ComponentSet
                ? comps
                : new componentSet_1.ComponentSet(comps, this.registry);
            // drop components whose type is explicitly marked non-addressable
            const components = (comps instanceof componentSet_1.ComponentSet
                ? Array.from(comps.getSourceComponents())
                : comps).filter((comp) => comp.type.isAddressable !== false);
            if (output.type !== 'merge' && output.packageName) {
                cs.fullName = output.packageName;
            }
            const targetFormatIsSource = targetFormat === 'source';
            const { packagePath, defaultDirectory, writer, mergeSet, tasks = [], } = await getConvertIngredients(output, cs, targetFormatIsSource, this.registry);
            // replacement marking only applies when converting AWAY from source
            // format and either opted in via env var or producing a zip
            const conversionPipeline = (0, streams_1.pipeline)(
                node_stream_1.Readable.from(components),
                !targetFormatIsSource && (process.env.SF_APPLY_REPLACEMENTS_ON_CONVERT === 'true' || output.type === 'zip')
                    ? (await (0, replacements_1.getReplacementMarkingStream)(cs.projectDirectory)) ?? new node_stream_1.PassThrough({ objectMode: true })
                    : new node_stream_1.PassThrough({ objectMode: true }),
                new streams_1.ComponentConverter(targetFormat, this.registry, mergeSet, defaultDirectory),
                writer
            );
            await Promise.all([conversionPipeline, ...tasks]);
            return await getResult(this.registry)(packagePath)(writer);
        }
        catch (err) {
            if (!(err instanceof Error) && !(0, ts_types_1.isString)(err)) {
                throw err;
            }
            // if the error is already somewhat descriptive, use that
            // the allows better error messages to be passed through instead of "failed convert"
            if (err instanceof core_1.SfError && (err.name !== 'SfError' || err.actions)) {
                throw err;
            }
            const error = (0, ts_types_1.isString)(err) ? new Error(err) : err;
            throw new core_1.SfError(messages.getMessage('error_failed_convert', [error.message]), 'ConversionError', [], error);
        }
    }
}
exports.MetadataConverter = MetadataConverter;
/**
 * Build the ConvertResult from the finished writer.
 * Curried so the registry and packagePath can be bound before the writer exists.
 */
const getResult = (registry) => (packagePath) => async (writer) => {
    // union type discrimination
    if ('addToZip' in writer) {
        // zip writer: either hand back the in-memory buffer, or flush it to disk
        const zipBuffer = writer.buffer;
        if (!packagePath) {
            return { packagePath, zipBuffer, zipFileCount: writer.fileCount };
        }
        else if (zipBuffer) {
            await graceful_fs_1.promises.writeFile(packagePath, zipBuffer);
            return { packagePath };
        }
    }
    else if (writer.converted?.length > 0 || writer.deleted?.length > 0) {
        // standard writer: re-resolve the written files into SourceComponents
        const resolver = new metadataResolver_1.MetadataResolver(registry);
        return {
            packagePath,
            converted: writer.converted.flatMap((f) => resolver.getComponentsFromPath(f)),
            deleted: writer.deleted,
        };
    }
    return { packagePath, converted: [], deleted: [] };
};
/**
 * Compute (and ensure the existence of) the destination path for a
 * directory/zip conversion. Returns undefined when no outputDirectory is set.
 */
function getPackagePath(outputConfig) {
    let packagePath;
    const { genUniqueDir = true, outputDirectory, packageName, type } = outputConfig;
    if (outputDirectory) {
        if (packageName) {
            packagePath = (0, node_path_1.join)(outputDirectory, packageName);
        }
        else if (genUniqueDir) {
            // no explicit name: time-stamp the directory so repeat runs don't collide
            packagePath = (0, node_path_1.join)(outputDirectory, `${MetadataConverter.DEFAULT_PACKAGE_PREFIX}_${Date.now()}`);
        }
        else {
            packagePath = (0, node_path_1.normalize)(outputDirectory);
        }
        if (type === 'zip') {
            packagePath += '.zip';
            (0, fileSystemHandler_1.ensureDirectoryExists)((0, node_path_1.dirname)(packagePath));
        }
        else {
            (0, fileSystemHandler_1.ensureDirectoryExists)(packagePath);
        }
    }
    return packagePath;
}
/** Map a DestructiveChangesType to its manifest file name. */
const getDestructiveManifest = (destructiveChangesType) => {
    switch (destructiveChangesType) {
        case types_1.DestructiveChangesType.POST:
            return MetadataConverter.DESTRUCTIVE_CHANGES_POST_XML_FILE;
        case types_1.DestructiveChangesType.PRE:
            return MetadataConverter.DESTRUCTIVE_CHANGES_PRE_XML_FILE;
    }
};
/**
 * Dispatch on the output config type to produce the pieces convert() needs:
 * packagePath, defaultDirectory, writer, mergeSet, and optional async tasks.
 */
async function getConvertIngredients(output, cs, targetFormatIsSource, registry) {
    switch (output.type) {
        case 'directory':
            return getDirectoryConfigOutputs(output, targetFormatIsSource, cs);
        case 'zip':
            return getZipConfigOutputs(output, targetFormatIsSource, cs);
        case 'merge':
            return getMergeConfigOutputs(output, targetFormatIsSource, registry);
    }
}
/** Ingredients for merging converted source into existing components. */
function getMergeConfigOutputs(output, targetFormatIsSource, registry) {
    if (!targetFormatIsSource) {
        throw new core_1.SfError(messages.getMessage('error_merge_metadata_target_unsupported'));
    }
    const defaultDirectory = output.defaultDirectory;
    const mergeSet = new componentSet_1.ComponentSet(undefined, registry);
    for (const component of output.mergeWith) {
        if (component.type.strategies?.adapter === 'digitalExperience' && !component.parent?.content) {
            // DE is addressable without its parent (DEB)
            mergeSet.add(component);
        }
        else {
            // since child components are composed in metadata format, we need to merge using the parent
            mergeSet.add(component.parent ?? component);
        }
    }
    const writer = new streams_1.StandardWriter(output.defaultDirectory);
    return {
        writer,
        mergeSet,
        defaultDirectory,
    };
}
/** Ingredients for writing the conversion into a zip archive. */
async function getZipConfigOutputs(output, targetFormatIsSource, cs) {
    const packagePath = getPackagePath(output);
    const writer = new streams_1.ZipWriter(packagePath);
    if (!targetFormatIsSource) {
        writer.addToZip(await cs.getPackageXml(), MetadataConverter.PACKAGE_XML_FILE);
        // for each of the destructive changes in the component set, convert and write the correct metadata to each manifest
        await Promise.all(cs
            .getTypesOfDestructiveChanges()
            .map(async (destructiveChangeType) => writer.addToZip(await cs.getPackageXml(4, destructiveChangeType), getDestructiveManifest(destructiveChangeType))));
    }
    return {
        packagePath,
        defaultDirectory: packagePath,
        writer,
        mergeSet: undefined,
    };
}
/** Ingredients for writing the conversion into a plain directory. */
async function getDirectoryConfigOutputs(output, targetFormatIsSource, cs) {
    const packagePath = getPackagePath(output);
    return {
        packagePath,
        defaultDirectory: packagePath,
        writer: new streams_1.StandardWriter(packagePath),
        tasks: targetFormatIsSource
            ? []
            : [
                graceful_fs_1.promises.writeFile((0, node_path_1.join)(packagePath, MetadataConverter.PACKAGE_XML_FILE), await cs.getPackageXml()),
                ...cs.getTypesOfDestructiveChanges().map(async (destructiveChangesType) =>
                    // for each of the destructive changes in the component set, convert and write the correct metadata
                    // to each manifest
                    graceful_fs_1.promises.writeFile((0, node_path_1.join)(packagePath, getDestructiveManifest(destructiveChangesType)), await cs.getPackageXml(4, destructiveChangesType))),
            ],
    };
}
//# sourceMappingURL=metadataConverter.js.map