UNPKG

@salesforce/source-deploy-retrieve

Version: (not captured in this extract)

JavaScript library to run Salesforce metadata deploys and retrieves

78 lines (77 loc) 4 kB
import { Readable, Transform, Writable, Stream } from 'node:stream';
import { JsonMap } from '@salesforce/ts-types';
import { Logger } from '@salesforce/core/logger';
import { SourceComponent } from '../resolve/sourceComponent';
import { SourcePath } from '../common/types';
import { ComponentSet } from '../collections/componentSet';
import { RegistryAccess } from '../registry/registryAccess';
import { FileResponseSuccess } from '../client/types';
import { ForceIgnore } from '../resolve';
import { ConvertContext } from './convertContext/convertContext';
import { SfdxFileFormat, WriterFormat } from './types';

/**
 * A promisified stream pipeline: pipes a readable source through zero or more
 * writable destinations and resolves when the pipeline completes.
 */
export type PromisifiedPipeline = <T extends NodeJS.ReadableStream>(source: T, ...destinations: NodeJS.WritableStream[]) => Promise<void>;

/** Returns the promisified pipeline function used to drive conversion streams. */
export declare const getPipeline: () => PromisifiedPipeline;

/** Drains a stream into a single Buffer, resolving once the stream ends. */
export declare const stream2buffer: (stream: Stream) => Promise<Buffer>;

/**
 * Transform stream in the conversion pipeline: consumes SourceComponent chunks
 * and emits WriterFormat results for the target format.
 */
export declare class ComponentConverter extends Transform {
    private targetFormat;
    private mergeSet?;
    private defaultDirectory?;
    /** Shared conversion state accumulated while components flow through the pipeline. */
    readonly context: ConvertContext;
    private transformerFactory;
    /**
     * @param targetFormat format the components are being converted to
     * @param registry metadata registry used to resolve component types
     * @param mergeSet optional set of components to merge converted output into
     * @param defaultDirectory optional default output directory
     */
    constructor(targetFormat: SfdxFileFormat, registry: RegistryAccess, mergeSet?: ComponentSet | undefined, defaultDirectory?: string | undefined);
    _transform(chunk: SourceComponent, encoding: string, callback: (err: Error | undefined, data: WriterFormat) => void): Promise<void>;
    /**
     * Called at the end when all components have passed through the pipeline. Finalizers
     * take care of any additional work to be done at this stage e.g. recomposing child components.
     */
    _flush(callback: (err: Error | undefined, data?: WriterFormat) => void): Promise<void>;
}

/**
 * Base class for conversion-pipeline sinks that consume WriterFormat chunks
 * produced by ComponentConverter.
 */
export declare abstract class ComponentWriter extends Writable {
    /** Root path converted output is written under, when writing to disk. */
    protected rootDestination?: SourcePath;
    protected logger: Logger;
    constructor(rootDestination?: SourcePath);
}

/** Writes converted components to the file system under rootDestination. */
export declare class StandardWriter extends ComponentWriter {
    /** filepaths that converted files were written to */
    readonly converted: string[];
    /** successful delete responses recorded during the write. */
    readonly deleted: FileResponseSuccess[];
    /** forceignore rules consulted while writing (assumes .forceignore filtering — confirm in implementation). */
    readonly forceignore: ForceIgnore;
    constructor(rootDestination: SourcePath);
    _write(chunk: WriterFormat, encoding: string, callback: (err?: Error) => void): Promise<void>;
}

/** Writes converted components into an in-memory zip archive instead of to disk. */
export declare class ZipWriter extends ComponentWriter {
    /**
     * Count of files (not directories) added to the zip file.
     */
    fileCount: number;
    private zip;
    private zipBuffer?;
    constructor(rootDestination?: SourcePath);
    /** The finished zip contents; undefined until the stream has been finalized. */
    get buffer(): Buffer | undefined;
    _write(chunk: WriterFormat, encoding: string, callback: (err?: Error) => void): Promise<void>;
    _final(callback: (err?: Error) => void): Promise<void>;
    /** Adds a single entry (string, stream, or buffer contents) to the zip at the given path. */
    addToZip(contents: string | Readable | Buffer, path: SourcePath): void;
}

/**
 * Convenient wrapper to serialize a js object to XML content. Implemented as a stream
 * to be used as a valid source for ComponentWriters in the conversion pipeline,
 * even though it's not beneficial in the typical way a stream is.
 */
export declare class JsToXml extends Readable {
    private xmlObject;
    constructor(xmlObject: JsonMap);
    _read(): void;
}

/** xmlBuilder likes to add newline and indent before/after the comment (hypothesis: it uses `<` as a hint to newline/indent) */
export declare const correctComments: (xml: string) => string;

/**
 * use this function to handle special html entities.
 * XmlBuilder will otherwise replace ex: `&#160;` with `'&amp;#160;'` (escape the &)
 * This is a separate function to allow for future handling of other special entities
 *
 * See https://github.com/NaturalIntelligence/fast-xml-parser/blob/fa5a7339a5ae2ca4aea8a256179b82464dbf510e/docs/v4/5.Entities.md
 * The parser can call addEntities to support more, but the Builder does not have that option.
 * You also can't use Builder.tagValueProcessor to use this function
 * because the escaping of `&` happens AFTER that is called.
 */
export declare const handleSpecialEntities: (xml: string) => string;