UNPKG

@baseplate-dev/sync

Version:

Library for syncing Baseplate descriptions

110 lines · 4.61 kB
import path from 'node:path';
import { CancelledSyncError } from '#src/errors.js';
import { cleanDeletedFiles } from './clean-deleted-files.js';
import { createCodebaseFileReaderFromDirectory } from './codebase-file-reader.js';
import {
  filterPostWriteCommands,
  runPostWriteCommands,
  sortPostWriteCommands,
} from './post-write-commands/index.js';
import {
  FormatterError,
  prepareGeneratorFiles,
  PrepareGeneratorFilesError,
} from './prepare-generator-files/index.js';
import { writeGeneratorFiles } from './write-generator-file/index.js';

/**
 * Write the generator output to the output directory.
 *
 * Prepares and writes the generated files, cleans up files deleted since the
 * previous generation, then runs post-write commands — unless any conflicts
 * were detected or `options.skipCommands` is set.
 *
 * @param output - The generator output to write (files, formatters, post-write commands)
 * @param outputDirectory - The directory to write the output to
 * @param options - The write options (previous payload, abort signal, logger, etc.)
 * @returns The result of the write operation: `filesWithConflicts`,
 *   `failedCommands`, and `fileIdToRelativePathMap`
 * @throws CancelledSyncError when `options.abortSignal` is aborted between phases
 */
export async function writeGeneratorOutput(output, outputDirectory, options) {
  const {
    previousGeneratedPayload,
    generatedContentsDirectory,
    rerunCommands = [],
    logger = console,
    abortSignal,
    overwriteOptions,
  } = options ?? {};
  // write files
  try {
    const workingCodebase = createCodebaseFileReaderFromDirectory(outputDirectory);
    const fileWriterContext = {
      formatters: output.globalFormatters,
      logger,
      outputDirectory,
      previousGeneratedPayload,
      previousWorkingCodebase: workingCodebase,
      mergeDriver: options?.mergeDriver,
      overwriteOptions,
    };
    const { files, fileIdToRelativePathMap } = await prepareGeneratorFiles({
      files: output.files,
      context: fileWriterContext,
    });
    if (abortSignal?.aborted) throw new CancelledSyncError();
    await writeGeneratorFiles({
      fileOperations: files,
      outputDirectory,
      generatedContentsDirectory,
    });
    // Clean up deleted files if we have previous generated contents
    const { relativePathsPendingDelete } = await cleanDeletedFiles({
      outputDirectory,
      previousGeneratedPayload,
      currentFileIdToRelativePathMap: fileIdToRelativePathMap,
    });
    if (abortSignal?.aborted) throw new CancelledSyncError();
    // Only files whose contents were actually (re)written count as modified
    // for the purpose of deciding which post-write commands to run.
    const modifiedRelativePaths = new Set(
      files
        .filter((result) => result.mergedContents)
        .map((result) => result.relativePath),
    );
    const commandsToRun = filterPostWriteCommands(output.postWriteCommands, {
      modifiedRelativePaths,
      rerunCommands,
    });
    const orderedCommands = sortPostWriteCommands(commandsToRun);
    // Collect all three conflict kinds: merge conflicts, files deleted in the
    // working tree, and previously-generated files pending deletion.
    const filesWithConflicts = [
      ...files
        .filter((result) => result.hasConflict)
        .map((result) => ({
          relativePath: result.relativePath,
          generatedConflictRelativePath: result.generatedConflictRelativePath,
          conflictType: 'merge-conflict',
        })),
      ...files
        .filter((result) => result.deletedInWorking)
        .map((result) => ({
          relativePath: result.relativePath,
          conflictType: 'working-deleted',
        })),
      ...relativePathsPendingDelete.map((relativePath) => ({
        relativePath,
        conflictType: 'generated-deleted',
      })),
    ];
    // don't run commands if there are conflicts
    if (filesWithConflicts.length > 0) {
      return {
        failedCommands: orderedCommands.map((c) => ({
          command: c.command,
          workingDir: path.join(outputDirectory, c.options?.workingDirectory ?? ''),
        })),
        fileIdToRelativePathMap,
        filesWithConflicts,
      };
    }
    if (options?.skipCommands) {
      return {
        filesWithConflicts,
        failedCommands: [],
        fileIdToRelativePathMap,
      };
    }
    const { failedCommands } = await runPostWriteCommands(
      orderedCommands,
      outputDirectory,
      logger,
    );
    return {
      filesWithConflicts,
      failedCommands,
      fileIdToRelativePathMap,
    };
  } catch (error) {
    // FIX: guard causes[0] with `?.` — the original dereferenced
    // `error.causes[0].cause` unconditionally, so an empty `causes` array
    // would throw a TypeError here and mask the real error being rethrown.
    if (
      error instanceof PrepareGeneratorFilesError &&
      error.causes[0]?.cause instanceof FormatterError
    ) {
      const formatterError = error.causes[0].cause;
      logger.info(`File Dump:\n${formatterError.fileContents}`);
    }
    throw error;
  }
}
//# sourceMappingURL=write-generator-output.js.map