apex-code-coverage-transformer

Transform Salesforce Apex code coverage JSONs into other formats accepted by SonarQube, GitHub, GitLab, Azure, Bitbucket, etc.
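
The module below exposes a single async entry point, transformCoverageReport. The following is a minimal sketch of calling it directly; the relative import path, the file paths, and the 'sonar' format name are illustrative assumptions, since the package is normally driven through its Salesforce CLI plugin commands rather than imported by hand.

import { transformCoverageReport } from './transformers/coverageTransformer.js'; // assumed module path

const { finalPaths, warnings } = await transformCoverageReport(
  'coverage.json',   // example path: coverage JSON produced by a Salesforce deploy or test run
  'coverage.xml',    // example path: base output path for the generated report(s)
  ['sonar'],         // assumed format name; must match a registered coverage handler
  []                 // package directories to exclude from file-path resolution
);

warnings.forEach((w) => console.warn(w));
console.log(`Generated: ${finalPaths.join(', ')}`);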

coverageTransformer.js
/* eslint-disable no-await-in-loop */
import { readFile } from 'node:fs/promises';
import { mapLimit } from 'async';
import { getCoverageHandler } from '../handlers/getHandler.js';
import { getPackageDirectories } from '../utils/getPackageDirectories.js';
import { findFilePath } from '../utils/findFilePath.js';
import { setCoveredLines } from '../utils/setCoveredLines.js';
import { getConcurrencyThreshold } from '../utils/getConcurrencyThreshold.js';
import { checkCoverageDataType } from '../utils/setCoverageDataType.js';
import { generateAndWriteReport } from './reportGenerator.js';

// Reads a Salesforce coverage JSON, routes it to the matching processor
// (deploy or test format), and writes one report per requested format.
export async function transformCoverageReport(jsonFilePath, outputReportPath, formats, ignoreDirs) {
  const warnings = [];
  const finalPaths = [];
  const formatAmount = formats.length;
  let filesProcessed = 0;

  // Bail out early (with a warning) if the coverage JSON cannot be read.
  const jsonData = await tryReadJson(jsonFilePath, warnings);
  if (!jsonData) return { finalPaths: [outputReportPath], warnings };

  const parsedData = JSON.parse(jsonData);
  const { repoRoot, packageDirectories } = await getPackageDirectories(ignoreDirs);
  const handlers = createHandlers(formats);
  const commandType = checkCoverageDataType(parsedData);
  const concurrencyLimit = getConcurrencyThreshold();
  const context = {
    handlers,
    packageDirs: packageDirectories,
    repoRoot,
    concurrencyLimit,
    warnings,
  };

  if (commandType === 'DeployCoverageData') {
    filesProcessed = await processDeployCoverage(parsedData, context);
  } else if (commandType === 'TestCoverageData') {
    filesProcessed = await processTestCoverage(parsedData, context);
  } else {
    throw new Error('The provided JSON does not match a known coverage data format from the Salesforce deploy or test command.');
  }

  if (filesProcessed === 0) {
    warnings.push('None of the files listed in the coverage JSON were processed. The coverage report will be empty.');
  }

  // Finalize each handler and write its report to disk.
  for (const [format, handler] of handlers.entries()) {
    const coverageObj = handler.finalize();
    const finalPath = await generateAndWriteReport(outputReportPath, coverageObj, format, formatAmount);
    finalPaths.push(finalPath);
  }
  return { finalPaths, warnings };
}

// Reads the coverage JSON from disk, recording a warning instead of throwing on failure.
async function tryReadJson(path, warnings) {
  try {
    return await readFile(path, 'utf-8');
  } catch {
    warnings.push(`Failed to read ${path}.
Confirm file exists.`);
    return null;
  }
}

// Builds one coverage handler per requested output format.
function createHandlers(formats) {
  const handlers = new Map();
  for (const format of formats) {
    handlers.set(format, getCoverageHandler(format));
  }
  return handlers;
}

// Processes deploy-command coverage: keys are file names, values carry an `s` line map.
async function processDeployCoverage(data, context) {
  let processed = 0;
  await mapLimit(Object.keys(data), context.concurrencyLimit, async (fileName) => {
    const fileInfo = data[fileName];
    const formattedName = fileName.replace(/no-map[\\/]+/, '');
    const path = await findFilePath(formattedName, context.packageDirs, context.repoRoot);
    if (!path) {
      context.warnings.push(`The file name ${formattedName} was not found in any package directory.`);
      return;
    }
    // Adjust the covered-line map against the resolved source file before handing it off.
    fileInfo.s = await setCoveredLines(path, context.repoRoot, fileInfo.s);
    for (const handler of context.handlers.values()) {
      handler.processFile(path, formattedName, fileInfo.s);
    }
    processed++;
  });
  return processed;
}

// Processes test-command coverage: each entry carries `name` and a `lines` map.
async function processTestCoverage(data, context) {
  let processed = 0;
  await mapLimit(data, context.concurrencyLimit, async (entry) => {
    const formattedName = entry.name.replace(/no-map[\\/]+/, '');
    const path = await findFilePath(formattedName, context.packageDirs, context.repoRoot);
    if (!path) {
      context.warnings.push(`The file name ${formattedName} was not found in any package directory.`);
      return;
    }
    for (const handler of context.handlers.values()) {
      handler.processFile(path, formattedName, entry.lines);
    }
    processed++;
  });
  return processed;
}
//# sourceMappingURL=coverageTransformer.js.map
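
For reference, a hedged sketch of the two input shapes the transformer distinguishes, inferred only from the fields this module actually reads (object keys and `s` for deploy data; `name` and `lines` for test data). The property values shown are illustrative assumptions, not guaranteed output of the Salesforce CLI.

// DeployCoverageData: an object keyed by file name (often prefixed with "no-map/"),
// where `s` appears to map line numbers to hit counts (assumed values below).
const deployCoverageExample = {
  'no-map/AccountTrigger': {
    s: { 1: 1, 2: 0, 3: 4 }, // line number -> hit count (illustrative)
  },
};

// TestCoverageData: an array of per-file entries; only `name` and `lines` are read here.
const testCoverageExample = [
  {
    name: 'AccountTrigger',
    lines: { 1: 1, 2: 0, 3: 4 }, // line number -> hit count (illustrative)
  },
];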