/*
 * mdat
 * CLI tool and TypeScript library implementing the Markdown Autophagic Template (MDAT) system.
 * MDAT lets you use comments as dynamic content templates in Markdown files, making it easy to
 * generate and update readme boilerplate.
 * Bundled output: 818 lines (817 loc), 30.6 kB, JavaScript.
 */
import { cosmiconfig, defaultLoaders } from "cosmiconfig";
import { TypeScriptLoader } from "cosmiconfig-typescript-loader";
import fs from "node:fs/promises";
import path from "node:path";
import picocolors from "picocolors";
import plur from "plur";
import { readPackage } from "read-pkg";
import { deepMergeDefined, getSoleRule, log, mdatCheck, mdatClean, mdatExpand, mdatSplit, optionsSchema, rulesSchema } from "remark-mdat";
import { z } from "zod";
import { packageUp } from "package-up";
import { isFile, isFileSync } from "path-type";
import { Configuration } from "unified-engine";
import untildify from "untildify";
import { remark } from "remark";
import remarkGfm from "remark-gfm";
import { read } from "to-vfile";
import { VFile } from "vfile";
import { globby } from "globby";
import { promisify } from "node:util";
import { brotliCompress, gzip } from "node:zlib";
import prettyBytes from "pretty-bytes";
import { toc } from "mdast-util-toc";
import { findUp } from "find-up";
import { packageDirectory } from "package-directory";
//#region src/lib/mdat-json-loader.ts
/**
 * Cosmiconfig loader that turns arbitrary JSON documents (like package.json)
 * into reasonably good, flat mdat rule sets.
 * HOWEVER cosmiconfig treats package.json as a special case and will always
 * load only specific keys from it, so config.ts intercepts and loads that
 * file manually.
 */
function mdatJsonLoader(filePath, content) {
  const parsed = defaultLoaders[".json"](filePath, content);
  return flattenJson(parsed);
}
/**
 * Recursively flattens a nested JSON object into a single-level map whose
 * keys are dot-delimited paths and whose values are stringified leaves.
 * Arrays and primitives are treated as leaves; `null` becomes the string
 * "null". Mutates and returns `result`.
 */
function flattenJson(jsonObject, parentKey = "", result = {}) {
  for (const [key, value] of Object.entries(jsonObject)) {
    const keyPath = parentKey === "" ? key : `${parentKey}.${key}`;
    const isPlainObject = typeof value === "object" && value !== null && !Array.isArray(value);
    if (isPlainObject) {
      flattenJson(value, keyPath, result);
    } else {
      result[keyPath] = value === null ? "null" : value.toString();
    }
  }
  return result;
}
//#endregion
//#region src/lib/utilities.ts
/**
 * Left-pads `n` with zeros so it is as wide as the decimal representation of
 * `nMax` (minimum one digit when `nMax` is 0).
 */
function zeroPad(n, nMax) {
  const width = nMax === 0 ? 1 : Math.floor(Math.log10(Math.abs(nMax))) + 1;
  return String(n).padStart(width, "0");
}
/**
 * Resolves input/output path pairs for a batch of files.
 * When a shared output `name` is given and there are multiple inputs, each
 * derived name gets a zero-padded "-NN" suffix to keep files distinct.
 */
async function getInputOutputPaths(inputs, output, name, extension) {
  const resolvedPaths = [];
  for (const [index, input] of inputs.entries()) {
    const suffix = name && inputs.length > 1 ? `-${zeroPad(index + 1, inputs.length)}` : "";
    resolvedPaths.push(await getInputOutputPath(input, output, name, extension, suffix));
  }
  return resolvedPaths;
}
/**
 * Resolves a single input file and its output destination.
 * The output file name is derived from `name` (or the input), the optional
 * `nameSuffix`, and `extension` (or the extension inferred from `name`/input).
 * Creates the output directory if needed.
 * @throws {Error} if the input file does not exist, or if the output path is
 * an existing file (it must be a directory)
 */
async function getInputOutputPath(input, output, name, extension, nameSuffix = "") {
  const resolvedInput = expandPath(input);
  const resolvedOutput = output ? expandPath(output) : void 0;
  if (!isFileSync(resolvedInput)) throw new Error(`Input file not found: "${resolvedInput}"`);
  if (resolvedOutput) {
    if (isFileSync(resolvedOutput)) throw new Error(`Output path must be a directory, received a file path: "${resolvedOutput}"`);
    await fs.mkdir(resolvedOutput, { recursive: true });
  }
  const baseName = name ? path.basename(name, path.extname(name)) : path.basename(resolvedInput, path.extname(resolvedInput));
  // Fix: path.extname() already includes the leading "." — previously the
  // fallback branches produced names like "file..md" (double dot) or a
  // trailing "." when there was no extension at all. Normalize to a dot-less
  // extension and only prepend "." when one exists. The common case (an
  // explicit dot-less `extension` like "md") is unchanged.
  const rawExtension = extension ?? (name && path.extname(name) !== "" ? path.extname(name) : path.extname(input));
  const cleanExtension = rawExtension.replace(/^\./, "");
  return {
    input: resolvedInput,
    name: `${baseName}${nameSuffix}${cleanExtension === "" ? "" : `.${cleanExtension}`}`,
    output: resolvedOutput ?? path.dirname(resolvedInput)
  };
}
/** Expands a leading tilde ("~") in a path to the user's home directory. */
function expandPath(file) {
  const expanded = untildify(file);
  return expanded;
}
/** Finds the closest package.json at or above the current working directory. */
async function findPackage() {
  return await packageUp();
}
/**
 * Normalizes a value to an array: nullish becomes [], an array is returned
 * as-is (same reference), anything else is wrapped in a one-element array.
 */
function ensureArray(value) {
  if (value == null) return [];
  if (Array.isArray(value)) return value;
  return [value];
}
/**
 * Detects and loads any ambient remark configuration (.remarkrc files or a
 * "remarkConfig" field in package.json) starting from the current working
 * directory. Never rejects: load errors are logged and an empty
 * configuration object is returned instead.
 */
async function loadAmbientRemarkConfig() {
  const engineConfiguration = new Configuration({
    cwd: process.cwd(),
    detectConfig: true,
    packageField: "remarkConfig",
    rcName: ".remarkrc"
  });
  // Configuration.load is callback-based; adapt it to a promise that
  // resolves with undefined (rather than rejecting) on error.
  const loaded = await new Promise((resolve) => {
    engineConfiguration.load("", (error, result) => {
      if (error) {
        log.error(String(error));
        resolve(void 0);
      } else {
        resolve(result);
      }
    });
  });
  if (!loaded) {
    log.info("No ambient Remark configuration found");
    return {
      filePath: void 0,
      plugins: [],
      settings: {}
    };
  }
  if (loaded.filePath === void 0) log.info("No ambient Remark configuration file found");
  else log.info(`Found and loaded ambient Remark configuration from "${loaded.filePath}"`);
  return loaded;
}
//#endregion
//#region src/lib/config.ts
// Extends the core remark-mdat options schema with mdat-specific fields:
// `assetsPath` (where readme banner images live, e.g. "./assets") and
// `packageFile` (path to the package.json used by the readme rules).
const configSchema = optionsSchema.merge(z.object({
assetsPath: z.string().optional(),
packageFile: z.string().optional()
})).describe("Config Extension");
/**
 * Load and validate mdat configuration / rule sets
 * Uses cosmiconfig to search in the usual places.
 * Merge precedence: Base Defaults < Readme Defaults < Searched Config < Additional Config Paths
 *
 * Generic to accommodate additional Config options, so set T to your custom config type if needed. You must provide a matching configExtensionSchema as well.
 *
 * Side effect: resets, then repopulates, the module-level `config` cache
 * (and clears the `packageJson` cache) used by getConfig()/getPackageJson().
 */
async function loadConfig(options) {
const { additionalConfig, additionalRules, readmeDefaults, searchFrom } = options ?? {};
// Invalidate the module-level caches before rebuilding the config
config = void 0;
packageJson = void 0;
// 1. Base defaults (lowest precedence)
let finalConfig = {
addMetaComment: false,
assetsPath: "./assets",
closingPrefix: "/",
keywordPrefix: "",
metaCommentIdentifier: "+",
packageFile: await findPackage(),
rules: { mdat: `Powered by the Markdown Autophagic Template system: [mdat](https://github.com/kitschpatrol/mdat).` }
};
// 2. Readme defaults, when provided (see loadConfigReadme)
if (readmeDefaults) finalConfig = deepMergeDefined(finalConfig, readmeDefaults);
// 3. Searched config: cosmiconfig walks up from searchFrom looking for the
// usual "mdat" config files (TypeScript configs supported via custom loader)
const configExplorer = cosmiconfig("mdat", { loaders: { ".ts": TypeScriptLoader() } });
const results = await configExplorer.search(searchFrom);
if (results) {
const { config, filepath } = results;
let possibleConfig = config;
log.info(`Using config from "${filepath}"`);
// A string value under the "mdat" key in package.json names a shared
// config package; dynamically import it and use its default export
if (filepath.endsWith("package.json") && typeof config === "string") {
log.info(`Detected shared config string: "${config}"`);
const { default: sharedConfig } = await import(config);
possibleConfig = sharedConfig;
}
const configFromObject = getAndValidateConfigFromObject(possibleConfig, configSchema);
if (configFromObject) finalConfig = deepMergeDefined(finalConfig, configFromObject);
}
// 4. Additional configs (inline objects or file paths), merged in order so
// later entries take precedence; invalid entries are logged and skipped
if (additionalConfig !== void 0) {
const additionalConfigsArray = Array.isArray(additionalConfig) ? additionalConfig : [additionalConfig];
for (const configOrPath of additionalConfigsArray) {
let config;
if (typeof configOrPath === "string") {
const results = await configExplorer.load(configOrPath);
if (results === null || results === void 0) continue;
const { config: loadedConfig, filepath } = results;
log.info(`Loaded additional config from "${filepath}"`);
config = loadedConfig;
} else config = configOrPath;
if (config === void 0) continue;
log.info("Merging configuration object");
const configFromObject = getAndValidateConfigFromObject(config, configSchema);
if (configFromObject !== void 0) finalConfig = deepMergeDefined(finalConfig, configFromObject);
}
}
// 5. Additional bare rule sets (inline objects or file paths); JSON files
// are flattened via mdatJsonLoader so arbitrary JSON can serve as rules
if (additionalRules !== void 0) {
const additionalRulesArray = Array.isArray(additionalRules) ? additionalRules : [additionalRules];
const rulesExplorer = cosmiconfig("mdat", { loaders: {
".json": mdatJsonLoader,
".ts": TypeScriptLoader()
} });
for (const rulesOrPath of additionalRulesArray) {
let rules;
if (typeof rulesOrPath === "string") {
let results;
// cosmiconfig special-cases package.json (reads only known keys),
// so load and flatten it manually here
if (path.basename(rulesOrPath).endsWith("package.json")) results = {
config: mdatJsonLoader(rulesOrPath, await fs.readFile(rulesOrPath, "utf8")),
filepath: rulesOrPath
};
else results = await rulesExplorer.load(rulesOrPath);
if (results === null || results === void 0) continue;
const { config: loadedRules, filepath } = results;
log.info(`Loaded additional config from "${filepath}"`);
rules = loadedRules;
} else rules = rulesOrPath;
if (rules === void 0) continue;
log.info("Merging rules into configuration object");
const configFromRulesObject = getAndValidateConfigFromRulesObject(rules, rulesSchema);
if (configFromRulesObject !== void 0) finalConfig = deepMergeDefined(finalConfig, configFromRulesObject);
}
}
// Report the final rule set (sorted, colorized rule names)
if (finalConfig.rules) {
const prettyRules = Object.keys(finalConfig.rules).toSorted().map((rule) => `"${picocolors.green(picocolors.bold(rule))}"`);
log.info(`Loaded ${picocolors.bold(prettyRules.length)} mdat comment expansion ${plur("rule", prettyRules.length)}:`);
for (const rule of prettyRules) log.info(`\t${rule}`);
} else log.error("No rules loaded from additional configurations or rules, using default.");
// Populate the module-level cache consumed by getConfig()
config = finalConfig;
return finalConfig;
}
/**
 * Validates a bare rules object against the rules schema and wraps it in a
 * `{ rules }` config fragment. Returns undefined (after logging an error)
 * when the object fails validation.
 */
function getAndValidateConfigFromRulesObject(rulesObject, rulesSchema) {
  const { success } = rulesSchema.safeParse(rulesObject);
  if (success) return { rules: rulesObject };
  log.error(`Rules object has the wrong shape. Ignoring and using default configuration:\n${JSON.stringify(rulesObject, void 0, 2)}`);
}
/**
 * Validates a config object against the given schema. Returns the object
 * unchanged on success, or undefined (after logging an error) on failure.
 */
function getAndValidateConfigFromObject(configObject, configSchema) {
  const { success } = configSchema.safeParse(configObject);
  if (success) return configObject;
  log.error(`Config object has the wrong shape. Ignoring and using default configuration:\n${JSON.stringify(configObject, void 0, 2)}`);
}
// Module-level cache, populated by loadConfig()
let config;
/**
 * Get the current MDAT config object, loading it if necessary
 */
async function getConfig() {
  if (config !== void 0) return config;
  log.warn("getConfig(): config was undefined");
  // loadConfig() also assigns the cache itself; ??= avoids clobbering it
  config ??= await loadConfig();
  return config;
}
// Module-level cache for the parsed package.json, populated on first use
let packageJson;
/**
 * Convenience function for rules
 * Load as package json only as needed, memoize
 * Rules could call this themselves, but this is more convenient and efficient
 * @throws {Error} If no package.json is found
 */
async function getPackageJson() {
  const { packageFile } = await getConfig();
  if (packageFile === void 0) throw new Error("No packageFile found or set in config");
  if (packageJson == null) packageJson = await readPackage({ cwd: path.dirname(packageFile) });
  if (packageJson === void 0) throw new Error("No package.json found");
  return packageJson;
}
/**
 * Convenience function for merging configs
 * Performs a deep merge, with the rightmost object taking precedence
 */
function mergeConfigs(a, b) {
  const merged = deepMergeDefined(a, b);
  return merged;
}
//#endregion
//#region src/lib/processors.ts
/**
 * Shared driver for the file-based commands: loads config via `loader`,
 * resolves input/output paths, builds a processor via `processorGetter`,
 * and runs every file through it. Nothing is written to disk — the caller
 * owns the returned VFiles (each has dirname/basename set for writing).
 */
async function processFiles(files, loader, processorGetter, name, output, config, rules) {
  const resolvedConfig = await loader({
    additionalConfig: config,
    additionalRules: rules
  });
  const ambientRemarkConfiguration = await loadAmbientRemarkConfig();
  const inputOutputPaths = await getInputOutputPaths(ensureArray(files), output, name, "md");
  const processor = processorGetter(resolvedConfig, ambientRemarkConfiguration);
  const processedFiles = [];
  // Run each file through the shared processor, one at a time
  for (const inputOutputPath of inputOutputPaths) {
    const inputFile = await read(inputOutputPath.input);
    const result = await processor.process(inputFile);
    result.dirname = inputOutputPath.output;
    result.basename = inputOutputPath.name;
    processedFiles.push(result);
  }
  return processedFiles;
}
/**
 * Shared driver for the string-based commands: loads config via `loader`
 * and runs the Markdown string through the processor built by
 * `processorGetter`. Returns the processed VFile.
 */
async function processString(markdown, loader, processorGetter, config, rules) {
  const resolvedConfig = await loader({
    additionalConfig: config,
    additionalRules: rules
  });
  const ambientRemarkConfiguration = await loadAmbientRemarkConfig();
  const processor = processorGetter(resolvedConfig, ambientRemarkConfiguration);
  return processor.process(new VFile(markdown));
}
/**
 * Builds a remark processor that splits, cleans, and then expands mdat
 * comments, layered on GFM and any ambient remark configuration.
 */
function getExpandProcessor(options, ambientRemarkConfig) {
  const processor = remark()
    .use({ settings: {
      bullet: "-",
      emphasis: "_"
    } })
    .use(remarkGfm)
    .use(ambientRemarkConfig)
    .use(() => async (tree, file) => {
      mdatSplit(tree, file);
      mdatClean(tree, file, options);
      await mdatExpand(tree, file, options);
    });
  return processor;
}
/**
 * Builds a remark processor that splits and cleans (collapses) mdat
 * comments without expanding them.
 */
function getCleanProcessor(options, ambientRemarkConfig) {
  const processor = remark()
    .use({ settings: {
      bullet: "-",
      emphasis: "_"
    } })
    .use(remarkGfm)
    .use(ambientRemarkConfig)
    .use(() => (tree, file) => {
      mdatSplit(tree, file);
      mdatClean(tree, file, options);
    });
  return processor;
}
/**
 * Builds a remark processor that validates mdat comments, forcing
 * `paranoid: false` regardless of the incoming options.
 */
function getCheckProcessor(options, ambientRemarkConfig) {
  const checkOptions = {
    ...options,
    paranoid: false
  };
  const processor = remark()
    .use({ settings: {
      bullet: "-",
      emphasis: "_"
    } })
    .use(remarkGfm)
    .use(ambientRemarkConfig)
    .use(() => async (tree, file) => {
      await mdatCheck(tree, file, checkOptions);
    });
  return processor;
}
//#endregion
//#region src/lib/api.ts
/**
 * Expand MDAT comments in one or more Markdown files
 * Writing is the responsibility of the caller (e.g. via `await write(result)`)
 * @returns an array of VFiles (Even if you only pass a single file path!)
 */
async function expandFiles(files, name, output, config, rules) {
  const results = await processFiles(files, loadConfig, getExpandProcessor, name, output, config, rules);
  return results;
}
/**
 * Expand MDAT comments in a Markdown string
 * @returns the processed VFile
 */
async function expandString(markdown, config, rules) {
  const result = await processString(markdown, loadConfig, getExpandProcessor, config, rules);
  return result;
}
/**
 * Check and validate MDAT comments in one or more Markdown files
 * @returns an array of VFiles (Even if you only pass a single file path!)
 */
async function checkFiles(files, name, output, config, rules) {
  const results = await processFiles(files, loadConfig, getCheckProcessor, name, output, config, rules);
  return results;
}
/**
 * Check and validate MDAT comments in a Markdown string
 * @returns the processed VFile (inspect its messages for problems)
 */
async function checkString(markdown, config, rules) {
  const result = await processString(markdown, loadConfig, getCheckProcessor, config, rules);
  return result;
}
/**
 * Collapse MDAT comments in one or more Markdown files
 * Writing is the responsibility of the caller (e.g. via `await write(result)`)
 * @returns an array of VFiles (Even if you only pass a single file path!)
 */
async function collapseFiles(files, name, output, config, rules) {
  const results = await processFiles(files, loadConfig, getCleanProcessor, name, output, config, rules);
  return results;
}
/**
 * Collapse MDAT comments in a Markdown string
 * @returns the processed VFile
 */
async function collapseString(markdown, config, rules) {
  const result = await processString(markdown, loadConfig, getCleanProcessor, config, rules);
  return result;
}
//#endregion
//#region src/lib/readme/rules/badges.ts
var badges_default = { badges: {
  /**
   * Generates a line of badges from package.json metadata.
   * Options:
   * - `npm`: explicit list of npm package names to badge (defaults to the
   *   package itself when it is unscoped or published with public access)
   * - `custom`: map of name → { image, link } for arbitrary extra badges
   */
  async content(options) {
    const validOptions = z.object({
      custom: z.record(z.object({
        image: z.string(),
        link: z.string()
      })).optional(),
      npm: z.array(z.string()).optional()
    }).optional().parse(options);
    const packageJson = await getPackageJson();
    const { name } = packageJson;
    const badges = [];
    // NOTE(review): the badge image markdown below was reconstructed — the
    // literals in this bundle were corrupted (image portions stripped,
    // leaving "[](...)"); verify the exact badge URLs against upstream.
    if (validOptions?.npm === void 0) {
      // Badge the package itself, but only if it looks publicly published
      if (!packageJson.name.startsWith("@") || packageJson.publishConfig?.access === "public") badges.push(`[![NPM Package ${name}](https://img.shields.io/npm/v/${name}.svg)](https://npmjs.com/package/${name})`);
    } else for (const name of validOptions.npm) badges.push(`[![NPM Package ${name}](https://img.shields.io/npm/v/${name}.svg)](https://npmjs.com/package/${name})`);
    const { license } = packageJson;
    if (license !== void 0) badges.push(`[![License: ${license}](https://img.shields.io/badge/license-${license}-yellow.svg)](https://opensource.org/licenses/${license})`);
    if (validOptions?.custom !== void 0) for (const [name, { image, link }] of Object.entries(validOptions.custom)) badges.push(`[![${name}](${image})](${link})`);
    return badges.join("\n");
  },
  order: 3
} };
//#endregion
//#region src/lib/readme/rules/banner.ts
var banner_default = { banner: {
  /**
   * Renders a banner image at the top of the readme.
   * Options: `src` (image path or URL) and `alt` (alt text). Both fall back
   * to auto-detection: `src` from typical asset locations (see
   * getBannerSrc), `alt` from the package name.
   * @throws {Error} if no banner image can be found, or the given src does
   * not exist, or no alt text can be derived
   */
  async content(options) {
    const validOptions = z.object({
      alt: z.string().optional(),
      src: z.string().optional()
    }).optional().parse(options);
    const { assetsPath, packageFile } = await getConfig();
    if (packageFile === void 0) throw new Error("No package.json found");
    // Try the explicit option, then the configured assets path, then the default locations
    const src = validOptions?.src ?? await getBannerSrc(assetsPath) ?? await getBannerSrc();
    if (src === void 0) throw new Error("Banner image not found at any typical location, consider adding something at ./assets/banner.webp");
    else if (!isUrl(src) && !await isFile(src)) throw new Error(`Banner image not found at "${src}"`);
    let alt = validOptions?.alt;
    if (alt === void 0) {
      const packageName = (await readPackage({ cwd: path.dirname(packageFile) })).name;
      if (packageName === void 0) throw new Error("Banner image alt text not available");
      alt = `${packageName} banner`;
    }
    // Fix: the returned image markdown was corrupted in this bundle (an
    // empty template literal); reconstructed as standard Markdown image syntax.
    return `![${alt}](${src})`;
  },
  order: 2
} };
/**
 * Searches for a likely banner image below the package root.
 * When `specificPath` is given, only that directory is searched; otherwise a
 * list of typical asset locations is tried. Matches common image extensions
 * against common banner-ish file names, one directory level deep.
 * @returns the first match, relative to the current working directory, or
 * undefined when nothing is found
 * @throws {Error} if no package.json path is configured
 */
async function getBannerSrc(specificPath) {
  const { packageFile } = await getConfig();
  if (packageFile === void 0) throw new Error("No package.json found");
  const packageRoot = path.dirname(packageFile);
  const searchLocations = specificPath === void 0 ? [
    ".",
    "assets",
    "media",
    "readme-assets",
    "readme-media",
    "readme",
    "images",
    ".github/assets"
  ] : [specificPath];
  const matches = await globby(searchLocations.map((location) => path.join(packageRoot, location)), {
    deep: 1,
    expandDirectories: {
      extensions: ["png", "gif", "jpg", "jpeg", "svg", "webp"],
      files: [
        "banner",
        "cover",
        "demo",
        "header",
        "hero",
        "image",
        "logo",
        "overview",
        "readme",
        "screenshot",
        "screenshots",
        "splash"
      ]
    }
  });
  if (matches.length === 0) return void 0;
  return path.relative(process.cwd(), matches[0]);
}
/**
 * Tests whether a string is a valid URL. With `lenient` (the default), bare
 * domains like "example.com" are retried once with an "https://" prefix.
 * Strings containing spaces are never considered URLs.
 * @throws {TypeError} if `text` is not a string
 */
function isUrl(text, lenient = true) {
  if (typeof text !== "string") throw new TypeError("Expected a string");
  const trimmed = text.trim();
  if (trimmed.includes(" ")) return false;
  try {
    // Validity check only; the constructed URL object is discarded
    new URL(trimmed);
    return true;
  } catch {
    return lenient ? isUrl(`https://${trimmed}`, false) : false;
  }
}
//#endregion
//#region src/lib/readme/rules/code.ts
var code_default = { code: {
  /**
   * Embeds a source file in a fenced code block.
   * Options: `file` (required, relative to cwd), `language` (overrides the
   * fence language otherwise inferred from the file extension), and `trim`
   * (trim surrounding whitespace, default true).
   */
  async content(options) {
    const validOptions = z.object({
      file: z.string(),
      language: z.string().optional(),
      trim: z.boolean().default(true)
    }).parse(options);
    // Fix: honor the `language` option — it was validated but never used.
    // Also drop the useless `?? ""` (path.extname never returns nullish).
    const language = validOptions.language ?? path.extname(validOptions.file).replace(/^\./, "");
    const exampleCode = await fs.readFile(path.join(process.cwd(), validOptions.file), "utf8");
    return `\`\`\`${language}\n${validOptions.trim ? exampleCode.trim() : exampleCode}\n\`\`\``;
  }
} };
//#endregion
//#region src/lib/readme/rules/contributing.ts
var contributing_default = { contributing: {
  /** Renders a "Contributing" section linking to the repo's issue tracker. */
  async content() {
    const packageJson = await getPackageJson();
    const issuesUrl = packageJson.bugs?.url;
    if (issuesUrl === void 0) throw new Error("Could not find \"bugs.url\" entry in package.json");
    return `## Contributing\n[Issues](${issuesUrl}) and pull requests are welcome.`;
  },
  order: 15
} };
//#endregion
//#region src/lib/readme/rules/description.ts
/**
 * Renders the package.json description as bold text.
 * The `short-description` rule is an alias for this one.
 */
var description_default = { description: {
  async content() {
    const { description } = await getPackageJson();
    if (description === void 0) throw new Error("Could not find \"description\" entry in package.json");
    return `**${description}**`;
  },
  order: 4
} };
//#endregion
//#region src/lib/readme/rules/license.ts
var license_default = { license: {
  /** Renders a "License" section from the package.json license and author. */
  async content() {
    const packageJson = await getPackageJson();
    const authorName = packageJson.author?.name;
    const { license } = packageJson;
    if (authorName === void 0) throw new Error("Could not find \"author.name\" entry in package.json");
    if (license === void 0) throw new Error("Could not find \"license\" entry in package.json");
    return `## License\n[${license}](license.txt) © ${authorName}`;
  },
  order: 16
} };
//#endregion
//#region src/lib/readme/rules/footer.ts
// Composite rule: the standard readme footer is the contributing section
// followed by the license section.
var footer_default = { footer: {
content: [getSoleRule(contributing_default), getSoleRule(license_default)],
order: 17
} };
//#endregion
//#region src/lib/readme/rules/short-description.ts
/**
 * Simple alias for `description` rule, to match nomenclature in
 * [standard-readme](https://github.com/RichardLitt/standard-readme/blob/main/spec.md#short-description)
 * spec.
 */
// Reuses the sole rule from description_default under a different keyword
var short_description_default = { "short-description": getSoleRule(description_default) };
//#endregion
//#region src/lib/readme/rules/title.ts
var title_default = { title: {
  applicationOrder: 2,
  /**
   * Renders the top-level H1 from the package name.
   * Options: `prefix` and `postfix` (strings wrapped around the name, both
   * default "") and `titleCase` (capitalize each word, default false).
   */
  async content(options) {
    const validOptions = z.object({
      postfix: z.string().optional().default(""),
      prefix: z.string().optional().default(""),
      titleCase: z.boolean().optional().default(false)
    }).parse(options ?? {});
    const { name: packageName } = await getPackageJson();
    const displayName = validOptions.titleCase ? makeTitleCase(packageName) : packageName;
    return `# ${validOptions.prefix}${displayName}${validOptions.postfix}`;
  },
  order: 1
} };
/**
 * Converts a delimited string (spaces, underscores, or hyphens) into Title
 * Case, e.g. "my_cool-package" → "My Cool Package".
 */
function makeTitleCase(text) {
  const words = text.split(/[ _-]+/).filter((word) => word.length > 0);
  const capitalized = words.map((word) => `${word.charAt(0).toUpperCase()}${word.slice(1)}`);
  return capitalized.join(" ");
}
//#endregion
//#region src/lib/readme/rules/header.ts
// Composite rule: the standard readme header is the title, banner, badges,
// and short description, in that order.
var header_default = { header: {
applicationOrder: 2,
content: [
getSoleRule(title_default),
getSoleRule(banner_default),
getSoleRule(badges_default),
getSoleRule(short_description_default)
],
order: 1
} };
//#endregion
//#region src/lib/readme/rules/utilities/size/size-report.ts
// Promisified zlib compressors, used by createSizeReport below
const brotliCompressAsync = promisify(brotliCompress);
const gzipCompressAsync = promisify(gzip);
/**
 * Creates a SizeInfo object with formatted values
 * @param bytes - Size in bytes
 * @param originalSize - Original file size for percentage calculation
 * @returns object with raw and pretty-printed size, plus the percentage
 * saved relative to `originalSize`
 */
function createSizeInfo(bytes, originalSize) {
  // Guard against division by zero for empty files
  let percent = 0;
  if (originalSize !== 0) {
    percent = ((originalSize - bytes) / originalSize) * 100;
  }
  return {
    bytes,
    bytesPretty: prettyBytes(bytes, { maximumFractionDigits: 1 }),
    percent,
    percentPretty: `${Math.round(percent)}%`
  };
}
/**
 * Analyzes a file's size and its compressed sizes using Brotli and Gzip
 * @param filePath - Path to the file to analyze
 * @returns Promise containing detailed size report
 * @throws {Error} if file cannot be read or compressed
 */
async function createSizeReport(filePath) {
  try {
    const fileContent = await fs.readFile(filePath);
    const originalSize = fileContent.length;
    // Compress with both algorithms in parallel
    const [brotliCompressed, gzipCompressed] = await Promise.all([brotliCompressAsync(fileContent), gzipCompressAsync(fileContent)]);
    return {
      brotli: createSizeInfo(brotliCompressed.length, originalSize),
      gzip: createSizeInfo(gzipCompressed.length, originalSize),
      original: createSizeInfo(originalSize, originalSize)
    };
  } catch (error) {
    // Fix: preserve the original error and its stack via `cause` (it was
    // previously dropped, keeping only the message text)
    throw new Error(`Failed to analyze file: ${error instanceof Error ? error.message : "Unknown error"}`, { cause: error });
  }
}
//#endregion
//#region src/lib/readme/rules/size.ts
// Options for the `size` rule: a required file path (relative to cwd) and an
// optional compression algorithm (defaults to "none", i.e. the raw size).
const optionsSchema$2 = z.object({
compression: z.enum([
"none",
"brotli",
"gzip"
]).optional().default("none"),
file: z.string()
});
/**
 * Picks the pretty-printed size for the requested compression algorithm
 * from a size report. Returns undefined for unknown algorithms.
 */
function getSizeForCompression(report, compression) {
  const lookup = {
    brotli: () => report.brotli.bytesPretty,
    gzip: () => report.gzip.bytesPretty,
    none: () => report.original.bytesPretty
  };
  return lookup[compression]?.();
}
var size_default = { size: {
  /** Reports the (optionally compressed) size of a file, relative to cwd. */
  async content(options) {
    const validOptions = optionsSchema$2.parse(options);
    const report = await createSizeReport(path.join(process.cwd(), validOptions.file));
    return getSizeForCompression(report, validOptions.compression);
  }
} };
//#endregion
//#region src/lib/readme/rules/size-table.ts
// Accepts a single path or an array of paths, normalized to an array.
const fileSchema = z.union([z.string(), z.array(z.string())]).transform((files) => Array.isArray(files) ? files : [files]);
// Options for the `size-table` rule. Two equivalent shapes are accepted —
// one keyed `file`, one keyed `files` — and the transform below normalizes
// the singular `file` key to `files` so downstream code sees a single shape.
const optionsSchema$1 = z.union([z.object({
brotli: z.boolean().optional().default(true),
file: fileSchema,
gzip: z.boolean().optional().default(true),
original: z.boolean().optional().default(true),
showPercentage: z.boolean().optional().default(false)
}), z.object({
brotli: z.boolean().optional().default(true),
files: fileSchema,
gzip: z.boolean().optional().default(true),
original: z.boolean().optional().default(true),
showPercentage: z.boolean().optional().default(false)
})]).transform((options) => {
// Rename the `file` key to `files`, keeping everything else
if ("file" in options) {
const { file, ...rest } = options;
return {
files: file,
...rest
};
}
return options;
});
/**
 * Renders size reports as a Markdown table with one row per file and columns
 * enabled by `options` (Original / Gzip / Brotli). When `showPercentage` is
 * set, the compressed columns also show the percentage saved; the Original
 * column never shows a percentage.
 * @param reports - Array of [filePath, sizeReport] tuples
 */
function formatMarkdownTable(reports, options) {
  const headers = ["File"];
  if (options.original) headers.push("Original");
  if (options.gzip) headers.push("Gzip");
  if (options.brotli) headers.push("Brotli");
  const formatCompressedCell = (info) => options.showPercentage ? `${info.bytesPretty} (${info.percentPretty})` : info.bytesPretty;
  const lines = [];
  lines.push(`| ${headers.join(" | ")} |`);
  lines.push(`| ${headers.map(() => "---").join(" | ")} |`);
  for (const [file, report] of reports) {
    const cells = [path.basename(file)];
    if (options.original) cells.push(report.original.bytesPretty);
    if (options.gzip) cells.push(formatCompressedCell(report.gzip));
    if (options.brotli) cells.push(formatCompressedCell(report.brotli));
    lines.push(`| ${cells.join(" | ")} |`);
  }
  return lines.join("\n");
}
var size_table_default = { "size-table": {
  /** Renders a Markdown table of (compressed) sizes for one or more files. */
  async content(options) {
    const validOptions = optionsSchema$1.parse(options);
    // Build all size reports in parallel, preserving input order
    const reports = await Promise.all(validOptions.files.map(async (file) => [file, await createSizeReport(path.join(process.cwd(), file))]));
    return formatMarkdownTable(reports, {
      brotli: validOptions.brotli,
      gzip: validOptions.gzip,
      original: validOptions.original,
      showPercentage: validOptions.showPercentage
    });
  }
} };
//#endregion
//#region src/lib/readme/rules/table-of-contents.ts
var table_of_contents_default = { "table-of-contents": {
  applicationOrder: 1,
  /**
   * Generates a "Table of contents" section from the document's headings.
   * Option `depth` (1–6, default 3) caps the deepest heading level included.
   * @throws {Error} if no table of contents could be generated
   */
  async content(options, tree) {
    const validOptions = z.object({ depth: z.union([
      z.literal(1),
      z.literal(2),
      z.literal(3),
      z.literal(4),
      z.literal(5),
      z.literal(6)
    ]).optional() }).optional().parse(options);
    const result = toc(tree, {
      heading: null,
      maxDepth: validOptions?.depth ?? 3,
      tight: true
    });
    if (result.map === void 0) throw new Error("Could not generate table of contents");
    const rootWrapper = {
      children: result.map.children,
      type: "root"
    };
    // Stringify the list and collapse blank lines for a compact TOC
    const listMarkdown = remark().use(remarkGfm).stringify(rootWrapper).replaceAll("\n\n", "\n");
    return [`## Table of contents`, listMarkdown].join("\n");
  },
  order: 6
} };
//#endregion
//#region src/lib/readme/rules/toc.ts
/**
 * Simple alias for table-of-contents
 */
// Reuses the sole rule from table_of_contents_default under the "toc" keyword
var toc_default = { toc: getSoleRule(table_of_contents_default) };
//#endregion
//#region src/lib/readme/rules/index.ts
// All built-in readme rules, merged into a single keyword → rule map.
// Later spreads would win on key collisions, but the keys here are disjoint
// (aliases like `toc` and `short-description` live under their own keys).
var rules_default = {
...badges_default,
...banner_default,
...code_default,
...contributing_default,
...description_default,
...footer_default,
...header_default,
...license_default,
...short_description_default,
...size_default,
...size_table_default,
...table_of_contents_default,
...title_default,
...toc_default
};
//#endregion
//#region src/lib/readme/config.ts
/**
 * Convenience loader to always include the default readme config.
 * The readme defaults should have lower priority than searched/user config,
 * but higher priority than base mdat defaults.
 * @throws {Error} if no package.json path could be resolved
 */
async function loadConfigReadme(options) {
  const defaultReadmeConfig = {
    addMetaComment: true,
    rules: rules_default
  };
  const { additionalConfig = [], readmeDefaults = defaultReadmeConfig, ...rest } = options ?? {};
  const resolvedConfig = await loadConfig({
    additionalConfig: Array.isArray(additionalConfig) ? additionalConfig : [additionalConfig],
    readmeDefaults,
    ...rest
  });
  if (resolvedConfig.packageFile === void 0) throw new Error("Package file path is required in `mdat readme` config");
  return resolvedConfig;
}
//#endregion
//#region src/lib/readme/utilities.ts
/**
 * Searches for a readme file in the following order:
 * 1. Searches the current working directory for readme.md
 * 2. If there's no readme.md in the current directory, search up to the closest package directory
 * 3. Give up and return undefined if no readme is found
 * @returns The path to the readme file or undefined if not found
 */
async function findReadme() {
  log.info(`Searching for package directory...`);
  const stopAt = await packageDirectory() ?? process.cwd();
  const closestReadme = await findUp("readme.md", { stopAt, type: "file" });
  if (closestReadme === void 0) return void 0;
  log.info(`Found closest readme at "${closestReadme}"`);
  return closestReadme;
}
/**
 * Searches up for a readme.md file
 * @see `findReadme()` for more details on the search process.
 * @returns The path to the readme file
 * @throws {Error} if no readme is found
 */
async function findReadmeThrows() {
  const readmePath = await findReadme();
  if (readmePath === void 0) throw new Error("No readme found");
  return readmePath;
}
//#endregion
//#region src/lib/readme/api.ts
/**
 * Expands MDAT readme comments in the closest readme.md file
 * Basically an alias to `expandReadmeFiles()` with certain arguments elided.
 * @see `findReadme()` for more details on the search process.
 */
async function expandReadme(config, rules) {
  return expandReadmeFiles(undefined, undefined, undefined, config, rules);
}
/**
 * Expands MDAT readme comments in one or more Markdown files
 * Searches up for a readme.md file if none is provided.
 * @see `findReadme()` for more details on the search process.
 */
async function expandReadmeFiles(files, name, output, config, rules) {
  const resolvedFiles = files ?? await findReadmeThrows();
  return processFiles(resolvedFiles, loadConfigReadme, getExpandProcessor, name, output, config, rules);
}
/**
 * Expands MDAT readme comments in a Markdown string
 */
async function expandReadmeString(markdown, config, rules) {
  const result = await processString(markdown, loadConfigReadme, getExpandProcessor, config, rules);
  return result;
}
/**
 * Checks and validates MDAT readme comments in the closest readme.md file
 * Basically an alias to `checkReadmeFiles()` with certain arguments elided.
 * @see `findReadme()` for more details on the search process.
 */
async function checkReadme(config, rules) {
  return checkReadmeFiles(undefined, undefined, undefined, config, rules);
}
/**
 * Checks and validates MDAT readme comments in one or more Markdown files
 * Searches up for a readme.md file if none is provided.
 * @see `findReadme()` for more details on the search process.
 */
async function checkReadmeFiles(files, name, output, config, rules) {
  const resolvedFiles = files ?? await findReadmeThrows();
  return processFiles(resolvedFiles, loadConfigReadme, getCheckProcessor, name, output, config, rules);
}
/**
 * Checks and validates MDAT readme comments in a Markdown string
 */
async function checkReadmeString(markdown, config, rules) {
  const result = await processString(markdown, loadConfigReadme, getCheckProcessor, config, rules);
  return result;
}
/**
 * Collapses MDAT readme comments in the closest readme.md file
 * Basically an alias to `collapseReadmeFiles()` with certain arguments elided.
 * @see `findReadme()` for more details on the search process.
 */
async function collapseReadme(config, rules) {
  return collapseReadmeFiles(undefined, undefined, undefined, config, rules);
}
/**
 * Collapses MDAT readme comments in one or more Markdown files
 * Searches up for a readme.md file if none is provided.
 * @see `findReadme()` for more details on the search process.
 */
async function collapseReadmeFiles(files, name, output, config, rules) {
  const resolvedFiles = files ?? await findReadmeThrows();
  return processFiles(resolvedFiles, loadConfigReadme, getCleanProcessor, name, output, config, rules);
}
//#endregion
export { checkFiles, checkReadme, checkReadmeFiles, checkReadmeString, checkString, collapseFiles, collapseReadme, collapseReadmeFiles, collapseString, expandFiles, expandReadme, expandReadmeFiles, expandReadmeString, expandString, loadConfig, loadConfigReadme, mergeConfigs };