
@stephansama/auto-readme


Generate lists and tables for your README automagically based on your repository and comments
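The generator is driven by HTML comment markers placed in a README. Below is a minimal sketch of such markers, inferred from the comment parser in the bundle that follows: the marker name combines an action (ACTION, PKG, USAGE, WORKSPACE, or ZOD) with an optional JS/RS language prefix and a LIST or TABLE format, followed by key="value" parameters. The exact spelling shown here is an assumption based on that parser, not copied from the package docs.

<!-- PKG-TABLE start -->
<!-- PKG-TABLE end -->

<!-- ZOD-TABLE start path="./src/schema.ts" title="Config_schema" -->
<!-- ZOD-TABLE end -->

On each run the content between a start and end marker pair is regenerated; parameter values have surrounding quotes stripped and underscores replaced with spaces.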

683 lines (671 loc) 25.7 kB
//#region rolldown:runtime
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function")
    for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
      key = keys[i];
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, {
          get: ((k) => from[k]).bind(null, key),
          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
        });
    }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (
  target = mod != null ? __create(__getProtoOf(mod)) : {},
  __copyProps(
    isNodeMode || !mod || !mod.__esModule
      ? __defProp(target, "default", { value: mod, enumerable: true })
      : target,
    mod
  )
);
//#endregion
let mdast_util_from_markdown = require("mdast-util-from-markdown");
mdast_util_from_markdown = __toESM(mdast_util_from_markdown);
let node_child_process = require("node:child_process");
node_child_process = __toESM(node_child_process);
let node_fs_promises = require("node:fs/promises");
node_fs_promises = __toESM(node_fs_promises);
let ora = require("ora");
ora = __toESM(ora);
let debug = require("debug");
debug = __toESM(debug);
let yargs = require("yargs");
yargs = __toESM(yargs);
let yargs_helpers = require("yargs/helpers");
yargs_helpers = __toESM(yargs_helpers);
let zod = require("zod");
zod = __toESM(zod);
let mdast_comment_marker = require("mdast-comment-marker");
mdast_comment_marker = __toESM(mdast_comment_marker);
let __iarna_toml = require("@iarna/toml");
__iarna_toml = __toESM(__iarna_toml);
let cosmiconfig = require("cosmiconfig");
cosmiconfig = __toESM(cosmiconfig);
let deepmerge = require("deepmerge");
deepmerge = __toESM(deepmerge);
let __manypkg_get_packages = require("@manypkg/get-packages");
__manypkg_get_packages = __toESM(__manypkg_get_packages);
let node_fs = require("node:fs");
node_fs = __toESM(node_fs);
let node_path = require("node:path");
node_path = __toESM(node_path);
let pkg_types = require("pkg-types");
pkg_types = __toESM(pkg_types);
let yaml = require("yaml");
yaml = __toESM(yaml);
let zod2md = require("zod2md");
zod2md = __toESM(zod2md);
let fast_glob = require("fast-glob");
fast_glob = __toESM(fast_glob);
let remark = require("remark");
remark = __toESM(remark);
let remark_code_import = require("remark-code-import");
remark_code_import = __toESM(remark_code_import);
let remark_collapse = require("remark-collapse");
remark_collapse = __toESM(remark_collapse);
let remark_toc = require("remark-toc");
remark_toc = __toESM(remark_toc);
let remark_usage = require("remark-usage");
remark_usage = __toESM(remark_usage);
let vfile = require("vfile");
vfile = __toESM(vfile);
let handlebars = require("handlebars");
handlebars = __toESM(handlebars);
let markdown_table = require("markdown-table");
markdown_table = __toESM(markdown_table);
let mdast_zone = require("mdast-zone");
mdast_zone = __toESM(mdast_zone);

//#region src/schema.ts
const actionsSchema = zod.enum([
  "ACTION",
  "PKG",
  "USAGE",
  "WORKSPACE",
  "ZOD"
]).meta({ description: "Comment action options" });
const formatsSchema = zod.enum(["LIST", "TABLE"]).default("TABLE");
const languageSchema = zod.enum(["JS", "RS"]).default("JS");
const headingsSchema = zod.enum([
  "default",
  "description",
  "devDependency",
  "downloads",
  "name",
  "private",
  "required",
  "version"
]).meta({ description: "Table heading options" });
const tableHeadingsSchema = zod.record(actionsSchema, headingsSchema.array().optional()).default({
  ACTION: ["name", "required", "default", "description"],
  PKG: ["name", "version", "devDependency"],
  USAGE: [],
  WORKSPACE: ["name", "version", "downloads", "description"],
  ZOD: []
}).meta({ description: "Table heading action configuration" });
const templatesSchema = zod.object({
  downloadImage: zod.string().default("https://img.shields.io/npm/dw/{{name}}?labelColor=211F1F"),
  emojis: zod.record(headingsSchema, zod.string()).default({
    default: "⚙️",
    description: "📝",
    devDependency: "💻",
    downloads: "📥",
    name: "🏷️",
    private: "🔒",
    required: "",
    version: ""
  }).meta({ description: "Table heading emojis used when enabled" }),
  registryUrl: zod.string().default("https://www.npmjs.com/package/{{name}}"),
  versionImage: zod.string().default("https://img.shields.io/npm/v/{{uri_name}}?logo=npm&logoColor=red&color=211F1F&labelColor=211F1F")
});
const defaultTemplates = templatesSchema.parse({});
const defaultTableHeadings = tableHeadingsSchema.parse(void 0);
const _configSchema = zod.object({
  affectedRegexes: zod.string().array().default([]),
  collapseHeadings: zod.string().array().default([]),
  defaultLanguage: languageSchema.meta({ alias: "l", description: "Default language to infer projects from" }),
  disableEmojis: zod.boolean().default(false).meta({ alias: "e", description: "Whether or not to use emojis in markdown table headings" }),
  disableMarkdownHeadings: zod.boolean().default(false).meta({ description: "Whether or not to display markdown headings" }),
  enablePrettier: zod.boolean().default(true).meta({ description: "Whether or not to use prettier to format the files" }),
  enableToc: zod.boolean().default(false).meta({ alias: "t", description: "generate table of contents for readmes" }),
  enableUsage: zod.boolean().default(false).meta({ description: "Whether or not to enable usage plugin" }),
  headings: tableHeadingsSchema.optional().default(defaultTableHeadings).describe("List of headings for different table outputs"),
  onlyReadmes: zod.boolean().default(true).meta({ alias: "r", description: "Whether or not to only traverse readmes" }),
  onlyShowPublicPackages: zod.boolean().default(false).meta({ alias: "p", description: "Only show public packages in workspaces" }),
  removeScope: zod.string().default("").meta({ description: "Remove common workspace scope" }),
  templates: templatesSchema.optional().default(defaultTemplates).describe("Handlebars templates used to fuel list and table generation"),
  tocHeading: zod.string().default("Table of contents").meta({ description: "Markdown heading used to generate table of contents" }),
  usageFile: zod.string().default("").meta({ description: "Workspace level usage file" }),
  usageHeading: zod.string().default("Usage").meta({ description: "Markdown heading used to generate usage example" }),
  verbose: zod.boolean().default(false).meta({ alias: "v", description: "whether or not to display verbose logging" })
});
const configSchema = _configSchema.optional();
//#endregion

//#region src/args.ts
const complexOptions = ["affectedRegexes", "collapseHeadings", "headings", "templates"];
const args = {
  ...zodToYargs(),
  changes: { alias: "g", default: false, description: "Check only changed git files", type: "boolean" },
  check: { alias: "k", default: false, description: "Do not write to files. Only check for changes", type: "boolean" },
  config: { alias: "c", description: "Path to config file", type: "string" }
};
async function parseArgs() {
  const yargsInstance = (0, yargs.default)((0, yargs_helpers.hideBin)(process.argv)).options(args).help("h").alias("h", "help").epilogue(`--> @stephansama open-source ${(/* @__PURE__ */ new Date()).getFullYear()}`);
  const parsed = await yargsInstance.wrap(yargsInstance.terminalWidth()).parse();
  if (parsed.verbose) debug.default.enable("autoreadme*");
  return parsed;
}
function zodToYargs() {
  const { shape } = configSchema.unwrap();
  const entries = Object.entries(shape).map(([key, value]) => {
    if (complexOptions.includes(key)) return [];
    if (value.def.innerType instanceof zod.ZodObject) return [];
    const meta = value.meta();
    const { innerType } = value.def;
    const isBoolean = innerType instanceof zod.ZodBoolean;
    const isNumber = innerType instanceof zod.ZodNumber;
    const yargType = innerType instanceof zod.ZodArray && "array" || isNumber && "number" || isBoolean && "boolean" || "string";
    const options = { default: value.def.defaultValue, type: yargType };
    if (meta?.alias) options.alias = meta.alias;
    if (meta?.description) options.description = meta.description;
    return [key, options];
  });
  return Object.fromEntries(entries);
}
//#endregion

//#region src/log.ts
const error = (0, debug.default)("autoreadme:error");
const info = (0, debug.default)("autoreadme:info");
const warn = (0, debug.default)("autoreadme:warn");
function ERROR(...rest) {
  const [first, ...remaining] = rest;
  error(`${first} %O`, ...remaining);
}
function INFO(...rest) {
  const [first, ...remaining] = rest;
  info(`${first} %O`, ...remaining);
}
function WARN(...rest) {
  const [first, ...remaining] = rest;
  warn(`${first} %O`, ...remaining);
}
//#endregion

//#region src/comment.ts
const SEPARATOR = "-";
function loadAstComments(root) {
  return root.children.map((child) => child.type === "html" && getComment(child)).filter((f) => f !== false);
}
function parseComment(comment) {
  const [type, ...parameters] = trimComment(comment).split(" ");
  const [first, second, third] = type.split(SEPARATOR);
  INFO("parsing inputs", { first, second, third });
  const languageInput = third ? first : void 0;
  const actionInput = third ? second : first;
  const formatInput = third ? third : second;
  const language = languageSchema.parse(languageInput);
  const parsed = {
    action: actionsSchema.parse(actionInput),
    format: formatsSchema.parse(formatInput),
    isStart: comment.includes("start"),
    language,
    parameters
  };
  INFO(`Parsed comment ${comment}`, parsed);
  return parsed;
}
const startComment = "<!--";
const endComment = "-->";
function trimComment(comment) {
  return comment.replace(startComment, "").replace(/start|end/, "").replace(endComment, "").trim();
}
function getComment(comment) {
  if (!isComment(comment.value)) return false;
  if (!(0, mdast_comment_marker.commentMarker)(comment)) return false;
  return parseComment(comment.value);
}
function isComment(comment) {
  return comment.startsWith(startComment) && comment.endsWith(endComment);
}
//#endregion

//#region src/config.ts
const moduleName = "autoreadme";
const searchPlaces = getSearchPlaces();
const loaders = { [".toml"]: loadToml };
async function loadConfig(args$1) {
  const opts$1 = { loaders, searchPlaces };
  if (args$1.config) opts$1.searchPlaces = [args$1.config];
  const search = await (0, cosmiconfig.cosmiconfig)(moduleName, opts$1).search();
  if (!search) {
    WARN(`no config file found`, args$1.config ? " at location: " + args$1.config : "");
    INFO("using default configuration");
  } else {
    INFO("found configuration file at: ", search.filepath);
    INFO("loaded cosmiconfig", search.config);
  }
  args$1 = removeFalsy(args$1);
  INFO("merging config with args", args$1);
  return configSchema.parse((0, deepmerge.default)(search?.config || {}, args$1, { arrayMerge: (_, sourceArray) => sourceArray }));
}
function loadToml(_filepath, content) {
  return __iarna_toml.default.parse(content);
}
function getSearchPlaces() {
  return [
    ...(0, cosmiconfig.getDefaultSearchPlaces)(moduleName),
    `.${moduleName}rc.toml`,
    `.config/.${moduleName}rc`,
    `.config/${moduleName}rc.toml`,
    `.config/.${moduleName}rc.toml`,
    `.config/.${moduleName}rc.json`,
    `.config/.${moduleName}rc.yaml`,
    `.config/.${moduleName}rc.yml`
  ];
}
function removeFalsy(obj) {
  return Object.fromEntries(Object.entries(obj).map(([k, v]) => !v ? false : [k, v]).filter((e) => Boolean(e)));
}
//#endregion

//#region src/utils.ts
const sh = String.raw;
const opts = { encoding: "utf8" };
const ignore = ["**/node_modules/**"];
const matches = [
  /.*README\.md$/gi,
  /.*Cargo\.toml$/gi,
  /.*action\.ya?ml$/gi,
  /.*package\.json$/gi,
  /.*pnpm-workspace\.yaml$/gi
];
async function fileExists(file) {
  return await node_fs_promises.access(file).then(() => true).catch(() => false);
}
function findAffectedMarkdowns(root, config) {
  const affected = node_child_process.execSync(sh`git diff --cached --name-only --diff-filter=MACT`, opts).trim().split("\n").filter(Boolean);
  if (!affected.length) ERROR("no staged files found");
  if (config.affectedRegexes?.length) INFO("adding the following expressions: ", config.affectedRegexes);
  const allMatches = [...matches, ...config.affectedRegexes?.map((r) => new RegExp(r)) || []];
  INFO("Checking affected files against regexes", affected, allMatches);
  const eligible = affected.filter((a) => allMatches.some((m) => a.match(m)));
  INFO("Found the following eligible affected files", eligible);
  const md = eligible.map((e) => findNearestReadme(root, node_path.resolve(e)));
  const rootMd = node_path.join(root, "README.md");
  const dedupe = [...new Set(md), rootMd].filter((s) => Boolean(s));
  INFO("Found the following readmes", dedupe);
  return dedupe;
}
function getGitRoot() {
  const root = node_child_process.execSync(sh`git rev-parse --show-toplevel`, opts).trim();
  if (!root) throw new Error("must be ran within a git directory.");
  INFO("found git root at location: ", root);
  return root;
}
async function getMarkdownPaths(cwd, config) {
  return (await (0, fast_glob.default)(`**/${config?.onlyReadmes ? "README" : "*"}.md`, { cwd, ignore })).map((readme) => node_path.resolve(cwd, readme));
}
async function getPrettierPaths(paths) {
  return await Promise.all(paths.map(async (file) => {
    if (!(await node_fs_promises.lstat(file)).isSymbolicLink()) return file;
    const symlink = await node_fs_promises.readlink(file);
    return node_path.join(node_path.dirname(file), symlink);
  }));
}
function findNearestReadme(gitRoot, inputFile, maxRotations = 15) {
  let dir = node_path.dirname(inputFile);
  let rotations = 0;
  while (true) {
    const option = node_path.join(dir, "README.md");
    if (node_fs.existsSync(option)) return option;
    const parent = node_path.dirname(dir);
    if (parent === dir || dir === gitRoot || ++rotations > maxRotations) break;
    dir = parent;
  }
  return null;
}
//#endregion

//#region src/data.ts
function createFindParameter(parameterList) {
  return function(parameterName) {
    return parameterList?.find((p) => p.startsWith(parameterName))?.replace(parameterName + "=", "")?.replace(/"/gi, "")?.replace(/_/gi, " ");
  };
}
async function loadActionData(actions, file, root) {
  const startActions = actions.filter((action) => action.isStart);
  return await Promise.all(startActions.map(async (action) => {
    const find = createFindParameter(action.parameters);
    switch (action.action) {
      case "ACTION": {
        const actionYaml = await loadActionYaml(node_path.dirname(file));
        return { action: action.action, actionYaml, parameters: action.parameters };
      }
      case "PKG": {
        const inputPath = find("path");
        const pkgJson = await (0, pkg_types.readPackageJSON)(inputPath ? node_path.resolve(node_path.dirname(file), inputPath) : node_path.dirname(file));
        return { action: action.action, parameters: action.parameters, pkgJson };
      }
      case "USAGE": return { action: action.action, parameters: action.parameters };
      case "WORKSPACE": {
        const workspaces = await (0, __manypkg_get_packages.getPackages)(process.cwd());
        const pnpmPath = node_path.resolve(root, "pnpm-workspace.yaml");
        const isPnpm = node_fs.existsSync(pnpmPath);
        return { action: action.action, isPnpm, parameters: action.parameters, root, workspaces };
      }
      case "ZOD": {
        if (action.format === "LIST") throw new Error("cannot display zod in list format");
        const inputPath = find("path");
        if (!inputPath) {
          const error$1 = `no path found for zod table at markdown file ${file}`;
          throw new Error(error$1);
        }
        const body = await (0, zod2md.zod2md)({ entry: node_path.resolve(node_path.dirname(file), inputPath), title: find("title") || "Zod Schema" });
        return { action: action.action, body, parameters: action.parameters };
      }
      default: throw new Error("feature not yet implemented");
    }
  }));
}
async function loadActionYaml(baseDir) {
  const actionYmlPath = node_path.resolve(baseDir, "action.yml");
  const actionYamlPath = node_path.resolve(baseDir, "action.yaml");
  const actualPath = await fileExists(actionYamlPath) && actionYamlPath || await fileExists(actionYmlPath) && actionYmlPath;
  if (!actualPath) {
    const error$1 = `no yaml file found at locations: ${[actionYmlPath, actionYamlPath]}`;
    throw new Error(error$1);
  }
  const actionFile = await node_fs_promises.readFile(actualPath, { encoding: "utf8" });
  return yaml.parse(actionFile);
}
//#endregion

//#region src/plugin.ts
function createHeading(headings, disableEmojis = false, emojis = defaultTemplates.emojis) {
  return headings.map((h) => `${disableEmojis ? "" : emojis[h] + " "}${h?.at(0)?.toUpperCase() + h?.slice(1)}`);
}
function wrapRequired(required, input) {
  if (!required) return input;
  return `<b>*${input}</b>`;
}
const autoReadmeRemarkPlugin = (config, data) => (tree) => {
  (0, mdast_zone.zone)(tree, /.*ZOD.*/gi, function(start, _, end) {
    const zod$1 = data.find((d) => d?.action === "ZOD");
    if (!zod$1?.body) throw new Error("unable to load zod body");
    return [start, (0, mdast_util_from_markdown.fromMarkdown)(zod$1.body), end];
  });
  (0, mdast_zone.zone)(tree, /.*ACTION.*/gi, function(start, _, end) {
    const value = start.type === "html" && start.value;
    const options = value && parseComment(value);
    if (!options) throw new Error("not able to parse comment");
    const inputs = data.find((d) => d?.action === "ACTION")?.actionYaml?.inputs || {};
    const heading = `### ${config.disableEmojis ? "" : "🧰"} actions`;
    if (options.format === "LIST") return [
      start,
      (0, mdast_util_from_markdown.fromMarkdown)(`${heading}\n` + Object.entries(inputs).sort((a) => a[1].required ? -1 : 1).map(([key, value$1]) => {
        return `- ${wrapRequired(value$1.required, key)}: (default: ${value$1.default})\n\n${value$1.description}`;
      }).join("\n")),
      end
    ];
    const headings = config.headings?.ACTION?.length && config.headings.ACTION || defaultTableHeadings.ACTION;
    return [
      start,
      (0, mdast_util_from_markdown.fromMarkdown)([
        heading,
        "",
        (0, markdown_table.markdownTable)([createHeading(headings, config.disableEmojis, config.templates?.emojis), ...Object.entries(inputs).map(([k, v]) => headings.map((heading$1) => v[heading$1] || k).map(String))])
      ].join("\n")),
      end
    ];
  });
  (0, mdast_zone.zone)(tree, /.*WORKSPACE.*/gi, function(start, _, end) {
    const value = start.type === "html" && start.value;
    const comment = value && parseComment(value);
    const workspace = data.find((d) => d?.action === "WORKSPACE");
    const templates = loadTemplates(config.templates);
    const packages = workspace?.workspaces?.packages || [];
    const headings = config.headings?.WORKSPACE?.length && config.headings?.WORKSPACE || defaultTableHeadings.WORKSPACE;
    if (comment && comment.format === "LIST") {}
    const table = (0, markdown_table.markdownTable)([createHeading(headings, config.disableEmojis, config.templates?.emojis), ...packages.filter((pkg) => config.onlyShowPublicPackages ? !pkg.packageJson.private : true).map((pkg) => {
      const { name } = pkg.packageJson;
      return headings.map((heading) => {
        if (heading === "name") return `[${config.removeScope ? name.replace(config.removeScope, "") : name}](${node_path.default.relative(process.cwd(), node_path.default.resolve(pkg.dir, "README.md"))})`;
        if (heading === "version") return `![npm version image](${templates.versionImage({ uri_name: encodeURIComponent(name) })})`;
        if (heading === "downloads") return `![npm downloads](${templates.downloadImage({ name })})`;
        if (heading === "description") return pkg.packageJson?.description;
        return ``;
      });
    })]);
    return [
      start,
      (0, mdast_util_from_markdown.fromMarkdown)([`### ${config.disableEmojis ? "" : "🏭"} workspace`, "", table].join("\n")),
      end
    ];
  });
  (0, mdast_zone.zone)(tree, /.*PKG.*/gi, function(start, _, end) {
    const value = start.type === "html" && start.value;
    const comment = value && parseComment(value);
    const first = data.find((d) => d?.action === "PKG");
    const templates = loadTemplates(config.templates);
    const headings = config.headings?.PKG?.length && config.headings?.PKG || defaultTableHeadings.PKG;
    if (comment && comment.format === "LIST") return [start, (0, mdast_util_from_markdown.fromMarkdown)(""), end];
    function mapDependencies(isDev) {
      return function([name, version]) {
        const url = templates.registryUrl({ name });
        return headings.map((key) => {
          if (key === "devDependency") {
            if (config.disableEmojis) return `\`${isDev}\``;
            return `${isDev ? "⌨️" : "👥"}`;
          }
          if (key === "name") return `[${name}](${url})`;
          if (key === "version") {
            if (["workspace", "catalog", "*"].some((type) => version.includes(type))) return `\`${version}\``;
            return `![npm version](${templates.versionImage({ uri_name: encodeURIComponent(name) })})`;
          }
        });
      };
    }
    const { dependencies = {}, devDependencies = {} } = first?.pkgJson || {};
    const table = (0, markdown_table.markdownTable)([
      createHeading(headings, config.disableEmojis, config.templates?.emojis),
      ...Object.entries(devDependencies).map(mapDependencies(true)),
      ...Object.entries(dependencies).map(mapDependencies(false))
    ]);
    return [
      start,
      (0, mdast_util_from_markdown.fromMarkdown)([`### ${config.disableEmojis ? "" : "📦"} packages`, "", table].join("\n")),
      end
    ];
  });
};
function loadTemplates(templates) {
  if (!templates) throw new Error("failed to load templates");
  return Object.fromEntries(Object.entries(templates).map(([key, value]) => {
    if (typeof value !== "string") return [];
    return [key, handlebars.default.compile(value)];
  }));
}
//#endregion

//#region src/pipeline.ts
async function parse(file, filepath, root, config, data) {
  const pipeline = (0, remark.remark)().use(autoReadmeRemarkPlugin, config, data).use(remark_code_import.default, {});
  const usage = data.find((d) => d.action === "USAGE");
  if (usage?.action === "USAGE" || config.enableUsage) {
    const examplePath = createFindParameter(usage?.parameters || [])("path");
    const dirname = node_path.dirname(filepath);
    const resolvePath = examplePath && node_path.resolve(dirname, examplePath);
    const relativeProjectPath = config.usageFile && node_path.relative(root, node_path.resolve(dirname, config.usageFile));
    const example = examplePath && resolvePath && node_path.relative(root, resolvePath) || relativeProjectPath || void 0;
    if (example && await fileExists(example)) {
      INFO("generating usage section");
      pipeline.use(remark_usage.default, { example, heading: config.usageHeading });
    } else WARN("not able to find example file for readme", filepath, example);
  }
  if (config.enableToc) {
    INFO("generating table of contents section");
    pipeline.use(remark_toc.default, { heading: config.tocHeading });
  }
  if (config.enableToc || config.collapseHeadings?.length) {
    const headings = [...config.collapseHeadings?.length ? config.collapseHeadings : [], config.tocHeading];
    pipeline.use(remark_collapse.default, {
      test: {
        ignoreFinalDefinitions: true,
        test: (value, _) => {
          return headings.some((i) => value.trim() === i?.trim());
        }
      }
    });
  }
  const vfile$1 = new vfile.VFile({ path: node_path.resolve(filepath), value: file });
  return (await pipeline.process(vfile$1)).toString();
}
//#endregion

//#region src/index.ts
async function run() {
  const args$1 = await parseArgs();
  const config = await loadConfig(args$1) || {};
  INFO("Loaded the following configuration:", config);
  const root = getGitRoot();
  const isAffected = args$1.changes && "affected";
  INFO(`Loading ${!isAffected ? "all " : "affected "}files`);
  const paths = isAffected ? findAffectedMarkdowns(root, config) : await getMarkdownPaths(root, config);
  INFO("Loaded the following files:", paths.join("\n"));
  const type = args$1.onlyReadmes ? "readmes" : "all markdown files";
  if (!paths.length) return ERROR(`no ${isAffected} readmes found to update`);
  const spinner = !args$1.verbose && (0, ora.default)(`Updating ${type}`).start();
  await Promise.all(paths.map(async (path$1) => {
    const file = await node_fs_promises.readFile(path$1, { encoding: "utf8" });
    const actions = (() => {
      return loadAstComments((0, mdast_util_from_markdown.fromMarkdown)(file));
    })();
    if (!actions.length) {
      WARN(`no action comments found in`, path$1);
      if (!config.enableUsage || !config.enableToc) return ERROR("no action or plugins found");
      else INFO("plugins enabled. continuing parsing", path$1);
    }
    const data = await loadActionData(actions, path$1, root);
    INFO("Loaded comment action data", data);
    const content = await parse(file, path$1, root, config, data);
    await node_fs_promises.writeFile(path$1, content);
  }));
  const opts$1 = { stdio: "inherit" };
  if (config.enablePrettier) {
    INFO("formatting with prettier");
    const prettierPaths = await getPrettierPaths(paths);
    node_child_process.execFileSync("prettier", ["--write", ...prettierPaths], opts$1);
  }
  if (isAffected) {
    INFO("adding affected files to git stage");
    node_child_process.execFileSync("git", ["add", ...paths], opts$1);
  }
  if (spinner) spinner.stop();
}
//#endregion
exports.run = run;
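
The bundle's only export is run, which parses argv, loads the configuration, finds the target READMEs, and rewrites the zones between comment markers. A minimal programmatic sketch, assuming this file is the package's entry point (the run export is confirmed above; flags such as --changes, --check, --config, and --verbose come from the args definition in src/args.ts):

const { run } = require("@stephansama/auto-readme");

// run() is async; surface any failure with a non-zero exit code.
run().catch((err) => {
  console.error(err);
  process.exit(1);
});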
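
Configuration is resolved with cosmiconfig under the module name autoreadme, with a TOML loader and the extra search places listed in getSearchPlaces (for example .autoreadmerc.toml), and CLI flags are merged over the file via deepmerge. A minimal sketch of such a file, using option names taken from the config schema above; the values here are illustrative assumptions, not defaults recommended by the package:

enableToc = true
tocHeading = "Table of contents"
onlyShowPublicPackages = true
removeScope = "@stephansama/"
collapseHeadings = ["Usage"]

[templates]
registryUrl = "https://www.npmjs.com/package/{{name}}"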