portfolio-xs

A tool that generates a portfolio site from your markdown files.

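The build script below expects to be run from a working directory laid out roughly as follows (inferred from generateSite and renderMarkdownPages further down; folder names other than doc/, about/ and setting.json are illustrative):

    setting.json          // { "title": "...", "favicon": "./favicon.ico" }
    doc/
      my-project/         // one subfolder per portfolio entry
        index.md          // optional --- frontmatter --- with title, description, category, createDate, updateDate
        index.png         // optional cover image
    about/
      about.md            // rendered into an About page
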
import path from 'path';
import fse from 'fs-extra';
import { marked } from 'marked';
import { fileURLToPath } from 'url';
import { build } from 'esbuild';
import zlib from 'zlib';
import sharp from 'sharp';
import fs, { readdirSync, createReadStream, createWriteStream } from 'fs';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const projectRoot = path.join(__dirname, '..');
const srcDir = path.join(projectRoot, 'src');
const outDir = path.resolve('./');

const renderer = {
  image({ href, title, text }) {
    console.log('Image href:', href);
    console.log('Alt text:', text);
    return `<img src="${href}" alt="${text}" class="mark-img" />`;
  }
};
marked.use({ renderer });

function injectSettings(html, settings) {
  return html
    .replace(/<title>.*<\/title>/, `<title>${settings.title || ''}</title>`)
    .replace(
      '</head>',
      `${settings.favicon ? `<link rel="icon" href="${settings.favicon}">` : ''}</head>`
    );
}

// -------- helpers --------
const walkFiles = (dir) => {
  const files = [];
  const items = readdirSync(dir, { withFileTypes: true });
  for (const it of items) {
    const full = path.join(dir, it.name);
    if (it.isDirectory()) files.push(...walkFiles(full));
    else files.push(full);
  }
  return files;
};

function copyLocalImages(markdown, srcDir, destDir, assetPrefix = '') {
  const imageRegex = /!\[[^\]]*\]\((.*?)\)/g;
  const copiedMap = new Map();
  let match;
  while ((match = imageRegex.exec(markdown)) !== null) {
    const imagePath = match[1];
    if (!imagePath.startsWith('http') && !copiedMap.has(imagePath)) {
      const absSource = path.resolve(srcDir, imagePath);
      const absTarget = path.resolve(destDir, imagePath);
      if (fs.existsSync(absSource)) {
        fse.ensureDirSync(path.dirname(absTarget));
        fs.copyFileSync(absSource, absTarget);
        copiedMap.set(imagePath, true);
      }
    }
  }
  return markdown.replace(imageRegex, (full, imgPath) => {
    if (imgPath.startsWith('http')) return full;
    return full.replace(imgPath, path.join(assetPrefix, imgPath));
  });
}

function renderAboutPage(workDir, buildDir) {
  const aboutMdPath = path.join(workDir, 'about', 'about.md');
  const pagesDir = path.join(buildDir, 'pages');
  const assetsDir = path.join(buildDir, 'assets', 'about');
  fse.ensureDirSync(pagesDir);
  if (!fs.existsSync(aboutMdPath)) return;
  const markdown = fs.readFileSync(aboutMdPath, 'utf-8');
  const updatedMarkdown = copyLocalImages(markdown, path.dirname(aboutMdPath), assetsDir, '/assets/about');
  const htmlContent = marked.parse(updatedMarkdown);
  const componentContent = `
import React from 'react';
export default function About() {
  return (
    <div className="about-markdown" dangerouslySetInnerHTML={{ __html: ${JSON.stringify(htmlContent)} }} />
  );
}`;
  fs.writeFileSync(path.join(pagesDir, `About.js`), componentContent, 'utf-8');
}

function renderMarkdownPages(docDir, buildDir) {
  const pagesDir = path.join(buildDir, 'pages');
  const coversDir = path.join(buildDir, 'covers');
  const metaFile = path.join(buildDir, 'rawList.json');
  const metadataList = [];
  const pageMapImports = [];
  const pageMapEntries = [];
  fse.ensureDirSync(pagesDir);
  fse.ensureDirSync(coversDir);

  const folders = fs.readdirSync(docDir).filter(folder =>
    fs.lstatSync(path.join(docDir, folder)).isDirectory()
  );

  folders.forEach(folder => {
    const folderPath = path.join(docDir, folder);
    const mdPath = path.join(folderPath, 'index.md');
    if (!fs.existsSync(mdPath)) return;

    let markdown = fs.readFileSync(mdPath, 'utf-8');
    const metaMatch = markdown.match(/---([\s\S]*?)---/);
    const meta = { title: folder, description: '', category: '', createDate: '', updateDate: '' };
    if (metaMatch) {
      const lines = metaMatch[1].split('\n').map(l => l.trim()).filter(Boolean);
      lines.forEach(line => {
        const [key, ...rest] = line.split(':');
        if (key && rest.length) meta[key.trim()] = rest.join(':').trim();
      });
      markdown = markdown.replace(metaMatch[0], '').trim();
    }

    const assetsDir = path.join(buildDir, 'assets', folder);
    markdown = copyLocalImages(markdown, folderPath, assetsDir, `/assets/${folder}`);

    const htmlContent =
      `<div style="padding: 9rem 4.5rem;"><h1>${meta.title}</h1><h2 style="color: #4a4a4a; font-weight: normal;">${meta.description}</h2></div>` +
      '<div style="padding: 0 4.5rem 4.5rem;">' + marked.parse(markdown) + '</div>';

    const componentName = folder
      .replace(/[^a-zA-Z0-9]/g, '_')
      .replace(/^(\d)/, '_$1')
      .replace(/(^\w|_\w)/g, m => m.replace('_', '').toUpperCase());

    const sourceCover = path.join(folderPath, 'index.png');
    let coverUrl = '';
    if (fs.existsSync(sourceCover)) {
      const destCover = path.join(coversDir, `${componentName}.png`);
      fs.copyFileSync(sourceCover, destCover);
      coverUrl = `./covers/${componentName}.png`;
    }

    metadataList.push({ path: `/${componentName}`, coverUrl, ...meta });

    const componentContent = `
import React from 'react';
export default function ${componentName}() {
  return (
    <div dangerouslySetInnerHTML={{ __html: ${JSON.stringify(htmlContent)} }} />
  );
}`;
    fs.writeFileSync(path.join(pagesDir, `${componentName}.js`), componentContent, 'utf-8');
    pageMapImports.push(`import ${componentName} from "./pages/${componentName}.js";`);
    pageMapEntries.push(`  "/${componentName}": ${componentName},`);
  });

  fs.writeFileSync(metaFile, JSON.stringify(metadataList, null, 2), 'utf-8');

  const pageMapContent = `${pageMapImports.join('\n')}

const pageMap = {
${pageMapEntries.join('\n')}
};

export default pageMap;
`;
  fs.writeFileSync(path.join(buildDir, 'pageMap.js'), pageMapContent, 'utf-8');
}

function generateSite() {
  const cwd = process.cwd(); // Current command execution path
  const docDir = path.join(cwd, 'doc');
  const configPath = path.join(cwd, 'setting.json');
  const __filename = fileURLToPath(import.meta.url); // path to current file
  const __dirname = path.dirname(__filename);
  const buildDir = path.join(__dirname, '../', 'src');
  const libDir = __dirname;
  const templateDir = path.join(__dirname, 'template');

  if (fs.existsSync(buildDir)) {
    fse.removeSync(buildDir);
  }
  fse.ensureDirSync(buildDir);

  const config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
  fse.copySync(templateDir, buildDir);

  const indexPath = path.join(buildDir, 'index.html');
  let indexHtml = fs.readFileSync(indexPath, 'utf-8');
  indexHtml = injectSettings(indexHtml, config);
  fs.writeFileSync(indexPath, indexHtml, 'utf-8');

  if (config.favicon && fs.existsSync(config.favicon)) {
    const destFavicon = path.join(buildDir, path.basename(config.favicon));
    fs.copyFileSync(config.favicon, destFavicon);
  }

  if (fs.existsSync(docDir) && fs.existsSync(libDir)) {
    renderMarkdownPages(docDir, buildDir);
    renderAboutPage(cwd, buildDir);
  }

  console.log(`✅ Portfolio site generated in ${buildDir}`);
}

// --- safer empty-dir pruner: never scandir files like CNAME ---
async function pruneEmptyDirs(dir, outDir) {
  let st;
  try {
    st = await fse.stat(dir);
  } catch {
    return; // path disappeared or unreadable; skip
  }
  if (!st.isDirectory()) return; // <-- key line: don't readdir files

  // Only recurse into subdirectories
  const dirents = await fse.readdir(dir, { withFileTypes: true });
  for (const d of dirents) {
    if (d.isDirectory()) {
      await pruneEmptyDirs(path.join(dir, d.name), outDir);
    }
  }

  // After recursion, remove if empty (but never remove the root outDir)
  const remaining = await fse.readdir(dir);
  if (remaining.length === 0 && path.resolve(dir) !== path.resolve(outDir)) {
    await fse.remove(dir);
    console.log(`🗂️ Pruned empty dir: ${path.relative(outDir, dir)}`);
  }
}

async function cleanAfterBuild({ projectRoot, outDir, srcDir, keepMeta = false }) {
  // 1) Remove generated src/ safely
  if (fse.existsSync(srcDir)) {
    await fse.remove(srcDir);
    console.log(`🧹 Removed build dir: ${path.relative(projectRoot, srcDir)}`);
  }

  // 2) Delete junk files (maps, meta if not kept). Never touches CNAME.
  const junkPatterns = [
    /\.map$/i,
    /\.DS_Store$/i,
    ...(keepMeta ? [] : [/^meta\.json$/i])
  ];
  const walkFiles = (d) => {
    const out = [];
    if (!fse.existsSync(d)) return out;
    for (const ent of fs.readdirSync(d, { withFileTypes: true })) {
      const p = path.join(d, ent.name);
      if (ent.isDirectory()) out.push(...walkFiles(p));
      else out.push(p);
    }
    return out;
  };
  for (const f of walkFiles(outDir)) {
    const base = path.basename(f);
    if (junkPatterns.some(rx => rx.test(base))) {
      await fse.remove(f);
      console.log(`🧽 Deleted: ${path.relative(outDir, f)}`);
    }
  }

  // 3) Prune empty dirs (safe for CNAME and other root files)
  await pruneEmptyDirs(outDir, outDir);
  console.log('✅ Clean complete.');
}

async function optimizeImages({ inputDirs, outAssetsDir, sizes = [320, 640, 960, 1280] }) {
  const exts = new Set(['.jpg', '.jpeg', '.png', '.webp']);
  const manifest = {}; // maps original (relative) -> { placeholder, srcset, fallback }
  await fse.ensureDir(outAssetsDir);

  for (const dir of inputDirs) {
    if (!fse.existsSync(dir)) continue;
    const files = walkFiles(dir).filter(f => exts.has(path.extname(f).toLowerCase()));

    for (const abs of files) {
      const relFromSrc = path.relative(srcDir, abs).replace(/\\/g, '/'); // e.g. covers/foo.jpg
      const { width = 1280 } = await sharp(abs).metadata();
      const baseName = path.basename(abs, path.extname(abs));
      const folderRel = path.dirname(relFromSrc); // e.g. covers
      const outFolder = path.join(outAssetsDir, folderRel);
      await fse.ensureDir(outFolder);

      // tiny blur-up placeholder
      const placeholderBuf = await sharp(abs).resize(24).webp({ quality: 35 }).toBuffer();
      const placeholder = `data:image/webp;base64,${placeholderBuf.toString('base64')}`;

      // responsive variants (webp + jpeg fallback)
      const outputs = [];
      const targetSizes = sizes.filter(s => s <= width);
      for (const w of targetSizes.length ? targetSizes : [width]) {
        const webpName = `${baseName}-${w}.webp`;
        const jpgName = `${baseName}-${w}.jpg`;
        await sharp(abs).resize(w).webp({ quality: 78 }).toFile(path.join(outFolder, webpName));
        await sharp(abs).resize(w).jpeg({ quality: 82, mozjpeg: true }).toFile(path.join(outFolder, jpgName));
        outputs.push({ w, webp: `${folderRel}/${webpName}`, jpg: `${folderRel}/${jpgName}` });
      }

      // build srcset strings
      const webpSrcset = outputs.map(o => `assets/${o.webp} ${o.w}w`).join(', ');
      const jpgSrcset = outputs.map(o => `assets/${o.jpg} ${o.w}w`).join(', ');
      const largest = outputs[outputs.length - 1];

      manifest[relFromSrc] = {
        placeholder,
        srcset: {
          webp: webpSrcset,
          jpg: jpgSrcset,
        },
        fallback: `assets/${largest.webp}`,
        sizes: '(max-width: 640px) 100vw, 640px'
      };
    }
  }

  const manifestPath = path.join(outAssetsDir, 'image-manifest.json');
  await fse.writeJson(manifestPath, manifest, { spaces: 2 });
  console.log(`🖼️ Image manifest → ${path.relative(outDir, manifestPath)}`);
  return manifest;
}

async function precompress(dir) {
  const candidates = walkFiles(dir).filter(f =>
    /\.(js|css|html|json|svg|xml|txt|ttf|woff2?)$/i.test(f)
  );
  for (const file of candidates) {
    await new Promise((res, rej) =>
      createReadStream(file).pipe(zlib.createGzip()).pipe(createWriteStream(file + '.gz'))
        .on('finish', res).on('error', rej)
    );
    await new Promise((res, rej) =>
      createReadStream(file).pipe(zlib.createBrotliCompress()).pipe(createWriteStream(file + '.br'))
        .on('finish', res).on('error', rej)
    );
  }
  console.log(`🗜️ Precompressed ${candidates.length} assets (.gz & .br)`);
}

async function patchIndexHtml({ cdnReact = false, entryBase = 'index' }) {
  const htmlPath = path.join(outDir, 'index.html');
  if (!await fse.pathExists(htmlPath)) return;
  let html = await fse.readFile(htmlPath, 'utf-8');

  // 1) Ensure images default to lazy/async/fetchpriority
  const lazyScript = `
<script>
document.addEventListener('DOMContentLoaded',function(){
  for(const img of document.querySelectorAll('img')){
    if(!img.hasAttribute('loading')) img.setAttribute('loading','lazy');
    if(!img.hasAttribute('decoding')) img.setAttribute('decoding','async');
    if(!img.hasAttribute('fetchpriority')) img.setAttribute('fetchpriority','low');
  }
});
</script>`.trim();
  if (!/fetchpriority/.test(html)) {
    html = html.replace('</body>', `${lazyScript}\n</body>`);
  }

  // 2) If using CDN externals, inject an import map for React
  if (cdnReact && !/type=["']importmap["']/.test(html)) {
    const importMap = `
<script type="importmap">
{
  "imports": {
    "react": "https://esm.sh/react@18",
    "react-dom": "https://esm.sh/react-dom@18",
    "react-dom/client": "https://esm.sh/react-dom@18/client",
    "react/jsx-runtime": "https://esm.sh/react@18/jsx-runtime"
  }
}
</script>
`.trim();
    html = html.replace('</head>', `${importMap}\n</head>`);
  }

  // 3) Find the emitted entry file (index.js or index-<hash>.js)
  const files = await fse.readdir(outDir);
  const entryFile = files.find(f => new RegExp(`^${entryBase}(?:-[\\w-]+)?\\.js$`).test(f));
  if (entryFile) {
    const moduleTag = `<script type="module" src="./${entryFile}"></script>`;
    const scriptRx = /<script[^>]*src=["'][^"']*index[^"']*\.js["'][^>]*>\s*<\/script>/i;
    if (scriptRx.test(html)) {
      html = html.replace(scriptRx, moduleTag);
    } else {
      // If there's no script yet, inject before </body>
      html = html.replace('</body>', `${moduleTag}\n</body>`);
    }
  } else {
    console.warn('⚠️ Could not find built entry (index*.js) in dist.');
  }

  await fse.writeFile(htmlPath, html, 'utf-8');
  console.log('🧩 Patched index.html (module entry + lazy images + optional React import map)');
}

async function buildSite(argv) {
  const isProd = argv.prod ?? true;

  // 1) Generate static content
  generateSite();

  // 2) Optimize images (covers/, assets/images/)
  const assetsOut = path.join(outDir, 'assets');
  await optimizeImages({
    inputDirs: [path.join(srcDir, 'covers'), path.join(srcDir, 'assets', 'images')].filter(fse.existsSync),
    outAssetsDir: assetsOut
  });

  // 3) Bundle with esbuild
  const entry = path.resolve(projectRoot, 'src', 'index.js');
  const result = await build({
    entryPoints: [entry],
    bundle: true,
    splitting: true,
    format: 'esm',
    outdir: outDir,
    jsx: 'automatic',
    loader: {
      '.js': 'jsx',
      '.png': 'file',
      '.jpg': 'file',
      '.jpeg': 'file',
      '.webp': 'file',
      '.svg': 'file',
      '.css': 'css',
      '.html': 'text',
      '.json': 'json',
    },
    assetNames: 'assets/[name]-[hash]',
    chunkNames: 'chunks/[name]',
    entryNames: '[name]',
    sourcemap: isProd ? false : 'linked',
    minify: isProd,
    treeShaking: true,
    target: isProd ? ['es2020'] : ['esnext'],
    define: { 'process.env.NODE_ENV': isProd ? '"production"' : '"development"' },
    drop: isProd ? ['console', 'debugger'] : [],
    legalComments: 'none',
    metafile: !!argv.analyze,
    external: ['react', 'react-dom', 'react-dom/client'],
    logLevel: 'info',
  });

  if (argv.analyze && result.metafile) {
    const metaPath = path.join(outDir, 'meta.json');
    await fse.writeJson(metaPath, result.metafile, { spaces: 2 });
    console.log(`📊 Bundle metadata → ${path.relative(outDir, metaPath)} (use: npx esbuild-visualizer --metadata ${path.basename(metaPath)})`);
  }

  // 4) Copy static files (keep originals, but prefer optimized at runtime)
  const copyTargets = [
    ['rawList.json', 'rawList.json'],
    ['covers', 'covers'],       // originals kept; optimized live under assets/
    ['assets', 'assets'],       // includes images/, fonts/, etc.
    ['index.html', 'index.html'],
  ];
  copyTargets.forEach(([fromRel, toRel]) => {
    const from = path.join(srcDir, fromRel);
    const to = path.join(outDir, toRel);
    if (fse.existsSync(from)) {
      fse.copySync(from, to, { overwrite: true });
      console.log(`✅ Copied: ${fromRel} → ${toRel}`);
    }
  });

  // 5) Patch index.html (lazy img + optional React import map)
  await patchIndexHtml({ cdnReact: true });

  // 6) Precompress assets for CDN/static hosts
  await precompress(outDir);

  await cleanAfterBuild({
    projectRoot,
    outDir,   // where your final site lives (you already have this)
    srcDir,   // the temporary folder your generateSite() created
    keepMeta: !!argv.analyze
  });

  console.log('🚀 Build complete.');
}

export { buildSite };
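
A minimal invocation sketch, assuming the package entry point re-exports buildSite; the wrapper file name and the option values are illustrative, not part of this module:

    // build.mjs — hypothetical wrapper around the exported buildSite()
    // Assumes 'portfolio-xs' resolves to a module that re-exports buildSite.
    import { buildSite } from 'portfolio-xs';

    await buildSite({
      prod: true,      // minify, drop console/debugger, target es2020 (see buildSite above)
      analyze: false,  // set true to keep meta.json for esbuild-visualizer
    });

Run it from the directory that contains doc/, about/ and setting.json; the finished site is written next to the wrapper (outDir resolves to the current working directory).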