/**
 * @gravityforms/gulp-tasks
 */

const { extname } = require( 'path' );
const exec = require( 'child_process' ).exec;
const { createHash } = require( 'crypto' );
const { glob } = require( 'glob' );
const showdown = require( 'showdown' );
const json2php = require( 'json2php' );
const { resolve } = require( 'path' );
const fs = require( 'fs' );
const path = require( 'path' );
const fsPromises = fs.promises;
const fsConstants = fs.constants;
const { contentWrapperOpen, contentWrapperClose, footer, head, menu, nav } = require( '../templates/doc' );
const markdownConverter = new showdown.Converter();
const lexer = require( './utils/lexer' );

/**
 * @function copyFolderSync
 * @description Copies files and folders in node from src to dest.
 *
 * @since 1.6.2
 *
 * @param {string} from Source directory.
 * @param {string} to   Destination.
 *
 * @return {void}
 */
function copyFolderSync( from, to ) {
	if ( ! fs.existsSync( to ) ) {
		fs.mkdirSync( to, { recursive: true } );
	}

	fs.readdirSync( from ).forEach( ( element ) => {
		if ( fs.lstatSync( path.join( from, element ) ).isFile() ) {
			fs.copyFileSync( path.join( from, element ), path.join( to, element ) );
		} else {
			copyFolderSync( path.join( from, element ), path.join( to, element ) );
		}
	} );
}

/**
 * @function deleteDirectory
 * @description Recursively deletes a directory, doing nothing if it does not exist.
 *
 * @param {string} dirPath The directory to delete.
 *
 * @return {Promise<void>}
 */
async function deleteDirectory( dirPath ) {
	try {
		await fsPromises.rm( dirPath, { recursive: true, force: true } );
		console.log( `Directory ${ dirPath } deleted successfully.` );
	} catch ( err ) {
		console.error( `Error while deleting ${ dirPath }.`, err );
	}
}

/**
 * @function untrailingSlashIt
 * @description Removes a trailing slash from a string, if present.
 *
 * @param {string} str The string to process.
 *
 * @return {string}
 */
function untrailingSlashIt( str ) {
	return str.replace( /\/$/, '' );
}

/**
 * @function trailingSlashIt
 * @description Ensures a string ends with exactly one trailing slash.
 *
 * @param {string} str The string to process.
 *
 * @return {string}
 */
function trailingSlashIt( str ) {
	return untrailingSlashIt( str ) + '/';
}

/**
 * @function stringify
 * @description Takes data objects, parses them as json, sends them through json2php to convert them
 *              to a php multidimensional array, and prefixes the result with a php opener.
 *
 * @since 1.3.25
 *
 * @param {object} contents The data object to convert.
 *
 * @return {string}
 */
function stringify( contents ) {
	return `<?php return ${ json2php( JSON.parse( JSON.stringify( contents ) ) ) };`;
}

/**
 * @function listDir
 * @description Async function that reads a directory and returns filenames for all the files in it.
 *
 * @since 1.3.25
 *
 * @param {string} dir The full path to the directory to list.
 *
 * @return {Promise<string[]>}
 */
async function listDir( dir ) {
	try {
		// Check if the directory exists and is accessible.
		await fsPromises.access( dir, fsConstants.R_OK );

		// Read the directory.
		return fsPromises.readdir( dir );
	} catch ( err ) {
		console.error( 'Error occurred: ', err.message );

		// Return an empty array when an error occurs (e.g., directory does not exist).
		return [];
	}
}

/**
 * @function asyncForEach
 * @description Async version of forEach to allow us to do async operations in a loop.
 *
 * @since 1.3.25
 *
 * @param {Array}    array    The array to iterate.
 * @param {Function} callback The async function to execute.
 *
 * @return {Promise<void>}
 */
async function asyncForEach( array, callback ) {
	for ( let index = 0; index < array.length; index++ ) {
		await callback( array[ index ], index, array );
	}
}

/**
 * @function filehash
 * @description Returns an md5 hash (by default) for a given file using Node's crypto method createHash.
 *
 * @since 1.3.25
 *
 * @param {string} filename  The filename including path to process.
 * @param {string} algorithm The algorithm to apply.
 *
 * @return {Promise<string>}
 */
async function filehash( filename, algorithm = 'md5' ) {
	// eslint-disable-next-line no-undef
	return new Promise( ( res, reject ) => {
		const shasum = createHash( algorithm );

		try {
			const s = fs.createReadStream( filename );

			s.on( 'error', function() {
				return reject( 'Hash calculation fail' );
			} );

			s.on( 'data', function( data ) {
				shasum.update( data );
			} );

			s.on( 'end', function() {
				const hash = shasum.digest( 'hex' );
				return res( hash );
			} );
		} catch ( error ) {
			return reject( 'Hash calculation fail' );
		}
	} );
}
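/*
 * Usage sketch for filehash(). Illustrative only; the path below is a
 * hypothetical build artifact, not a file shipped with this package:
 *
 *     const version = await filehash( './js/dist/admin.min.js' );
 *     // version is a hex digest string, e.g. "9b74c9897bac770ffc029102a200c5de",
 *     // suitable for use as a cache-busting asset version.
 */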
/**
 * @function execShellCommand
 * @description Executes a shell command and returns its output as a Promise.
 *
 * @param {string}  cmd         The command to execute.
 * @param {boolean} printOutput Whether to print stdout as it arrives.
 *
 * @return {Promise<string>}
 */
function execShellCommand( cmd, printOutput = true ) {
	return new Promise( ( resolve, reject ) => {
		const execCmd = exec( cmd, ( error, stdout, stderr ) => {
			if ( error ) {
				console.warn( error );
			}

			resolve( stdout ? stdout : stderr );
		} );

		if ( printOutput ) {
			execCmd.stdout.on( 'data', function( data ) {
				console.log( data );
			} );
		}
	} );
}

/**
 * @function writeManifestFile
 * @description Write an assets.php file with Node that maps hashes to filenames.
 *
 * @since 1.3.25
 *
 * @param {string} manifestDir       The directory to output the manifest file.
 * @param {object} combinedAssetData The data to stringify.
 *
 * @return {void}
 */
function writeManifestFile( manifestDir, combinedAssetData ) {
	const manifestFile = resolve( manifestDir, 'assets.php' );

	if ( ! fs.existsSync( manifestDir ) ) {
		console.log( `Directory does not exist: ${ manifestDir }, skipping asset php write.` );
		return;
	}

	fs.writeFile( manifestFile, stringify( combinedAssetData ), ( err ) => {
		if ( err ) {
			throw err;
		}

		console.log( `Manifest file written successfully to ${ manifestDir }.` );
	} );
}

/**
 * @function formatMenuItem
 * @description Formats a menu string from filename to nav label format.
 *
 * @since 1.9.6
 *
 * @param {string} fileName The file name to format.
 *
 * @return {string}
 */
const formatMenuItem = ( fileName ) => {
	return fileName
		.replace( /_|-/g, ' ' )
		.trim()
		.toLowerCase()
		.split( ' ' )
		.map( ( word ) => word.charAt( 0 ).toUpperCase() + word.slice( 1 ) )
		.join( ' ' );
};

/**
 * @function getMenuData
 * @description If @navsection and/or @navlabel are found in the input string for a doc page,
 *              extract and return an object containing the extracted values for use in the nav template file.
 *
 * @since 1.9.6
 *
 * @param {string} input    The raw doc page text.
 * @param {string} fileName The file name of the doc page.
 * @param {object} config   The doc generation config.
 *
 * @return {object|null}
 */
const getMenuData = ( input, fileName, config ) => {
	// If the @navignore tag is found, do not send to nav.
	if ( input.includes( '@navignore' ) ) {
		return null;
	}

	const sectionMatch = input.match( /^@navsection(.*)$/m );
	const labelMatch = input.match( /^@navlabel(.*)$/m );
	const cleanedFileName = fileName.split( '.' ).slice( 0, -1 ).join( '.' );

	let section = sectionMatch ? sectionMatch[ 1 ].trim() : '';
	let label = labelMatch ? labelMatch[ 1 ].trim() : '';

	// If no section is passed, try to use the default from config, then fall back to the file name.
	if ( ! section.length ) {
		section = config?.defaultNavSection || formatMenuItem( cleanedFileName.split( '.' ).shift() );
	}

	// If no label, use the file name.
	if ( ! label.length ) {
		label = formatMenuItem( cleanedFileName.substring( cleanedFileName.lastIndexOf( '.' ) + 1 ) );
	}

	return { section, label, fileName };
};
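/*
 * Usage sketch for getMenuData(). Illustrative only; the doc source, file name,
 * and config below are hypothetical:
 *
 *     const raw = '@navsection Filters\n@navlabel Form Display\n\n# Intro';
 *     getMenuData( raw, 'filters.form-display.html', { defaultNavSection: 'General' } );
 *     // -> { section: 'Filters', label: 'Form Display', fileName: 'filters.form-display.html' }
 *
 * With no @navsection/@navlabel directives, the section falls back to
 * config.defaultNavSection (or the first dot-separated segment of the file name)
 * and the label is derived from the last dot-separated segment before the extension.
 */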
/**
 * @function cleanText
 * @description Clean up tags that are used for doc generation but should not be included in the final output.
 *
 * @since 1.9.6
 *
 * @param {string} input
 *
 * @return {string}
 */
const cleanText = ( input ) => {
	return input
		.replace( /@navignore/g, '' )
		.replace( /^@navsection(.*)$/m, '' )
		.replace( /^@navlabel(.*)$/m, '' );
};

/**
 * @function parseText
 * @description Run passed text from our doc generator through the lexer.
 *
 * @since 1.9.6
 *
 * @param {string} input Input string to parse.
 *
 * @return {string}
 */
function parseText( input ) {
	const { tokens, symbolizedText } = lexer.lex( input );
	return lexer.processTokens( tokens, symbolizedText );
}

function moduleExists( name ) {
	try {
		return require.resolve( name );
	} catch ( e ) {
		return false;
	}
}

module.exports = {
	asyncForEach,
	copyFolderSync,
	deleteDirectory,
	execShellCommand,
	moduleExists,
	trailingSlashIt,

	extractCommentsToFiles: async ( configs ) => {
		if ( configs.length === 0 ) {
			return;
		}

		await asyncForEach( configs, async ( { ext = 'txt', input = '', output = '', root = '' } ) => {
			try {
				const files = await glob( input );

				if ( files.length ) {
					// First create the output directory if it doesn't exist.
					if ( ! fs.existsSync( output ) ) {
						fs.mkdirSync( output, { recursive: true } );
					}

					// Now iterate over the files array and create an extraction file for each file.
					await asyncForEach( files, async ( file ) => {
						const fileWithLocalDir = file.replace( root, '' );
						const tempFileName = fileWithLocalDir.replace( /\.[^/.]+$/, `.${ ext }` );
						const fileName = tempFileName.replace( /\//g, '.' );
						const outputFile = `${ trailingSlashIt( output ) }${ fileName }`;

						await execShellCommand( `extract-documentation-comments -I ${ file } -O ${ outputFile }` );
					} );
				}
			} catch ( err ) {
				console.error( err );
			}
		} );
	},
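	/*
	 * Config sketch for extractCommentsToFiles(). Illustrative only; the glob,
	 * directories, and extension below are hypothetical, and the import of the
	 * exported function from a consuming gulpfile is omitted:
	 *
	 *     await extractCommentsToFiles( [ {
	 *         ext: 'txt',
	 *         input: './js/src/*.js',        // glob of source files to scan
	 *         output: './docs/extracted/',   // where the per-file .txt extracts land
	 *         root: './js/src/',             // stripped from each path to build the output name
	 *     } ] );
	 *
	 * generateHTMLFiles() below consumes the same output directory, converting each
	 * extracted file to HTML via the lexer and showdown.
	 */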
	generateHTMLFiles: async ( configs ) => {
		if ( configs.length === 0 ) {
			return;
		}

		await asyncForEach( configs, async ( config ) => {
			const ext = config.ext || 'txt';

			copyFolderSync( path.resolve( `${ __dirname }/../`, 'assets/doc' ), config.output );

			try {
				const files = await glob( `${ trailingSlashIt( config.output ) }*.${ ext }` );
				const contentData = [];

				config.menuItems = [];

				await asyncForEach( files, async ( file ) => {
					// Get the file name from the full path (probably a text file).
					const originalFileName = file.split( '/' ).pop();

					// Remove the extension from the file name.
					const fileNameNoExt = originalFileName.split( '.' ).slice( 0, -1 ).join( '.' ).trim();

					// If the config.indexFile property is set and matches the file name, use index.html; otherwise just swap the extension for .html.
					const fileName = config?.indexFile === fileNameNoExt ? 'index.html' : originalFileName.replace( /\.[^/.]+$/, '.html' );

					// Create the file output path with the new filename.
					const outputFile = `${ trailingSlashIt( config.output ) }${ fileName }`;

					// Read in the text from the file.
					const rawText = fs.readFileSync( file ).toString();

					// Get the menu data.
					const menuData = getMenuData( rawText, fileName, config );

					// Run the text through the lexer after cleaning out non-applicable @ directives.
					const txt = parseText( cleanText( rawText ) );

					// Convert the parsed markdown to html.
					const content = markdownConverter.makeHtml( txt );

					// Store the parsed data for use in the following write loop.
					contentData.push( {
						content,
						fileName,
						outputFile,
					} );

					// If getMenuData did not return null, push to the menuItems array for use in the menu template in the following loop.
					if ( menuData ) {
						config.menuItems.push( menuData );
					}

					// Delete the markdown source file.
					fs.unlinkSync( file );
				} );

				// Write the data to files.
				await asyncForEach( contentData, async ( { content, outputFile } ) => {
					// Build the page.
					const html = `${ head( config ) }
${ menu( config ) }
${ nav( config ) }
${ contentWrapperOpen( config ) }
${ content }
${ contentWrapperClose( config ) }
${ footer( config ) }
`;

					// Write the html file.
					fs.writeFile( outputFile, html, function( error ) {
						if ( error ) {
							return console.log( error );
						}
					} );
				} );
			} catch ( err ) {
				console.error( err );
			}
		} );
	},

	/**
	 * @function writeHashData
	 * @description Process a directory of files and generate hashes for them, finally outputting them as a php file
	 *              which maps filenames to their hashes as a multidimensional array.
	 *
	 * @since 1.3.25
	 *
	 * @param {string} dir                The directory to parse.
	 * @param {Array}  fileNamesToProcess The array of filenames without extension to generate the manifest for.
	 * @param {string} ext                The extension to process.
	 *
	 * @return {Promise<void>}
	 */
	writeHashData: async ( dir, fileNamesToProcess = [], ext = 'js' ) => {
		const jsFiles = await listDir( dir );

		if ( ! jsFiles ) {
			return;
		}

		const data = {
			hash_map: {},
		};

		await asyncForEach( jsFiles, async ( file ) => {
			const extension = extname( file );

			if ( extension !== `.${ ext }` ) {
				return;
			}

			const fileNameArray = file.split( '.' );

			// Filenames with four or more dot-separated parts are chunks; do not process them.
			if ( ext === 'js' && ( fileNameArray.length >= 4 || ! fileNamesToProcess.includes( fileNameArray[ 0 ] ) ) ) {
				return;
			}

			// Store the file hash as the version.
			const version = await filehash( `${ dir }/${ file }` );
			data.hash_map[ file ] = { version, file };
		} );

		writeManifestFile( dir, data );
	},
};
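
// Minimal usage sketch for writeHashData(), assuming hypothetical build paths and
// entry names; it only runs when this file is executed directly with Node, never
// when required by a gulp task.
if ( require.main === module ) {
	( async () => {
		// Hash top-level .js files named "admin" or "theme" in ./js/dist and write
		// the resulting hash map to ./js/dist/assets.php (skipped if the directory is missing).
		await module.exports.writeHashData( './js/dist', [ 'admin', 'theme' ], 'js' );
	} )();
}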