/**
 * @gravityforms/gulp-tasks
 *
 * Configurable Gulp tasks for use in Gravity Forms projects.
 */
const tokens = require( './tokens/tokens' );
/**
 * @function getUniqSymbol
 * @description Get a unique symbol to use as a placeholder for tokens.
 *
 * Combines the current epoch-millisecond timestamp with a random
 * fraction so that two calls are effectively guaranteed to differ.
 *
 * @since 1.9.6
 *
 * @return {number} The unique symbol.
 */
function getUniqSymbol() {
	const timestamp = Date.now();
	const entropy = Math.random();
	return timestamp + entropy;
}
/**
 * @function identifyNewToken
 * @description Identify a token type based on the line being parsed.
 *
 * Tries each registered token class against the line and returns the
 * first instance that recognizes it, falling back to a plain-text token
 * when none match.
 *
 * @since 1.9.6
 *
 * @param {string} line The line being parsed.
 *
 * @return {object} The new token.
 */
function identifyNewToken( line ) {
	for ( const type of Object.keys( tokens ) ) {
		const candidate = new tokens[ type ]( line );
		if ( candidate.isToken() ) {
			return candidate;
		}
	}
	return new tokens.plainTextToken( line );
}
/**
 * @function lexFile
 * @description Lex the given file text for tokens.
 *
 * Walks the text line by line, growing the current token until its end
 * condition is reached, then stores it under a unique numeric symbol and
 * appends that symbol to the symbolized text so the token content can be
 * substituted back in later by processTokens().
 *
 * @since 1.9.6
 *
 * @param {string} text The file text to be lexed.
 *
 * @return {object} The tokens and symbolized text.
 */
function lexFile( text ) {
	const lines = text.split( '\n' );
	let symbolizedText = '';
	let currentToken = false;
	const foundTokens = {};
	lines.forEach( ( lineItem, idx ) => {
		const line = lineItem;
		const prevLine = idx === 0 ? '' : lines[ idx - 1 ];
		// No token in progress yet — classify this line to start one.
		if ( ! currentToken ) {
			currentToken = identifyNewToken( line );
		}
		// Close out the current token when either its own end condition is
		// met, or a plaintext run is interrupted by the start of a
		// structured token on this line. (End-condition semantics live in
		// the token classes in ./tokens/tokens — see that module.)
		if (
			( currentToken && currentToken.endConditionReached( line, prevLine ) ) ||
			( currentToken.type() === 'plaintext' && identifyNewToken( line ).type() !== 'plaintext' )
		) {
			const symbol = getUniqSymbol();
			symbolizedText += symbol;
			foundTokens[ symbol ] = currentToken;
			currentToken = identifyNewToken( line );
		}
		// The line is accumulated into the (possibly just-started) token
		// unless it is the line that terminated the token.
		if ( currentToken && ! currentToken.endConditionReached( line, prevLine ) ) {
			currentToken.addLine( line );
		}
		// End of input: flush whatever token is still open.
		if ( currentToken && idx === lines.length - 1 ) {
			const symbol = getUniqSymbol();
			symbolizedText += symbol;
			foundTokens[ symbol ] = currentToken;
		}
	} );
	return { foundTokens, symbolizedText };
}
/**
 * @function parseToken
 * @description Parse a token into its correct contents.
 *
 * @since 1.9.6
 *
 * @param {string} sym The symbol to replace with the rendered content.
 * @param {object} token The current token being evaluated.
 * @param {string} updatedText The current file text containing symbols to be replaced.
 *
 * @return {string} The file text with the contents updated with token markup.
 */
function parseToken( sym, token, updatedText ) {
	// Use a replacer function so sequences such as `$&`, `$'` and `$1`
	// inside the token content are inserted literally instead of being
	// interpreted as special replacement patterns by String.prototype.replace().
	return updatedText.replace( sym, () => token.getContent() );
}
/**
 * @function processTokens
 * @description Process the tokens found in a file.
 *
 * @since 1.9.6
 *
 * @param {object} foundTokens Map of placeholder symbols to their tokens.
 * @param {string} text The file text containing the symbols to replace.
 *
 * @return {string} The file text with the contents updated with tokens markup.
 */
function processTokens( foundTokens, text ) {
	// Fold every symbol → token pair into the text, replacing each
	// placeholder symbol with its token's rendered content.
	return Object.keys( foundTokens ).reduce(
		( updatedText, sym ) => parseToken( sym, foundTokens[ sym ], updatedText ),
		text
	);
}
/**
 * @function lex
 * @description Lex a file for tokens and its symbolized version for processing.
 *
 * @since 1.9.6
 *
 * @param {string} text The file text to process.
 *
 * @return {object} The tokens and symbolized text found for the file.
 */
function lex( text ) {
	const result = lexFile( text );
	return {
		tokens: result.foundTokens,
		symbolizedText: result.symbolizedText,
	};
}
// Public API: lex() produces the token map plus the symbolized text;
// processTokens() renders the tokens back into that text.
module.exports = { lex, processTokens };