// tm-text — Trackmania and Maniaplanet text parser and formatter
// (source-listing metadata: version unknown; 160 lines (159 loc) • 6.73 kB • JavaScript)
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.tokenize = exports.findNextHrefEndIndex = exports.initTokens = exports.stringifyTokens = exports.extractWords = exports.isColor = exports.getColor = exports.getKindByChar = exports.isKindWithPrefix = exports.isLinkKind = exports.isHrefKind = exports.isCssKind = exports.isWidthKind = void 0;
const object_entries_1 = require("../utils/object-entries");
const options_1 = require("../utils/options");
const syntax_1 = require("../utils/syntax");
/** Whether `kind` is one of the three width-modifier token kinds. */
const isWidthKind = (kind) => [
    syntax_1.TOKEN.WIDTH_NARROW,
    syntax_1.TOKEN.WIDTH_NORMAL,
    syntax_1.TOKEN.WIDTH_WIDE,
].includes(kind);
exports.isWidthKind = isWidthKind;
/**
 * Whether `kind` is a width kind or one of the other style kinds
 * (bold, color, italic, shadow, uppercase).
 */
const isCssKind = (kind) => (0, exports.isWidthKind)(kind) || [
    syntax_1.TOKEN.BOLD,
    syntax_1.TOKEN.COLOR,
    syntax_1.TOKEN.ITALIC,
    syntax_1.TOKEN.SHADOW,
    syntax_1.TOKEN.UPPERCASE,
].includes(kind);
exports.isCssKind = isCssKind;
/** Whether `kind` is one of the three href-sequence token kinds. */
const isHrefKind = (kind) => [
    syntax_1.TOKEN.HREF_CONTENT,
    syntax_1.TOKEN.HREF_END,
    syntax_1.TOKEN.HREF_START,
].includes(kind);
exports.isHrefKind = isHrefKind;
/** Whether `kind` is one of the link token kinds. */
const isLinkKind = (kind) => [
    syntax_1.TOKEN.LINK_EXTERNAL,
    syntax_1.TOKEN.LINK_INTERNAL,
    syntax_1.TOKEN.LINK_INTERNAL_WITH_PARAMS,
].includes(kind);
exports.isLinkKind = isLinkKind;
/**
 * Whether tokens of this kind are written with a leading `$` in the input:
 * everything except the href parts, newline, tab and plain words.
 */
const isKindWithPrefix = (kind) => {
    if ((0, exports.isHrefKind)(kind)) {
        return false;
    }
    const unprefixedKinds = [
        syntax_1.TOKEN.NEWLINE,
        syntax_1.TOKEN.TAB,
        syntax_1.TOKEN.WORD,
    ];
    return !unprefixedKinds.includes(kind);
};
exports.isKindWithPrefix = isKindWithPrefix;
/**
 * Looks up the token kind mapped to `char` (case-insensitive), or
 * `undefined` when the character is not a known format character.
 */
const getKindByChar = (char) => {
    const lowered = char.toLowerCase();
    const entry = (0, object_entries_1.objectEntries)(syntax_1.TOKEN_TO_CHAR_MAP)
        .find(([, mappedChar]) => mappedChar === lowered);
    return entry === undefined ? undefined : entry[0];
};
exports.getKindByChar = getKindByChar;
/**
 * Returns the (up to) 3 characters starting at `firstCharIndex` —
 * the candidate hex digits of a color sequence.
 */
const getColor = (input, firstCharIndex) => {
    const endIndexExclusive = firstCharIndex + 3;
    return input.substring(firstCharIndex, endIndexExclusive);
};
exports.getColor = getColor;
/**
 * Whether the 3 characters at `firstCharIndex` form a hexadecimal
 * color triplet (e.g. `f00`); false when fewer than 3 characters remain.
 */
const isColor = (input, firstCharIndex) => {
    const candidate = (0, exports.getColor)(input, firstCharIndex);
    return /[0-9a-f]{3}/i.test(candidate);
};
exports.isColor = isColor;
/** Keeps only the plain WORD tokens from a token list. */
const extractWords = (tokens) => tokens.filter((token) => token.kind === syntax_1.TOKEN.WORD);
exports.extractWords = extractWords;
/** Concatenates the content of every token back into a single string. */
const stringifyTokens = (tokens) => tokens.reduce((text, token) => text + token.content, '');
exports.stringifyTokens = stringifyTokens;
/**
 * Creates a token accumulator bound to `input`.
 *
 * @param input   Full input string being tokenized (read by `addColor`).
 * @param options Resolved options; `options.syntax` selects which token kinds
 *                are accepted — others are demoted to WORD or (if prefixed) dropped.
 * @returns Helpers `add`, `addColor`, `addWord` and `get` (returns the token list).
 */
const initTokens = (input, options) => {
    const tokens = [];
    const addToken = (kind, content, charIndex) => {
        const hasPrefix = (0, exports.isKindWithPrefix)(kind);
        const prefixLength = hasPrefix ? 1 : 0;
        const previousToken = tokens.at(-1);
        let tokenKind = kind;
        // Demote to WORD (or drop, for prefixed kinds) when the kind is not part
        // of the active syntax, or when an href part appears without a preceding
        // href/link token to attach to.
        if (!syntax_1.SYNTAX_MAP[options.syntax].includes(kind)
            || ((0, exports.isHrefKind)(tokenKind) && !(0, exports.isHrefKind)(previousToken === null || previousToken === void 0 ? void 0 : previousToken.kind) && !(0, exports.isLinkKind)(previousToken === null || previousToken === void 0 ? void 0 : previousToken.kind))) {
            if (hasPrefix) {
                return;
            }
            tokenKind = syntax_1.TOKEN.WORD;
        }
        // Merge consecutive WORD tokens into a single token.
        if (tokenKind === syntax_1.TOKEN.WORD && (previousToken === null || previousToken === void 0 ? void 0 : previousToken.kind) === syntax_1.TOKEN.WORD) {
            previousToken.content += content;
            // FIX: advance the end position by the merged content's length instead
            // of a hard-coded 1 — multi-character content demoted to WORD was
            // under-counted before. Single-character merges are unaffected.
            previousToken.pos.end += content.length;
            return;
        }
        tokens.push({
            kind: tokenKind,
            // Prefixed kinds restore their `$` marker in the stored content.
            content: hasPrefix ? `$${content}` : content,
            pos: {
                start: charIndex - prefixLength, // span includes the `$` prefix
                end: charIndex + content.length,
            },
        });
    };
    return {
        add: addToken,
        addColor: (firstCharIndex) => addToken(syntax_1.TOKEN.COLOR, (0, exports.getColor)(input, firstCharIndex), firstCharIndex),
        addWord: (content, charIndex) => addToken(syntax_1.TOKEN.WORD, content, charIndex),
        get: () => tokens,
    };
};
exports.initTokens = initTokens;
/**
 * Finds the index of the next href-closing character strictly after
 * `startIndex`, or `null` when there is none.
 *
 * @param input      String to scan.
 * @param startIndex Index of the current character; the scan starts after it.
 * @returns Index of the closing character, or `null`.
 */
const findNextHrefEndIndex = (input, startIndex) => {
    // indexOf avoids materializing a character array just to scan it
    // (the previous split('') + findIndex allocated O(n) per call).
    const index = input.indexOf(syntax_1.TOKEN_TO_CHAR_MAP.HREF_END, startIndex + 1);
    return index > -1
        ? index
        : null;
};
exports.findNextHrefEndIndex = findNextHrefEndIndex;
/**
 * Tokenizes a Trackmania / Maniaplanet formatted string into a flat token list.
 *
 * Handles `$$` escapes (literal `$`), `$xxx` colors, single-character format
 * tokens, and link tokens optionally followed by an href block.
 *
 * @param input   Raw formatted string.
 * @param options Optional parser options; merged with defaults.
 * @returns Array of tokens produced by the accumulator from `initTokens`.
 */
const tokenize = (input, options) => {
    const opts = (0, options_1.withDefaultOptions)(options);
    const tokens = (0, exports.initTokens)(input, opts);
    // Number of upcoming characters already consumed by a multi-char token
    // (color digits, href body).
    let charsToSkip = 0;
    let previousCharIsDollar = false;
    input.split('').forEach((char, index) => {
        if (charsToSkip > 0) {
            charsToSkip -= 1;
            return;
        }
        if (char === '$') {
            // `$$` emits a literal dollar sign; a single `$` arms the next
            // character as a format character.
            if (previousCharIsDollar) {
                tokens.addWord(char, index);
            }
            previousCharIsDollar = !previousCharIsDollar;
            return;
        }
        // Non-breaking space and vertical tab are dropped entirely.
        if (char === '\u00A0' || char === '\v') {
            previousCharIsDollar = false;
            return;
        }
        // Remaining whitespace control characters become TAB / NEWLINE tokens
        // whose stored content is a single regular space.
        if ('\f\n\r\t\u2028\u2029'.includes(char)) {
            previousCharIsDollar = false;
            if (char === '\t') {
                tokens.add(syntax_1.TOKEN.TAB, ' ', index);
            }
            else {
                tokens.add(syntax_1.TOKEN.NEWLINE, ' ', index);
            }
            return;
        }
        // Plain character with no preceding `$`: emit as WORD and stop here.
        if (previousCharIsDollar) {
            previousCharIsDollar = false;
        }
        else {
            tokens.addWord(char, index);
            return;
        }
        // From here on, `char` is the character that followed a `$`.
        if ((0, exports.isColor)(input, index)) {
            tokens.addColor(index);
            charsToSkip = 2; // the two remaining hex digits
            return;
        }
        const kind = (0, exports.getKindByChar)(char);
        if (!kind) {
            // Unknown `$x` sequence: both characters are silently dropped.
            return;
        }
        tokens.add(kind, char, index);
        if (!(0, exports.isLinkKind)(kind)) {
            return;
        }
        // Link token: check for an immediately following href-opening character.
        const nextChar = input.at(index + 1);
        if (nextChar !== syntax_1.TOKEN_TO_CHAR_MAP.HREF_START) {
            return;
        }
        tokens.add(syntax_1.TOKEN.HREF_START, nextChar, index + 1);
        const hrefEndIndex = (0, exports.findNextHrefEndIndex)(input, index);
        if (!hrefEndIndex) {
            // Unclosed href: keep the opening token and continue scanning normally.
            return;
        }
        const href = input.substring(index + 2, hrefEndIndex);
        // Href content is re-tokenized and reduced to its plain words.
        // NOTE(review): the nested tokenize() call does not forward `opts`, so
        // href content is always parsed with default options — confirm intentional.
        const sanitizedHref = (0, exports.stringifyTokens)((0, exports.extractWords)((0, exports.tokenize)(href)));
        tokens.add(syntax_1.TOKEN.HREF_CONTENT, sanitizedHref, index + 2);
        tokens.add(syntax_1.TOKEN.HREF_END, syntax_1.TOKEN_TO_CHAR_MAP.HREF_END, hrefEndIndex);
        // Skip the opening character, the href body and the closing character.
        charsToSkip += href.length + 2;
    });
    return tokens.get();
};
exports.tokenize = tokenize;
exports.default = exports.tokenize;