/*
 * @npmstuff/argdown-core
 * Version: (unspecified)
 * A pluggable parser for the Argdown argumentation syntax
 * 631 lines • 26 kB — JavaScript (compiled output: lexer.js)
 */
"use strict";
/**
 * TypeScript-emitted interop helper: defines property `k2` (defaults to `k`)
 * on target `o`, mirroring property `k` of module `m`. Uses a live getter
 * when `Object.create` is available, a plain copy otherwise.
 */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
/**
 * TypeScript-emitted interop helper: installs `v` as the "default" export of
 * namespace object `o` (enumerable defineProperty when `Object.create`
 * exists, plain assignment otherwise).
 */
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
/**
 * TypeScript-emitted interop helper: converts a CommonJS module into an ES
 * namespace object. Real ES modules are returned as-is; otherwise every own
 * enumerable property except "default" is re-exported via __createBinding,
 * and the module itself becomes the "default" property.
 */
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
/**
 * TypeScript-emitted interop helper: wraps a CommonJS export so that
 * `.default` always points at the module's default export.
 */
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// Mark this CommonJS module as transpiled-from-ESM for interop consumers.
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declare every export so the assignments further down can run in any order.
exports.tokenize = exports.EOF = exports.UnusedControlChar = exports.Freestyle = exports.SpecialChar = exports.EscapedChar = exports.Spaces = exports.Newline = exports.Tag = exports.Link = exports.Comment = exports.UnderscoreItalicEnd = exports.UnderscoreItalicStart = exports.AsteriskItalicEnd = exports.AsteriskItalicStart = exports.UnderscoreBoldEnd = exports.UnderscoreBoldStart = exports.AsteriskBoldEnd = exports.AsteriskBoldStart = exports.HeadingStart = exports.ArgumentMention = exports.ArgumentReference = exports.ArgumentDefinition = exports.StatementNumber = exports.StatementMention = exports.StatementReference = exports.StatementDefinition = exports.Dedent = exports.Indent = exports.Emptyline = exports.UnorderedListItem = exports.OrderedListItem = exports.InferenceEnd = exports.ListDelimiter = exports.Data = exports.FrontMatter = exports.InferenceStart = exports.OutgoingUndercut = exports.IncomingUndercut = exports.Contradiction = exports.OutgoingAttack = exports.OutgoingSupport = exports.IncomingAttack = exports.IncomingSupport = exports.tokenList = void 0;
const chevrotain = __importStar(require("chevrotain"));
const lodash_last_1 = __importDefault(require("lodash.last"));
const lodash_partialright_1 = __importDefault(require("lodash.partialright"));
const TokenNames_1 = require("./TokenNames");
const utils_1 = require("./utils");
// Local aliases for the chevrotain API used throughout this file.
const createToken = chevrotain.createToken;
const createTokenInstance = chevrotain.createTokenInstance;
const tokenMatcher = chevrotain.tokenMatcher;
// Mutable lexer state, reset by init() at the start of every tokenize() call:
// indentStack holds the open indentation levels (leading-whitespace lengths),
// rangesStack holds the currently open bold/italic range types.
let indentStack = [];
let rangesStack = [];
// Collects every token type defined below, in definition order.
exports.tokenList = [];
// Resets the module-level lexer state. Called at the start of every
// tokenize() run so consecutive runs cannot leak indentation levels or
// open bold/italic ranges into each other.
const init = () => {
    rangesStack = [];
    indentStack = [0];
};
/**
 * Returns the line on which the next token will start, derived from the last
 * token already matched (1 when no tokens exist yet). If the last token was
 * an Emptyline or Newline, the next token begins on the following line.
 * Fix: use the local `tokenMatcher` alias (declared above) like every other
 * helper in this file, instead of reaching through `chevrotain.tokenMatcher`.
 */
const getCurrentLine = (tokens) => {
    if ((0, utils_1.arrayIsEmpty)(tokens))
        return 1;
    const lastToken = (0, lodash_last_1.default)(tokens);
    let currentLine = lastToken ? lastToken.endLine : 1;
    if (lastToken &&
        (tokenMatcher(lastToken, exports.Emptyline) ||
            tokenMatcher(lastToken, exports.Newline))) {
        currentLine++;
    }
    return currentLine;
};
// End offset of the most recently matched token, or 0 when nothing has been
// matched yet.
const getCurrentEndOffset = (tokens) => {
    if ((0, utils_1.arrayIsEmpty)(tokens)) {
        return 0;
    }
    const last = (0, lodash_last_1.default)(tokens);
    if (!last) {
        return 0;
    }
    return last.endOffset;
};
// True when the previously matched token is a Newline token.
// Loose equality (==) is deliberate: it treats both null and undefined as
// "no previous token".
const lastTokenIsNewline = (lastToken) => {
    return lastToken == undefined ? false : tokenMatcher(lastToken, exports.Newline);
};
/**
 * Closes every indentation level still open on indentStack (all but the base
 * level) by pushing one synthetic zero-width Dedent token per level onto
 * `matchedTokens`. The synthetic tokens all reuse the position immediately
 * after the last real token. Mutates both `matchedTokens` and `indentStack`.
 */
const emitRemainingDedentTokens = (matchedTokens) => {
// Only the base level (0) left — nothing to close.
if (indentStack.length <= 1) {
return;
}
const lastToken = (0, lodash_last_1.default)(matchedTokens);
// Zero-width position: start == end, anchored after the last token.
const startOffset = getCurrentEndOffset(matchedTokens);
const endOffset = startOffset;
const startLine = getCurrentLine(matchedTokens);
const endLine = startLine;
const startColumn = lastToken && lastToken.endColumn ? lastToken.endColumn : 0;
const endColumn = startColumn;
while (indentStack.length > 1) {
matchedTokens.push(createTokenInstance(exports.Dedent, "", startOffset, endOffset, startLine, endLine, startColumn, endColumn));
indentStack.pop();
}
};
/**
 * Compares the current line's indentation (`indentStr`, leading whitespace)
 * with the level on top of indentStack and emits synthetic tokens:
 * - deeper: pushes the new level and one Indent token;
 * - shallower: pops levels (never the base level), emitting one Dedent token
 *   per popped level, until the stack top is no deeper than the current line;
 * - equal: emits nothing.
 * The synthetic tokens have an empty image but are positioned over the
 * indentation whitespace. Mutates `matchedTokens` and `indentStack`.
 */
const emitIndentOrDedent = (matchedTokens, indentStr) => {
// Indentation depth is simply the length of the leading whitespace.
const currIndentLevel = indentStr.length;
let lastIndentLevel = (0, lodash_last_1.default)(indentStack) || 0;
const image = "";
// Position the synthetic token over the indentation characters.
const startOffset = getCurrentEndOffset(matchedTokens) + 1;
const endOffset = startOffset + indentStr.length - 1;
const startLine = getCurrentLine(matchedTokens);
const endLine = startLine;
const startColumn = 1;
const endColumn = startColumn + indentStr.length - 1;
if (currIndentLevel > lastIndentLevel) {
indentStack.push(currIndentLevel);
let indentToken = createTokenInstance(exports.Indent, image, startOffset, endOffset, startLine, endLine, startColumn, endColumn);
matchedTokens.push(indentToken);
}
else if (currIndentLevel < lastIndentLevel) {
while (indentStack.length > 1 && currIndentLevel < lastIndentLevel) {
indentStack.pop();
lastIndentLevel = (0, lodash_last_1.default)(indentStack) || 0;
let dedentToken = createTokenInstance(exports.Dedent, image, startOffset, endOffset, startLine, endLine, startColumn, endColumn);
matchedTokens.push(dedentToken);
}
}
};
/**
 * Custom chevrotain matcher shared by all relation tokens (+>, ->, <+, …).
 * A relation may only appear at the start of the input, after an Emptyline,
 * or after a Newline. `pattern` (bound per relation via lodash.partialRight
 * below) must produce exactly two capture groups: group 1 is the leading
 * indentation, which is fed to emitIndentOrDedent and may push synthetic
 * Indent/Dedent tokens into `tokens` as a side effect.
 */
const matchRelation = (text, offset, tokens, _groups, pattern) => {
const remainingText = text.substr(offset || 0);
const lastToken = (0, lodash_last_1.default)(tokens);
const afterNewline = lastTokenIsNewline(lastToken);
const afterEmptyline = lastToken && tokenMatcher(lastToken, exports.Emptyline);
if ((0, utils_1.arrayIsEmpty)(tokens) || afterEmptyline || afterNewline) {
let match = pattern.exec(remainingText);
// length == 3: full match plus the two required capture groups.
if (match !== null && match.length == 3) {
const indentStr = match[1];
emitIndentOrDedent(tokens, indentStr);
return match;
}
}
return null;
};
// One matcher per relation symbol. Group 1 captures the indentation, group 2
// the relation symbol itself (matchRelation requires exactly these groups).
// NOTE(review): the character class [' '\t] also matches a literal apostrophe
// in addition to space and tab — carried over verbatim; confirm whether the
// apostrophe is intentional.
const matchIncomingSupport = (0, lodash_partialright_1.default)(matchRelation, /^([' '\t]*)(\+>)/);
const matchIncomingAttack = (0, lodash_partialright_1.default)(matchRelation, /^([' '\t]*)(->)/);
const matchOutgoingSupport = (0, lodash_partialright_1.default)(matchRelation, /^([' '\t]*)( \+)/);
const matchOutgoingAttack = (0, lodash_partialright_1.default)(matchRelation, /^([' '\t]*)( -)/);
const matchContradiction = (0, lodash_partialright_1.default)(matchRelation, /^([' '\t]*)(><)/);
const matchIncomingUndercut = (0, lodash_partialright_1.default)(matchRelation, /^([' '\t]*)(_>)/);
const matchOutgoingUndercut = (0, lodash_partialright_1.default)(matchRelation, /^([' '\t]*)(<_|(?:_(?=\s)))/);
// Relation token definitions. Each uses the shared custom matcher above.
// line_breaks: true declares to chevrotain that the custom pattern may be
// involved with line terminators; start_chars_hint speeds up lexer dispatch.
exports.IncomingSupport = createToken({
name: TokenNames_1.TokenNames.INCOMING_SUPPORT,
pattern: matchIncomingSupport,
line_breaks: true,
label: "+> (Incoming Support)",
start_chars_hint: [" ", "\t", "+"]
});
exports.tokenList.push(exports.IncomingSupport);
exports.IncomingAttack = createToken({
name: TokenNames_1.TokenNames.INCOMING_ATTACK,
pattern: matchIncomingAttack,
line_breaks: true,
label: "-> (Incoming Attack)",
start_chars_hint: [" ", "\t", "-"]
});
exports.tokenList.push(exports.IncomingAttack);
exports.OutgoingSupport = createToken({
name: TokenNames_1.TokenNames.OUTGOING_SUPPORT,
pattern: matchOutgoingSupport,
line_breaks: true,
label: "<+ (Outgoing Support)",
start_chars_hint: [" ", "\t", "<"]
});
exports.tokenList.push(exports.OutgoingSupport);
exports.OutgoingAttack = createToken({
name: TokenNames_1.TokenNames.OUTGOING_ATTACK,
pattern: matchOutgoingAttack,
line_breaks: true,
label: "<- (Outgoing Attack)",
start_chars_hint: [" ", "\t", "<"]
});
exports.tokenList.push(exports.OutgoingAttack);
exports.Contradiction = createToken({
name: TokenNames_1.TokenNames.CONTRADICTION,
pattern: matchContradiction,
line_breaks: true,
label: ">< (Contradiction)",
start_chars_hint: [" ", "\t", ">"]
});
exports.tokenList.push(exports.Contradiction);
exports.IncomingUndercut = createToken({
name: TokenNames_1.TokenNames.INCOMING_UNDERCUT,
pattern: matchIncomingUndercut,
line_breaks: true,
label: "_> (Incoming Undercut)",
start_chars_hint: [" ", "\t", "_"]
});
exports.tokenList.push(exports.IncomingUndercut);
exports.OutgoingUndercut = createToken({
name: TokenNames_1.TokenNames.OUTGOING_UNDERCUT,
pattern: matchOutgoingUndercut,
line_breaks: true,
label: "<_ (Outgoing Undercut)",
start_chars_hint: [" ", "\t", "<"]
});
exports.tokenList.push(exports.OutgoingUndercut);
// Matches the "--" that opens an inference block. Only allowed at the very
// start of the input or directly after a newline; on success every open
// indentation level is closed by emitting the remaining Dedent tokens.
const inferenceStartPattern = /^[' '\t]*-{2}/;
const matchInferenceStart = (text, offset, tokens) => {
    const previousToken = (0, lodash_last_1.default)(tokens);
    const canStartHere = (0, utils_1.arrayIsEmpty)(tokens) || lastTokenIsNewline(previousToken);
    if (!canStartHere) {
        return null;
    }
    const match = inferenceStartPattern.exec(text.substr(offset || 0));
    if (match == null) {
        return null;
    }
    emitRemainingDedentTokens(tokens);
    return match;
};
// "--" opens an inference block and switches the lexer into inference_mode
// (see lexerConfig at the bottom of this file).
exports.InferenceStart = createToken({
name: TokenNames_1.TokenNames.INFERENCE_START,
pattern: matchInferenceStart,
push_mode: "inference_mode",
line_breaks: true,
label: "-- (Inference Start)",
start_chars_hint: [" ", "\t", "-"]
});
exports.tokenList.push(exports.InferenceStart);
// "===" fenced YAML front matter, lexed as a single token.
exports.FrontMatter = createToken({
name: TokenNames_1.TokenNames.FRONT_MATTER,
pattern: /===+[\s\S]*?===+/,
label: "Front Matter (YAML)"
});
exports.tokenList.push(exports.FrontMatter);
// Inline {...} metadata. The lookaheads appear to keep the match from
// swallowing a trailing "," or "}" belonging to enclosing YAML — confirm
// against the grammar before changing.
exports.Data = createToken({
name: TokenNames_1.TokenNames.DATA,
pattern: /{((?!}\s[^\,}])(.|\n))*}(?!\s*(\,|}))/,
label: "Meta Data (YAML)"
});
exports.tokenList.push(exports.Data);
exports.ListDelimiter = createToken({
name: TokenNames_1.TokenNames.LIST_DELIMITER,
pattern: /,/,
label: ","
});
exports.tokenList.push(exports.ListDelimiter);
// "--" closes an inference block and pops the lexer back to default_mode.
exports.InferenceEnd = createToken({
name: TokenNames_1.TokenNames.INFERENCE_END,
pattern: /-{2,}/,
pop_mode: true,
label: "-- (Inference End)"
});
exports.tokenList.push(exports.InferenceEnd);
/**
 * Custom matcher shared by ordered and unordered list items. Like relations,
 * a list item may only appear at the start of the input, after an Emptyline,
 * or after a Newline. One extra space is appended to the captured indentation
 * before it is passed to emitIndentOrDedent, so a list item registers as one
 * character more deeply indented than surrounding text with the same leading
 * whitespace. `pattern` is bound via lodash.partialRight below.
 */
const matchListItem = (text, offset, tokens, _groups, pattern) => {
let remainingText = text.substr(offset || 0);
let lastToken = (0, lodash_last_1.default)(tokens);
let afterNewline = lastTokenIsNewline(lastToken);
let afterEmptyline = lastToken && tokenMatcher(lastToken, exports.Emptyline);
if ((0, utils_1.arrayIsEmpty)(tokens) || afterEmptyline || afterNewline) {
let match = pattern.exec(remainingText);
if (match !== null) {
// Extra space: list items count as one level deeper than their indentation.
const indentStr = match[1] + " ";
emitIndentOrDedent(tokens, indentStr);
return match;
}
}
return null;
};
// Ordered list item: indentation + digits + "." followed by whitespace
// (the whitespace is a lookahead and not consumed).
const orderedListItemPattern = /^([' '\t]*)\d+\.(?=\s)/;
const matchOrderedListItem = (0, lodash_partialright_1.default)(matchListItem, orderedListItemPattern);
exports.OrderedListItem = createToken({
name: TokenNames_1.TokenNames.ORDERED_LIST_ITEM,
pattern: matchOrderedListItem,
line_breaks: true,
label: "{Indentation}{number}. (Ordered List Item)",
start_chars_hint: [" ", "\t"]
});
exports.tokenList.push(exports.OrderedListItem);
// Unordered list item: indentation + "*" followed by whitespace.
const unorderedListItemPattern = /^([' '\t]*)\*(?=\s)/;
const matchUnorderedListItem = (0, lodash_partialright_1.default)(matchListItem, unorderedListItemPattern);
exports.UnorderedListItem = createToken({
name: TokenNames_1.TokenNames.UNORDERED_LIST_ITEM,
pattern: matchUnorderedListItem,
line_breaks: true,
label: "{Indentation}* (Unordered List Item)",
start_chars_hint: [" ", "\t"]
});
exports.tokenList.push(exports.UnorderedListItem);
// Matches two or more consecutive line breaks (an "empty line"). Never
// matches twice in a row, and closes all open indentation levels unless the
// match reaches the very end of the input.
const emptylinePattern = /^(?:[ \t]*(?:\r\n|\n)){2,}/;
const matchEmptyline = (text, offset, tokens) => {
    const previousToken = (0, lodash_last_1.default)(tokens);
    if (previousToken && tokenMatcher(previousToken, exports.Emptyline)) {
        return null;
    }
    const rest = text.substr(offset || 0);
    const match = emptylinePattern.exec(rest);
    if (match === null) {
        return null;
    }
    const reachedEndOfText = match[0].length === rest.length;
    if (!reachedEndOfText) {
        emitRemainingDedentTokens(tokens);
    }
    return match;
};
exports.Emptyline = createToken({
name: TokenNames_1.TokenNames.EMPTYLINE,
pattern: matchEmptyline,
line_breaks: true,
label: "{linebreak}{linebreak} (Empty Line)",
start_chars_hint: ["\r", "\n"]
});
exports.tokenList.push(exports.Emptyline);
// Indent and Dedent have no lexable pattern (Lexer.NA); instances are only
// created synthetically by emitIndentOrDedent / emitRemainingDedentTokens.
exports.Indent = createToken({
name: TokenNames_1.TokenNames.INDENT,
pattern: chevrotain.Lexer.NA
});
exports.tokenList.push(exports.Indent);
exports.Dedent = createToken({
name: TokenNames_1.TokenNames.DEDENT,
pattern: chevrotain.Lexer.NA
});
exports.tokenList.push(exports.Dedent);
// "[Title]:" — defines a statement.
exports.StatementDefinition = createToken({
name: TokenNames_1.TokenNames.STATEMENT_DEFINITION,
pattern: /\[.+?\]\:/,
label: "[Statement Title]: (Statement Definition)"
});
exports.tokenList.push(exports.StatementDefinition);
// "[Title]" — references a statement. NOTE(review): the [^-] guard excludes
// titles starting with "-"; presumably to avoid clashing with other syntax —
// confirm before changing.
exports.StatementReference = createToken({
name: TokenNames_1.TokenNames.STATEMENT_REFERENCE,
pattern: /\[[^-].*?\]/,
label: "[Statement Title] (Statement Reference)"
});
exports.tokenList.push(exports.StatementReference);
// "@[Title]" — mentions a statement; an optional trailing space/tab is consumed.
exports.StatementMention = createToken({
name: TokenNames_1.TokenNames.STATEMENT_MENTION,
pattern: /\@\[.+?\][ \t]?/,
label: "@[Statement Title] (Statement Mention)"
});
exports.tokenList.push(exports.StatementMention);
// Matches "(n)" statement numbers. Only allowed at the start of the input,
// after an Emptyline, or after a Newline; on success every open indentation
// level is closed first.
const statementNumberPattern = /^[' '\t]*\(\d+\)/;
const matchStatementNumber = (text, offset, tokens) => {
    const previousToken = (0, lodash_last_1.default)(tokens);
    const startsLine = (0, utils_1.arrayIsEmpty)(tokens) ||
        (previousToken && tokenMatcher(previousToken, exports.Emptyline)) ||
        lastTokenIsNewline(previousToken);
    if (!startsLine) {
        return null;
    }
    const match = statementNumberPattern.exec(text.substr(offset || 0));
    if (match === null) {
        return null;
    }
    emitRemainingDedentTokens(tokens);
    return match;
};
exports.StatementNumber = createToken({
name: TokenNames_1.TokenNames.STATEMENT_NUMBER,
pattern: matchStatementNumber,
line_breaks: true,
label: "(Number) (Statement Number)",
start_chars_hint: [" ", "\t", "("]
});
exports.tokenList.push(exports.StatementNumber);
// "<Title>:" — defines an argument.
exports.ArgumentDefinition = createToken({
name: TokenNames_1.TokenNames.ARGUMENT_DEFINITION,
pattern: /\<.+?\>\:/,
label: "<Argument Title>: (Argument Definition)"
});
exports.tokenList.push(exports.ArgumentDefinition);
// "<Title>" — references an argument. The [^-] guard excludes titles starting
// with "-" (mirrors StatementReference above).
exports.ArgumentReference = createToken({
name: TokenNames_1.TokenNames.ARGUMENT_REFERENCE,
pattern: /\<[^-].*?\>/,
label: "<Argument Title> (Argument Reference)"
});
exports.tokenList.push(exports.ArgumentReference);
// "@<Title>" — mentions an argument; an optional trailing space/tab is consumed.
exports.ArgumentMention = createToken({
name: TokenNames_1.TokenNames.ARGUMENT_MENTION,
pattern: /\@\<.+?\>[ \t]?/,
label: "@<Argument Title> (Argument Mention)"
});
exports.tokenList.push(exports.ArgumentMention);
// Matches "#"-runs that open a heading, but only at the very beginning of the
// input or directly after an Emptyline/Newline. The "# " space is consumed.
const headingPattern = /^(#+)(?: )/;
const matchHeadingStart = (text, offset, tokens) => {
    const previousToken = (0, lodash_last_1.default)(tokens);
    if (previousToken &&
        !tokenMatcher(previousToken, exports.Emptyline) &&
        !tokenMatcher(previousToken, exports.Newline)) {
        return null;
    }
    return headingPattern.exec(text.substr(offset || 0));
};
exports.HeadingStart = createToken({
name: TokenNames_1.TokenNames.HEADING_START,
pattern: matchHeadingStart,
label: "# (Heading Start)",
line_breaks: false,
start_chars_hint: ["#"]
});
exports.tokenList.push(exports.HeadingStart);
// Matches a bold/italic opening delimiter and records the opened range type
// on rangesStack so the matching end delimiter can be validated later.
const matchBoldOrItalicStart = (text, offset, _tokens, _groups, pattern, rangeType) => {
    const match = pattern.exec(text.substr(offset || 0));
    if (match == null) {
        return null;
    }
    rangesStack.push(rangeType);
    return match;
};
/**
 * Matches a bold/italic closing delimiter. Three conditions must hold:
 * 1) the innermost open range (top of rangesStack) has the same rangeType;
 * 2) at least one token has already been matched;
 * 3) no skipped token (Lexer.SKIPPED group, e.g. comments/spaces) lies after
 *    the last matched token — the delimiter must directly follow visible
 *    content. On success the range is popped from rangesStack.
 */
const matchBoldOrItalicEnd = (text, offset, tokens, groups, pattern, rangeType) => {
let lastRange = (0, lodash_last_1.default)(rangesStack);
if (lastRange != rangeType)
return null;
// Tokens routed to the SKIPPED group (comments, spaces) are collected here.
let skipped = groups ? groups[chevrotain.Lexer.SKIPPED] : null;
let lastSkipped = (0, lodash_last_1.default)(skipped);
let lastMatched = (0, lodash_last_1.default)(tokens);
if (!lastMatched ||
(lastSkipped && lastSkipped.endOffset > lastMatched.endOffset)) {
return null;
}
let remainingText = text.substr(offset || 0);
let match = pattern.exec(remainingText);
if (match != null) {
rangesStack.pop();
return match;
}
return null;
};
// Start delimiters must not be followed by whitespace (lookahead). End
// delimiters either consume one trailing space/tab or must precede a line
// break, punctuation, "*", "_" or end of input (lookahead, not consumed).
const matchAsteriskBoldStart = (0, lodash_partialright_1.default)(matchBoldOrItalicStart, /^\*\*(?!\s)/, "AsteriskBold");
const matchAsteriskBoldEnd = (0, lodash_partialright_1.default)(matchBoldOrItalicEnd, /^\*\*(?:[ \t]|(?=\n|\r|\)|\}|\_|\.|,|!|\?|;|:|-|\*|$))/, "AsteriskBold");
const matchUnderscoreBoldStart = (0, lodash_partialright_1.default)(matchBoldOrItalicStart, /^__(?!\s)/, "UnderscoreBold");
const matchUnderscoreBoldEnd = (0, lodash_partialright_1.default)(matchBoldOrItalicEnd, /^__(?:[ \t]|(?=\n|\r|\)|\}|\_|\.|,|!|\?|;|:|-|\*|$))/, "UnderscoreBold");
const matchAsteriskItalicStart = (0, lodash_partialright_1.default)(matchBoldOrItalicStart, /^\*(?!\s)/, "AsteriskItalic");
const matchAsteriskItalicEnd = (0, lodash_partialright_1.default)(matchBoldOrItalicEnd, /^\*(?:[ \t]|(?=\n|\r|\)|\}|\_|\.|,|!|\?|;|:|-|\*|$))/, "AsteriskItalic");
const matchUnderscoreItalicStart = (0, lodash_partialright_1.default)(matchBoldOrItalicStart, /^\_(?!\s)/, "UnderscoreItalic");
const matchUnderscoreItalicEnd = (0, lodash_partialright_1.default)(matchBoldOrItalicEnd, /^\_(?:[ \t]|(?=\n|\r|\)|\}|\_|\.|,|!|\?|;|:|-|\*|$))/, "UnderscoreItalic");
// Bold/italic delimiter tokens. Note the lexer config below tries the End
// variants before the Start variants, and the custom matchers consult
// rangesStack, so an End only matches inside a matching open range.
exports.AsteriskBoldStart = createToken({
name: TokenNames_1.TokenNames.ASTERISK_BOLD_START,
pattern: matchAsteriskBoldStart,
label: "** (Bold Start)",
line_breaks: false,
start_chars_hint: ["*"]
});
exports.tokenList.push(exports.AsteriskBoldStart);
exports.AsteriskBoldEnd = createToken({
name: TokenNames_1.TokenNames.ASTERISK_BOLD_END,
pattern: matchAsteriskBoldEnd,
label: "** (Bold End)",
line_breaks: false,
start_chars_hint: ["*"]
});
exports.tokenList.push(exports.AsteriskBoldEnd);
exports.UnderscoreBoldStart = createToken({
name: TokenNames_1.TokenNames.UNDERSCORE_BOLD_START,
pattern: matchUnderscoreBoldStart,
label: "__ (Bold Start)",
line_breaks: false,
start_chars_hint: ["_"]
});
exports.tokenList.push(exports.UnderscoreBoldStart);
exports.UnderscoreBoldEnd = createToken({
name: TokenNames_1.TokenNames.UNDERSCORE_BOLD_END,
pattern: matchUnderscoreBoldEnd,
label: "__ (Bold End)",
line_breaks: false,
start_chars_hint: ["_"]
});
exports.tokenList.push(exports.UnderscoreBoldEnd);
exports.AsteriskItalicStart = createToken({
name: TokenNames_1.TokenNames.ASTERISK_ITALIC_START,
pattern: matchAsteriskItalicStart,
label: "* (Italic Start)",
line_breaks: false,
start_chars_hint: ["*"]
});
exports.tokenList.push(exports.AsteriskItalicStart);
exports.AsteriskItalicEnd = createToken({
name: TokenNames_1.TokenNames.ASTERISK_ITALIC_END,
pattern: matchAsteriskItalicEnd,
label: "* (Italic End)",
line_breaks: false,
start_chars_hint: ["*"]
});
exports.tokenList.push(exports.AsteriskItalicEnd);
exports.UnderscoreItalicStart = createToken({
name: TokenNames_1.TokenNames.UNDERSCORE_ITALIC_START,
pattern: matchUnderscoreItalicStart,
label: "_ (Italic Start)",
line_breaks: false,
start_chars_hint: ["_"]
});
exports.tokenList.push(exports.UnderscoreItalicStart);
exports.UnderscoreItalicEnd = createToken({
name: TokenNames_1.TokenNames.UNDERSCORE_ITALIC_END,
pattern: matchUnderscoreItalicEnd,
label: "_ (Italic End)",
line_breaks: false,
start_chars_hint: ["_"]
});
exports.tokenList.push(exports.UnderscoreItalicEnd);
// Sticky (/y) comment patterns for <!-- -->, /* */ and // comments.
// lastIndex is explicitly set to the match offset before every exec, so the
// stateful lastIndex of the /y flag is safe here.
const commentPattern = /(?:<!--(?:.|\n|\r)*?-->)|(?:\/\*(?:.|\n|\r)*?\*\/)|(?:\/\/.*?(?=\r\n|\n|\r))/y;
const commentWithTrailingLinebreaksPattern = /(?:(?:<!--(?:.|\n|\r)*?-->)|(?:\/\*(?:.|\n|\r)*?\*\/)|(?:\/\/.*?(?=\r\n|\n|\r)))(?:[ \t]*\r\n|\n|\r)*/y;
const commentWithOneTrailingLinebreakPattern = /(?:(?:<!--(?:.|\n|\r)*?-->)|(?:\/\*(?:.|\n|\r)*?\*\/)|(?:\/\/.*?(?=\r\n|\n|\r)))(?:[ \t]*(?:\r\n|\n|\r)(?!([ \t]*(\r\n|\n|\r))))?/y;
/**
 * Picks a comment pattern based on the previous token: after an Emptyline all
 * trailing blank lines are swallowed with the comment; after a Newline at
 * most one trailing line break is swallowed (and only if it would not form an
 * empty line); otherwise only the comment itself is consumed.
 */
const matchComment = (text, offset, tokens) => {
let lastToken = (0, lodash_last_1.default)(tokens);
if (lastToken && tokenMatcher(lastToken, exports.Emptyline)) {
commentWithTrailingLinebreaksPattern.lastIndex = offset || 0;
return commentWithTrailingLinebreaksPattern.exec(text);
}
else if (lastToken && tokenMatcher(lastToken, exports.Newline)) {
commentWithOneTrailingLinebreakPattern.lastIndex = offset || 0;
return commentWithOneTrailingLinebreakPattern.exec(text);
}
else {
commentPattern.lastIndex = offset || 0;
return commentPattern.exec(text);
}
};
// Comments are matched but routed to the SKIPPED group, i.e. dropped from the
// token stream (matchBoldOrItalicEnd still sees them via the skipped group).
exports.Comment = createToken({
name: TokenNames_1.TokenNames.COMMENT,
pattern: matchComment,
group: chevrotain.Lexer.SKIPPED,
start_chars_hint: ["/", "<"],
label: "// or /**/ or <!-- --> (Comment)",
line_breaks: true
});
exports.tokenList.push(exports.Comment);
// Markdown-style link: [Title](Url), optional trailing space/tab consumed.
exports.Link = createToken({
name: TokenNames_1.TokenNames.LINK,
pattern: /\[[^\]]+?\]\([^\)]+?\)[ \t]?/,
label: "[Title](Url) (Link)"
});
exports.tokenList.push(exports.Link);
// "#tag-text" or "#(tag text)"; the Unicode ranges admit non-ASCII tag names.
exports.Tag = createToken({
name: TokenNames_1.TokenNames.TAG,
pattern: /#(?:\([^\)]+\)|[a-zA-z0-9-\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+)[ \t]?/,
label: "#tag-text or #(tag text) (Tag)"
});
exports.tokenList.push(exports.Tag);
// A single line break (with any trailing spaces of the previous line).
exports.Newline = createToken({
name: TokenNames_1.TokenNames.NEWLINE,
pattern: /[ \t]*(?:\r\n|\n|\r)/,
line_breaks: true,
label: "{linebreak} (New Line)"
});
exports.tokenList.push(exports.Newline);
// Inline whitespace is skipped entirely.
exports.Spaces = createToken({
name: TokenNames_1.TokenNames.SPACES,
pattern: /( |\t)+/,
group: chevrotain.Lexer.SKIPPED
});
exports.tokenList.push(exports.Spaces);
// "\x" — escapes any single character; trailing spaces are consumed too.
exports.EscapedChar = createToken({
name: TokenNames_1.TokenNames.ESCAPED_CHAR,
pattern: /\\.(?: )*/,
label: "\\{character} (Escaped Character)"
});
exports.tokenList.push(exports.EscapedChar);
// ".name." or ":name:" special-character syntax.
exports.SpecialChar = createToken({
name: TokenNames_1.TokenNames.SPECIAL_CHAR,
pattern: /(?:\.[^\s]+?\.[ \t]?)|(?:\:[^\s]+?\:[ \t]?)/,
label: ".{name}. or :{name}: (Special Character)"
});
exports.tokenList.push(exports.SpecialChar);
// Catch-all text: any run of characters that are not Argdown control chars.
exports.Freestyle = createToken({
name: TokenNames_1.TokenNames.FREESTYLE,
pattern: /[^\\\@\#\*\_\[\]\,\.\:\;\<\/\>\-\r\n\(\)\{\}]+/,
line_breaks: true,
label: "Text Content"
});
exports.tokenList.push(exports.Freestyle);
// Fallback: a single leftover control character treated as plain text.
exports.UnusedControlChar = createToken({
name: TokenNames_1.TokenNames.UNUSED_CONTROL_CHAR,
pattern: /[\@\#\*\_\[\]\,\.\:\;\<\/\>\-\(\)\{\}][ \t]?/,
label: "Text Content (Control Characters)"
});
exports.tokenList.push(exports.UnusedControlChar);
// Re-export chevrotain's EOF token for consumers.
exports.EOF = chevrotain.EOF;
// Lexer configuration. Within each mode the token order defines matching
// priority (chevrotain tries them in sequence; first match wins) — e.g. the
// bold/italic End tokens are tried before the Start tokens. inference_mode is
// entered via InferenceStart (push_mode) and left via InferenceEnd (pop_mode).
const lexerConfig = {
modes: {
default_mode: [
exports.Comment,
exports.FrontMatter,
exports.Data,
exports.EscapedChar,
exports.SpecialChar,
exports.Emptyline,
exports.Newline,
exports.Dedent,
exports.Indent,
exports.InferenceStart,
exports.IncomingSupport,
exports.IncomingAttack,
exports.OutgoingSupport,
exports.OutgoingAttack,
exports.Contradiction,
exports.IncomingUndercut,
exports.OutgoingUndercut,
exports.HeadingStart,
exports.StatementNumber,
exports.OrderedListItem,
exports.UnorderedListItem,
exports.AsteriskBoldEnd,
exports.UnderscoreBoldEnd,
exports.AsteriskItalicEnd,
exports.UnderscoreItalicEnd,
exports.AsteriskBoldStart,
exports.UnderscoreBoldStart,
exports.AsteriskItalicStart,
exports.UnderscoreItalicStart,
exports.Link,
exports.Tag,
exports.StatementDefinition,
exports.StatementReference,
exports.StatementMention,
exports.ArgumentDefinition,
exports.ArgumentReference,
exports.ArgumentMention,
exports.Spaces,
exports.Freestyle,
exports.UnusedControlChar
],
inference_mode: [
exports.Comment,
exports.Newline,
exports.EscapedChar,
exports.SpecialChar,
exports.InferenceEnd,
exports.Data,
exports.ListDelimiter,
exports.Spaces,
exports.Freestyle,
exports.UnusedControlChar
]
},
defaultMode: "default_mode"
};
// Singleton lexer instance shared by every tokenize() call.
const lexer = new chevrotain.Lexer(lexerConfig);
/**
 * Tokenizes an Argdown document.
 * Resets the module-level lexer state, runs the chevrotain lexer, drops a
 * trailing Emptyline token (it carries no structural information), and closes
 * any indentation levels still open at the end of the input.
 * @param {string} text - the Argdown source text
 * @returns the chevrotain lex result ({ tokens, groups, errors })
 * @throws {Error} when the lexer reports errors; the message now includes the
 * underlying lexer diagnostics instead of the old placeholder text.
 */
const tokenize = (text) => {
    init();
    const lexResult = lexer.tokenize(text);
    if (lexResult.errors && lexResult.errors.length > 0) {
        // Surface the lexer's own diagnostics rather than a generic message.
        const details = lexResult.errors.map((e) => e.message).join("; ");
        throw new Error("Lexing errors detected: " + details);
    }
    const lastToken = (0, lodash_last_1.default)(lexResult.tokens);
    if (lastToken && tokenMatcher(lastToken, exports.Emptyline)) {
        lexResult.tokens.pop();
    }
    emitRemainingDedentTokens(lexResult.tokens);
    return lexResult;
};
exports.tokenize = tokenize;
//# sourceMappingURL=lexer.js.map