mathup
Version:
Easy MathML authoring tool with a quick-to-write syntax
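No usage snippet is shown on this page, so here is a minimal sketch of how the compiler might be called. The API shape is an assumption inferred from the to-string.js and to-dom.js render modules listed in the source map below; verify the exact signature against the package README.

// Minimal usage sketch (assumed API; verify against the mathup README).
import mathup from "mathup";

// Compile a quick-to-write expression into MathML.
const expression = mathup("sum_(n=1)^k n = (k(k+1))/2");

// Serialize to a MathML string (e.g. for server-side rendering) ...
const mathml = expression.toString();

// ... or, in a browser, render directly to MathML DOM nodes.
document.body.appendChild(expression.toDOM());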
1 line • 202 kB
Source Map (JSON)
{"version":3,"file":"mathup.cjs","sources":["../../src/compiler/parser/utils.js","../../src/compiler/parser/handlers/multiscripts.js","../../src/compiler/parser/handlers/group.js","../../src/compiler/parser/handlers/command.js","../../src/compiler/parser/handlers/prefix.js","../../src/compiler/parser/handlers/space.js","../../src/compiler/parser/handlers/index.js","../../src/compiler/parser/handlers/infix.js","../../src/compiler/parser/handlers/term.js","../../src/compiler/parser/handlers/expr.js","../../src/compiler/parser/parse.js","../../src/compiler/renders/to-dom.js","../../src/compiler/renders/to-string.js","../../src/compiler/renders/update-dom.js","../../src/compiler/tokenizer/lexemes.js","../../src/compiler/tokenizer/scanners/utils.js","../../src/compiler/tokenizer/scanners/alpha.js","../../src/compiler/tokenizer/scanners/backslash.js","../../src/compiler/tokenizer/scanners/backtick.js","../../src/compiler/tokenizer/scanners/group-sep.js","../../src/compiler/tokenizer/scanners/infix.js","../../src/compiler/tokenizer/scanners/newline.js","../../src/compiler/tokenizer/scanners/number.js","../../src/compiler/tokenizer/scanners/octothorpe.js","../../src/compiler/tokenizer/scanners/operator.js","../../src/compiler/tokenizer/scanners/paren-close.js","../../src/compiler/tokenizer/scanners/paren-open.js","../../src/compiler/tokenizer/scanners/quote.js","../../src/compiler/tokenizer/scanners/space.js","../../src/compiler/tokenizer/scanners/unhandled.js","../../src/compiler/tokenizer/scanners/index.js","../../src/compiler/tokenizer/index.js","../../src/compiler/transformer/transforms/fenced-group.js","../../src/compiler/transformer/transforms/literal.js","../../src/compiler/transformer/transforms/matrix-group.js","../../src/compiler/transformer/transforms/multiscripts.js","../../src/compiler/transformer/transforms/operation.js","../../src/compiler/transformer/transforms/sentence.js","../../src/compiler/transformer/transforms/space-literal.js","../../src/compiler/transformer/transforms/styles.js","../../src/compiler/transformer/transforms/term.js","../../src/compiler/transformer/transforms/text-transforms.js","../../src/compiler/transformer/transforms/unary-operation.js","../../src/compiler/transformer/transforms/index.js","../../src/compiler/transformer/index.js","../../src/compiler/index.js","../../src/index.js"],"sourcesContent":["/**\n * @typedef {import(\"../tokenizer/index.js\").Token} Token\n * @typedef {import(\"./index.js\").Node} Node\n */\n\n/**\n * @param {Token} token\n * @returns {boolean}\n */\nexport function isPipeOperator(token) {\n return token.type === \"operator\" && token.value === \"|\";\n}\n\n/**\n * @param {Token} token\n * @returns {boolean}\n */\nexport function isDoublePipeOperator(token) {\n return token.type === \"operator\" && token.value === \"∥\";\n}\n\n/**\n * Double pipe defaults to the parallel-to character which is behaves\n * wrong when used as a fence.\n * @param {Node[]} items\n * @returns {void}\n */\nexport function maybeFixDoublePipe(items) {\n if (items.length < 2) {\n return;\n }\n\n const first = items.at(0);\n if (first?.type === \"OperatorLiteral\" && first.value === \"∥\") {\n first.value = \"‖\";\n }\n\n const last = items.at(-1);\n if (last?.type === \"OperatorLiteral\" && last.value === \"∥\") {\n last.value = \"‖\";\n }\n}\n\n/**\n * @param {Node} node\n * @returns {void}\n */\nexport function addZeroLSpaceToOperator(node) {\n let first = node;\n while (\n first &&\n (first.type === \"Term\" ||\n first.type === \"UnaryOperation\" ||\n 
first.type === \"BinaryOperation\" ||\n first.type === \"TernaryOperation\")\n ) {\n [first] = first.items;\n }\n\n if (!first) {\n return;\n }\n\n if (first.type === \"OperatorLiteral\") {\n if (!first.attrs) {\n first.attrs = {};\n }\n\n if (typeof first.attrs.lspace === \"undefined\") {\n first.attrs.lspace = 0;\n }\n }\n}\n","import expr from \"./expr.js\";\n\n/**\n * @typedef {import(\"../../tokenizer/index.js\").Token} Token\n * @typedef {import(\"../index.js\").Node} Node\n * @typedef {import(\"../index.js\").Term} Term\n * @typedef {import(\"../index.js\").MultiScripts} MultiScripts\n */\n\n/** @returns {Term} */\nfunction empty() {\n return { type: \"Term\", items: [] };\n}\n\n/**\n * @param {Token} token\n * @returns {boolean}\n */\nfunction isIndexBreak(token) {\n if (token.type === \"sep.row\" || token.type === \"sep.col\") {\n return true;\n }\n\n if (token.type !== \"infix\") {\n return false;\n }\n\n return token.value === \"sub\" || token.value === \"sup\";\n}\n\n/**\n * @param {Node[] | null} nodes\n * @returns {Node[]}\n */\nfunction prepareScript(nodes) {\n if (!nodes) {\n return [];\n }\n\n if (nodes.at(-1)?.type === \"SpaceLiteral\") {\n // ignore trailing space\n nodes.pop();\n }\n\n if (nodes.length !== 1) {\n return nodes;\n }\n\n const [node] = nodes;\n\n if (node.type !== \"FencedGroup\" || node.items.length !== 1) {\n return nodes;\n }\n\n const [cell] = node.items;\n\n if (cell.length === 1) {\n const [first] = cell;\n const term =\n first.type === \"Term\" && first.items.length === 1\n ? first.items[0]\n : first;\n\n if (term.type.endsWith(\"Literal\")) {\n // We fenced a single item for a reason, lets keep them.\n return nodes;\n }\n }\n return [\n {\n type: \"Term\",\n items: cell,\n },\n ];\n}\n\n/**\n * Parse the series of sub- and sup indices of the multiscript. Note\n * we assume the first two tokens have already been checked.\n *\n * @param {import(\"../parse.js\").State} state\n * @returns {{ scripts: [Node[], Node[]][], end: number }}\n */\nfunction parseScripts(state) {\n let i = state.start + 1;\n let token = state.tokens.at(i);\n\n /** @type {[Node[], Node[]][]} */\n const scripts = [];\n /** @type {Node[] | null} */\n let sub = null;\n /** @type {Node[] | null} */\n let sup = null;\n\n /**\n * @returns {void}\n */\n function commit() {\n if ((sub && sub.length > 0) || (sup && sup.length > 0)) {\n scripts.push([prepareScript(sub), prepareScript(sup)]);\n }\n\n sub = null;\n sup = null;\n }\n\n // Remember previous position to allow repeat positions.\n let position = token?.value ?? 
\"sub\";\n\n while (token && isIndexBreak(token)) {\n if (token.type === \"infix\") {\n // Update current position\n position = token.value;\n }\n\n i += 1;\n token = state.tokens[i];\n\n if (token && token.type === \"space\") {\n i += 1;\n token = state.tokens[i];\n }\n\n /** @type {Node[]} */\n const items = [];\n while (token && token.type !== \"paren.close\" && !isIndexBreak(token)) {\n const next = expr({\n ...state,\n start: i,\n stack: [],\n nestLevel: state.nestLevel + 1,\n stopAt(other) {\n return (\n other.type === \"infix\" &&\n (other.value === \"sub\" || other.value === \"sup\")\n );\n },\n });\n\n items.push(next.node);\n i = next.end;\n token = state.tokens[i];\n }\n\n if (position === \"sup\") {\n if (sup) {\n commit();\n }\n sup = items;\n } else {\n if (sub) {\n commit();\n }\n sub = items;\n }\n }\n\n if (sub || sup) {\n commit();\n }\n\n if (token?.type === \"paren.close\") {\n i += 1;\n }\n\n return {\n scripts,\n end: i,\n };\n}\n\n/**\n * Parse a multiscript. Note that we assume the first two tokens been\n * checked.\n *\n * @param {import(\"../parse.js\").State} state\n * @returns {{ node: MultiScripts, end: number }}\n */\nexport default function multiscripts(state) {\n let { scripts, end: i } = parseScripts(state);\n let token = state.tokens.at(i);\n\n /** @type {Node | undefined} */\n let base;\n /** @type {[Node[], Node[]][] | undefined} */\n let prescripts;\n\n if (!token || token.type === \"space\") {\n // There is nothing after the already parsed scripts. Apply as postscripts.\n base = state.stack.pop();\n if (base?.type === \"SpaceLiteral\") {\n base = empty();\n }\n } else {\n // existing scripts are prescripts. See if there are postscripts.\n prescripts = scripts;\n scripts = [];\n\n const next = expr({\n ...state,\n start: i,\n stack: [],\n nestLevel: state.nestLevel + 1,\n stopAt(other) {\n return other.type === \"paren.open\";\n },\n });\n\n base = next.node;\n i = next.end;\n token = state.tokens[i];\n\n if (token?.type === \"paren.open\") {\n const nextToken = state.tokens.at(i + 1);\n\n if (\n nextToken?.type === \"infix\" &&\n (nextToken.value === \"sub\" || nextToken.value === \"sup\")\n ) {\n ({ scripts, end: i } = parseScripts({ ...state, start: i }));\n }\n }\n }\n\n /** @type {MultiScripts} */\n const node = {\n type: \"MultiScripts\",\n base: base ?? 
empty(),\n post: scripts,\n };\n\n if (prescripts) {\n node.pre = prescripts;\n }\n\n return {\n node,\n end: i,\n };\n}\n","import { addZeroLSpaceToOperator } from \"../utils.js\";\n\nimport expr from \"./expr.js\";\nimport multiscripts from \"./multiscripts.js\";\n\n/**\n * @typedef {import(\"../../tokenizer/index.js\").Token} Token\n * @typedef {import(\"../index.js\").Node} Node\n * @typedef {import(\"../index.js\").FencedGroup} FencedGroup\n * @typedef {import(\"../index.js\").MatrixGroup} MatrixGroup\n * @typedef {import(\"../index.js\").MultiScripts} MultiScripts\n * @typedef {import(\"../index.js\").LiteralAttrs} LiteralAttrs\n */\n\n/**\n * @param {Token} token\n * @returns {Omit<Token, \"type\">}\n */\nfunction omitType(token) {\n const { type: _type, ...rest } = token;\n\n return rest;\n}\n\n/**\n * @param {import(\"../parse.js\").State} state\n * @returns {{ node: FencedGroup | MatrixGroup | MultiScripts, end: number }}\n */\nexport default function group(state) {\n let i = state.start;\n let token = state.tokens[i];\n\n const open = token;\n\n /** @type {{ value: string, attrs?: LiteralAttrs }[]} */\n const seps = [];\n\n /** @type {Node[]} */\n let cell = [];\n\n /** @type {Node[][]} */\n let cols = [];\n\n /** @type {Node[][][]} */\n const rows = [];\n\n i += 1;\n token = state.tokens[i];\n\n if (token && token.type === \"space\") {\n // Ignore leading space.\n i += 1;\n token = state.tokens[i];\n }\n\n if (\n token &&\n token.type === \"infix\" &&\n (token.value === \"sub\" || token.value === \"sup\")\n ) {\n return multiscripts({ ...state, start: i - 1 });\n }\n\n while (token && token.type !== \"paren.close\") {\n if (token.type === \"space\" && token.value === \" \") {\n // No need to add tokens which don’t render elements to our\n // cell.\n i += 1;\n token = state.tokens[i];\n\n continue;\n }\n\n if (token.type === \"sep.col\") {\n /** @type {{ value: string, attrs?: LiteralAttrs }} */\n const sepToken = { value: token.value };\n if (token.attrs) {\n sepToken.attrs = token.attrs;\n }\n\n seps.push(sepToken);\n cols.push(cell);\n cell = [];\n i += 1;\n token = state.tokens[i];\n\n // Ignore leading space.\n if (token && token.type === \"space\") {\n i += 1;\n token = state.tokens[i];\n }\n\n continue;\n }\n\n if (token.type === \"sep.row\") {\n cols.push(cell);\n rows.push(cols);\n cell = [];\n cols = [];\n i += 1;\n token = state.tokens[i];\n\n // Ignore leading space.\n if (token && token.type === \"space\") {\n i += 1;\n token = state.tokens[i];\n }\n\n continue;\n }\n\n if (cell.length === 1) {\n // If first element is an operator it may throw alignment out\n // with its implicit lspace.\n addZeroLSpaceToOperator(cell[0]);\n }\n\n const next = expr({\n ...state,\n start: i,\n stack: cell,\n nestLevel: state.nestLevel + 1,\n });\n\n cell.push(next.node);\n i = next.end;\n token = state.tokens[i];\n }\n\n if (cell.length > 0) {\n cols.push(cell);\n }\n\n const end = i + 1;\n const close = token && token.type === \"paren.close\" ? token : null;\n\n const attrs = {\n open: omitType(open),\n close: close ? 
omitType(close) : null,\n seps,\n };\n\n if (attrs.close?.value === \"|\" && !open.value) {\n // Add a small space before the \"evaluate at\" operator\n if (!attrs.close.attrs) {\n attrs.close.attrs = {};\n }\n attrs.close.attrs.lspace = \"0.35ex\";\n }\n\n if (rows.length === 0) {\n return {\n node: { type: \"FencedGroup\", items: cols, attrs },\n end,\n };\n }\n\n const rowItems = rows;\n\n if (cols.length > 0) {\n rowItems.push(cols);\n }\n\n return {\n node: { type: \"MatrixGroup\", items: rowItems, attrs },\n end,\n };\n}\n","import { isDoublePipeOperator, isPipeOperator } from \"../utils.js\";\n\nimport expr from \"./expr.js\";\n\n/**\n * @typedef {import(\"../../tokenizer/index.js\").Token} Token\n * @typedef {import(\"../index.js\").Node} Node\n * @typedef {import(\"../index.js\").UnaryOperation} UnaryOperation\n * @typedef {import(\"../index.js\").BinaryOperation} BinaryOperation\n * @typedef {import(\"../index.js\").TernaryOperation} TernaryOperation\n * @typedef {UnaryOperation | BinaryOperation | TernaryOperation} Operation\n * @typedef {import(\"../index.js\").Term} Term\n * @typedef {import(\"../parse.js\").State} State\n */\n\n/**\n * @param {Node} node\n * @param {string[]} transforms\n * @returns {Operation | Term}\n */\nfunction insertTransformNode(node, transforms) {\n if (node.type === \"Term\" && node.items.length > 0) {\n // Only apply transform to first node.\n const [first, ...rest] = node.items;\n return {\n ...node,\n items: [insertTransformNode(first, transforms), ...rest],\n };\n }\n\n if (node.type === \"BinaryOperation\") {\n const [left, right] = node.items;\n return {\n ...node,\n items: [insertTransformNode(left, transforms), right],\n };\n }\n\n if (node.type === \"TernaryOperation\") {\n const [a, b, c] = node.items;\n return {\n ...node,\n items: [insertTransformNode(a, transforms), b, c],\n };\n }\n\n return {\n type: \"UnaryOperation\",\n name: \"command\",\n transforms,\n items: [node],\n };\n}\n\n/**\n * @param {State} state\n * @returns {((token: Token) => boolean) | undefined}\n */\nfunction maybeStopAtPipe({ start, tokens, stack, stopAt }) {\n if (stopAt) {\n return stopAt;\n }\n\n if (stack.length !== 1) {\n return undefined;\n }\n\n const lastToken = start > 0 ? 
tokens[start - 1] : undefined;\n if (!lastToken || lastToken.type !== \"operator\") {\n return undefined;\n }\n\n if (lastToken.value === \"|\") {\n return isPipeOperator;\n }\n\n if (lastToken.value === \"∥\") {\n return isDoublePipeOperator;\n }\n\n return undefined;\n}\n\n/**\n * @param {State} state\n * @returns {{ node: Operation | Term; end: number }}\n */\nexport default function command(state) {\n const token = state.tokens[state.start];\n\n if (!token.name) {\n throw new Error(\"Got command token without a name\");\n }\n\n /** @type {string[]} */\n const textTransforms = [];\n\n /** @type {Map<string, string>} */\n const styles = new Map();\n\n /**\n * @param {Token} token\n * @returns {void}\n */\n function handleCommandToken({ name, value }) {\n if (!value) {\n return;\n }\n\n if (name === \"text-transform\") {\n textTransforms.push(value);\n } else if (name) {\n styles.set(name, value);\n }\n }\n\n const stopAt = maybeStopAtPipe(state);\n handleCommandToken(token);\n\n let pos = state.start + 1;\n let nextToken = state.tokens[pos];\n while (\n nextToken &&\n (nextToken.type === \"command\" || nextToken.type === \"space\")\n ) {\n if (nextToken.type === \"command\") {\n handleCommandToken(nextToken);\n }\n\n pos += 1;\n nextToken = state.tokens[pos];\n }\n\n const next = expr({\n ...state,\n stack: [],\n start: pos,\n nestLevel: state.nestLevel + 1,\n textTransforms,\n stopAt,\n });\n\n if (textTransforms.length === 0) {\n // Only apply styles.\n return {\n node: {\n type: \"UnaryOperation\",\n name: \"command\",\n styles,\n items: [next.node],\n },\n end: next.end,\n };\n }\n\n const node = insertTransformNode(next.node, textTransforms);\n\n if (styles.size > 0) {\n return {\n node: {\n type: \"UnaryOperation\",\n name: \"command\",\n styles,\n items: [node],\n },\n end: next.end,\n };\n }\n\n return {\n node,\n end: next.end,\n };\n}\n","import { isDoublePipeOperator, isPipeOperator } from \"../utils.js\";\n\nimport expr from \"./expr.js\";\n\n/**\n * @typedef {import(\"../../tokenizer/index.js\").Token} Token\n * @typedef {import(\"../index.js\").Node} Node\n * @typedef {import(\"../index.js\").UnaryOperation} UnaryOperation\n * @typedef {import(\"../index.js\").BinaryOperation} BinaryOperation\n * @typedef {import(\"../parse.js\").State} State\n */\n\n/**\n * @param {Node[]} nodes\n * @returns {Node}\n */\nfunction toTermOrUnwrap(nodes) {\n if (nodes.length === 1) {\n return nodes[0];\n }\n\n return { type: \"Term\", items: nodes };\n}\n\n/**\n * @param {State} state\n * @returns {((token: Token) => boolean) | undefined}\n */\nfunction maybeStopAtPipe({ start, tokens, stack, stopAt }) {\n if (stopAt) {\n return stopAt;\n }\n\n if (stack.length !== 1) {\n return undefined;\n }\n\n const token = tokens[start];\n if (!token || (token.arity && token.arity !== 1)) {\n return undefined;\n }\n\n const lastToken = start > 0 ? 
tokens[start - 1] : undefined;\n if (!lastToken || lastToken.type !== \"operator\") {\n return undefined;\n }\n\n if (lastToken.value === \"|\") {\n return isPipeOperator;\n }\n\n if (lastToken.value === \"∥\") {\n return isDoublePipeOperator;\n }\n\n return undefined;\n}\n\n/**\n * @param {import(\"../parse.js\").State} state\n * @returns {{ node: UnaryOperation | BinaryOperation; end: number }}\n */\nexport default function prefix(state) {\n const { tokens, start } = state;\n const token = tokens[start];\n const nestLevel = state.nestLevel + 1;\n\n if (!token.name) {\n throw new Error(\"Got prefix token without a name\");\n }\n\n const stopAt = maybeStopAtPipe(state);\n\n let next = expr({\n ...state,\n stack: [],\n start: start + 1,\n nestLevel,\n stopAt,\n });\n if (next && next.node && next.node.type === \"SpaceLiteral\") {\n next = expr({\n ...state,\n stack: [],\n start: next.end,\n nestLevel,\n stopAt,\n });\n }\n\n // XXX: Arity > 2 not implemented.\n if (token.arity === 2) {\n if (\n next &&\n next.node &&\n next.node.type === \"FencedGroup\" &&\n next.node.items.length === 2\n ) {\n const [first, second] = next.node.items;\n /** @type {[Node, Node]} */\n const items =\n token.name === \"root\"\n ? [toTermOrUnwrap(second), toTermOrUnwrap(first)]\n : [toTermOrUnwrap(first), toTermOrUnwrap(second)];\n\n return {\n node: {\n type: \"BinaryOperation\",\n name: token.name,\n attrs: token.attrs,\n items,\n },\n end: next.end,\n };\n }\n\n const first = next;\n let second =\n next &&\n expr({\n ...state,\n stack: [],\n start: next.end,\n nestLevel,\n });\n\n if (second && second.node && second.node.type === \"SpaceLiteral\") {\n second = expr({\n ...state,\n stack: [],\n start: second.end,\n nestLevel,\n });\n }\n\n /** @type {BinaryOperation} */\n const node = {\n type: \"BinaryOperation\",\n name: token.name,\n items: [first.node, second.node],\n };\n\n if (token.name === \"root\") {\n node.items = [second.node, first.node];\n }\n\n if (token.attrs) {\n node.attrs = token.attrs;\n }\n\n return {\n node,\n end: second.end,\n };\n }\n\n /** @type {UnaryOperation} */\n const node = {\n type: \"UnaryOperation\",\n name: token.name,\n items: [next.node],\n };\n\n if (token.accent) {\n node.accent = token.accent;\n }\n\n if (token.attrs) {\n node.attrs = token.attrs;\n }\n\n if (\n next &&\n next.node &&\n next.node.type === \"FencedGroup\" &&\n next.node.items.length === 1\n ) {\n // The operand is not a matrix.\n\n node.items = [toTermOrUnwrap(next.node.items[0])];\n }\n\n return {\n node,\n end: next.end,\n };\n}\n","/**\n * @typedef {import(\"../parse.js\").State} State\n * @typedef {import(\"../index.js\").SpaceLiteral} SpaceLiteral\n */\n\n/**\n * @param {number} n - Number of space literals\n * @returns {number} - The width in units of ex\n */\nfunction spaceWidth(n) {\n if (n <= 0) {\n return 0;\n }\n\n if (n <= 3) {\n return 0.35 * (n - 1);\n }\n\n if (n <= 5) {\n return 0.5 * (n - 1);\n }\n\n return n - 3;\n}\n\n/**\n * @param {State} state\n * @returns {{ node: SpaceLiteral, end: number }}\n */\nexport default function space(state) {\n const token = state.tokens[state.start];\n const lineBreak = token.value.startsWith(\"\\n\");\n\n const width = lineBreak ? 
0 : token.value.length;\n\n return {\n node: {\n type: \"SpaceLiteral\",\n attrs: { width: `${spaceWidth(width)}ex` },\n },\n end: state.start + 1,\n };\n}\n","import command from \"./command.js\";\nimport group from \"./group.js\";\nimport infix from \"./infix.js\";\nimport prefix from \"./prefix.js\";\nimport space from \"./space.js\";\n\n/**\n * @typedef {import(\"../../tokenizer/index.js\").TokenType} TokenType\n * @typedef {import(\"../parse.js\").State} State\n * @typedef {import(\"../index.js\").Node} Node\n * @typedef {import(\"../index.js\").Literal} Literal\n * @typedef {(state: State) => { node: Node, end: number }} Handler\n * @typedef {\"Ident\" | \"Number\" | \"Operator\" | \"Text\"} LiteralType\n */\n\n/**\n * @param {LiteralType} type\n * @returns {Handler}\n */\nconst literal =\n (type) =>\n ({ start, tokens }) => {\n const { value, attrs } = tokens[start];\n\n /** @type {Literal} */\n const node = {\n type: `${type}Literal`,\n value,\n };\n\n if (attrs) {\n node.attrs = attrs;\n }\n\n return {\n node,\n end: start + 1,\n };\n };\n\n/** @type {[TokenType, Handler][]} */\nconst handlers = [\n [\"command\", command],\n [\"ident\", literal(\"Ident\")],\n [\"number\", literal(\"Number\")],\n [\"operator\", literal(\"Operator\")],\n [\"text\", literal(\"Text\")],\n [\"infix\", infix],\n [\"paren.open\", group],\n [\"prefix\", prefix],\n [\"space\", space],\n];\n\nexport default new Map(handlers);\n","import { addZeroLSpaceToOperator, maybeFixDoublePipe } from \"../utils.js\";\n\nimport expr from \"./expr.js\";\n\nimport handlers from \"./index.js\";\n\n/**\n * @typedef {import(\"../parse.js\").State} State\n * @typedef {import(\"../index.js\").Node} Node\n * @typedef {import(\"../index.js\").Term} Term\n * @typedef {import(\"../index.js\").BinaryOperation} BinaryOperation\n * @typedef {import(\"../index.js\").TernaryOperation} TernaryOperation\n */\n\n/** @returns {Term} */\nfunction empty() {\n return { type: \"Term\", items: [] };\n}\n\nconst SHOULD_STOP = [\"ident\", \"number\", \"operator\", \"text\"];\n\n/**\n * Remove surrounding brackets.\n *\n * @template {BinaryOperation | TernaryOperation} Operation\n * @param {Operation} node\n * @returns {Operation}\n */\nfunction maybeRemoveFence(node) {\n const mutated = node;\n\n mutated.items.forEach((item, i) => {\n if (item.type !== \"FencedGroup\" || item.items.length !== 1) {\n // No fences to remove.\n return;\n }\n\n if (i === 0 && node.name !== \"frac\") {\n // Keep fences around base in sub- and superscripts.\n return;\n }\n\n const [cell] = item.items;\n\n if (cell.length !== 1) {\n mutated.items[i] = { type: \"Term\", items: cell };\n return;\n }\n\n const [first] = cell;\n const term =\n first.type === \"Term\" && first.items.length === 1\n ? 
first.items[0]\n : first;\n\n if (term.type.endsWith(\"Literal\")) {\n // We fenced a single item for a reason, lets keep them.\n return;\n }\n\n mutated.items[i] = term;\n });\n\n return mutated;\n}\n\n/**\n * Change `lim` to `under`, and `sum` and `prod` to `under` or `over`.\n *\n * @template {BinaryOperation | TernaryOperation} Operation\n * @param {Operation} node\n * @returns {Operation}\n */\nfunction maybeApplyUnderOver(node) {\n const mutated = node;\n const [operator] = node.items;\n\n if (operator.type !== \"OperatorLiteral\") {\n return mutated;\n }\n\n if (\n node.name === \"sub\" &&\n [\"lim\", \"∑\", \"∏\", \"⋂\", \"⋃\", \"⋀\", \"⋁\"].includes(operator.value)\n ) {\n mutated.name = \"under\";\n\n return mutated;\n }\n\n if (\n node.name === \"subsup\" &&\n [\"∑\", \"∏\", \"⋂\", \"⋃\", \"⋀\", \"⋁\"].includes(operator.value)\n ) {\n mutated.name = \"underover\";\n\n return mutated;\n }\n\n return mutated;\n}\n\n/**\n * @template {BinaryOperation | TernaryOperation} Operation\n * @param {Operation} node\n * @returns {Operation}\n */\nfunction fixFracSpacing(node) {\n if (node.name !== \"frac\") {\n return node;\n }\n\n for (const item of node.items) {\n addZeroLSpaceToOperator(item);\n }\n\n return node;\n}\n\n/**\n * @template {BinaryOperation | TernaryOperation} Operation\n * @param {Operation} node\n * @returns {Operation}\n */\nfunction post(node) {\n return fixFracSpacing(maybeRemoveFence(maybeApplyUnderOver(node)));\n}\n\n/**\n * @param {string} op\n * @param {BinaryOperation} left\n * @param {Node} right\n * @returns {BinaryOperation | TernaryOperation}\n */\nfunction maybeTernary(op, left, right) {\n if (left.name === \"sub\" && op === \"sup\") {\n const [base, sub] = left.items;\n\n return {\n type: \"TernaryOperation\",\n name: \"subsup\",\n items: [base, sub, right],\n };\n }\n\n if (left.name === \"sup\" && op === \"sub\") {\n const [base, sup] = left.items;\n\n return {\n type: \"TernaryOperation\",\n name: \"subsup\",\n items: [base, right, sup],\n };\n }\n\n if (left.name === \"under\" && (op === \"over\" || op === \"sup\")) {\n const [base, under] = left.items;\n\n return {\n type: \"TernaryOperation\",\n name: \"underover\",\n items: [base, under, right],\n };\n }\n\n if (left.name === \"over\" && (op === \"under\" || op === \"sub\")) {\n const [base, over] = left.items;\n\n return {\n type: \"TernaryOperation\",\n name: \"underover\",\n items: [base, right, over],\n };\n }\n\n const node = post({\n type: \"BinaryOperation\",\n name: op,\n items: [left, right],\n });\n\n return rightAssociate(node.name, node.items);\n}\n\n/**\n * @param {string} op\n * @param {[Node, Node]} operands\n * @returns {BinaryOperation}\n */\nfunction rightAssociate(op, [left, right]) {\n if (left.type !== \"BinaryOperation\" || op === \"frac\") {\n return {\n type: \"BinaryOperation\",\n name: op,\n items: [left, right],\n };\n }\n\n const [a, b] = left.items;\n\n return {\n type: \"BinaryOperation\",\n name: left.name,\n items: [a, rightAssociate(op, [b, right])],\n };\n}\n\n/**\n * @param {Node[]} nodes\n * @returns {boolean}\n */\nfunction isPipeDelimited(nodes) {\n if (nodes.length < 3) {\n return false;\n }\n\n const open = nodes.at(0);\n const close = nodes.at(-1);\n\n return (\n open?.type === \"OperatorLiteral\" &&\n close?.type === \"OperatorLiteral\" &&\n (open.value === \"|\" || open.value === \"∥\" || open.value === \"‖\") &&\n open.value === close.value\n );\n}\n\n/**\n * @param {State} state\n * @returns {{ node: BinaryOperation | TernaryOperation; end: number }}\n 
*/\nexport default function infix(state) {\n const { tokens, start, stack } = state;\n const nestLevel = state.nestLevel + 1;\n const token = tokens[start];\n\n /** @type {Node | undefined} */\n let left;\n if (isPipeDelimited(stack)) {\n maybeFixDoublePipe(stack);\n left = {\n type: \"Term\",\n items: [...stack],\n };\n stack.splice(0, stack.length);\n } else {\n left = stack.pop();\n\n if (left?.type === \"SpaceLiteral\") {\n left = stack.pop();\n }\n }\n\n if (!left) {\n left = empty();\n }\n\n const nextToken = tokens[start + 1];\n\n let next;\n if (nextToken && SHOULD_STOP.includes(nextToken.type)) {\n const handleRight = handlers.get(nextToken.type);\n\n if (!handleRight) {\n throw new Error(\"Unknown handler\");\n }\n\n next = handleRight({\n ...state,\n stack: [],\n start: start + 1,\n nestLevel,\n });\n } else {\n next = expr({\n ...state,\n stack: [],\n start: start + 1,\n nestLevel,\n });\n }\n\n if (next && next.node && next.node.type === \"SpaceLiteral\") {\n next = expr({\n ...state,\n stack: [],\n start: next.end,\n nestLevel,\n });\n }\n\n const { end, node: right } = next;\n\n if (left.type === \"BinaryOperation\") {\n return {\n end,\n node: post(maybeTernary(token.value, left, right)),\n };\n }\n\n return {\n end,\n node: post({\n type: \"BinaryOperation\",\n name: token.value,\n items: [left, right],\n }),\n };\n}\n","import { maybeFixDoublePipe } from \"../utils.js\";\n\nimport handlers from \"./index.js\";\n\n/**\n * @typedef {import(\"../index.js\").IdentLiteral} IdentLiteral\n * @typedef {import(\"../index.js\").Literal} Literal\n * @typedef {import(\"../index.js\").LiteralAttrs} LiteralAttrs\n * @typedef {import(\"../index.js\").Node} Node\n * @typedef {import(\"../index.js\").OperatorLiteral} OperatorLiteral\n * @typedef {import(\"../index.js\").Term} Term\n * @typedef {import(\"../index.js\").UnaryOperation} UnaryOperation\n * @typedef {import(\"../parse.js\").State} State\n */\n\nconst KEEP_GOING_TYPES = [\n \"command\",\n \"ident\",\n \"infix\",\n \"number\",\n \"operator\",\n \"paren.open\",\n \"prefix\",\n \"text\",\n];\n\n/**\n * @param {Node[]} items\n * @param {string[]} [textTransforms]\n * @returns {void}\n */\nfunction maybeFixDifferential(items, textTransforms) {\n // We may want to make the differnetial d operator an actual\n // operator to fix some spacing during integration.\n if (items.length < 2) {\n return;\n }\n\n const [first, second] = items;\n\n if (first.type !== \"IdentLiteral\" || first.value !== \"d\") {\n return;\n }\n\n let operand = second;\n while (\n operand.type === \"UnaryOperation\" ||\n operand.type === \"BinaryOperation\" ||\n operand.type === \"TernaryOperation\"\n ) {\n [operand] = operand.items;\n }\n\n if (operand.type !== \"IdentLiteral\") {\n return;\n }\n\n const value = (textTransforms?.length ?? 0) > 0 ? first.value : \"𝑑\";\n\n /** @type {OperatorLiteral & { attrs: LiteralAttrs }} */\n const node = {\n ...items[0],\n type: \"OperatorLiteral\",\n value,\n attrs: {\n ...(first.attrs ?? 
{}),\n rspace: \"0\",\n },\n };\n\n items[0] = node;\n}\n\n/**\n * @param {State} state\n * @returns {{ node: Term; end: number }}\n */\nexport default function term(state) {\n let i = state.start;\n let token = state.tokens[i];\n /** @type {Node[]} */\n const items = [];\n\n while (\n token &&\n KEEP_GOING_TYPES.includes(token.type) &&\n // Perhaps the parent handler wants to use this token.\n !state.stopAt?.(token)\n ) {\n const handler = handlers.get(token.type);\n\n if (!handler) {\n throw new Error(\"Unknown Hander\");\n }\n\n const next = handler({\n ...state,\n start: i,\n stack: items,\n });\n\n items.push(next.node);\n\n i = next.end;\n token = state.tokens[i];\n }\n\n maybeFixDifferential(items, state.textTransforms);\n maybeFixDoublePipe(items);\n\n return {\n node: {\n type: \"Term\",\n items,\n },\n end: i,\n };\n}\n","import group from \"./group.js\";\nimport infix from \"./infix.js\";\nimport prefix from \"./prefix.js\";\nimport space from \"./space.js\";\nimport term from \"./term.js\";\n\n/** @typedef {import(\"../index.js\").Node} Node */\n\n/**\n * @param {import(\"../parse.js\").State} state\n * @returns {{ node: Node; end: number }}\n */\nexport default function expr(state) {\n if (state.start >= state.tokens.length) {\n return {\n node: {\n type: \"Term\",\n items: [],\n },\n\n end: state.start,\n };\n }\n\n const { type } = state.tokens[state.start];\n\n if (type === \"paren.open\") {\n return group(state);\n }\n\n if (type === \"space\") {\n return space(state);\n }\n\n if (type === \"infix\") {\n return infix(state);\n }\n\n if (type === \"prefix\") {\n return prefix(state);\n }\n\n return term(state);\n}\n","import expr from \"./handlers/expr.js\";\n\n/**\n * @typedef {import(\"../tokenizer/index.js\").Token} Token\n * @typedef {import(\"./index.js\").Node} Node\n * @typedef {import(\"./index.js\").Sentence} Sentence\n *\n * @typedef {object} State\n * @property {Token[]} tokens\n * @property {number} start\n * @property {Node[]} stack\n * @property {number} nestLevel\n * @property {(token: Token) => boolean} [stopAt]\n * @property {string[]} [textTransforms]\n *\n * @param {Token[]} tokens\n * @returns {Sentence}\n */\nexport default function parse(tokens) {\n const body = [];\n let pos = 0;\n\n while (pos < tokens.length) {\n const state = {\n tokens,\n start: pos,\n stack: body,\n nestLevel: 1,\n };\n\n const next = expr(state);\n\n pos = next.end;\n body.push(next.node);\n }\n\n return {\n type: \"Sentence\",\n body,\n };\n}\n","/* eslint-env browser */\n\nconst NS = \"http://www.w3.org/1998/Math/MathML\";\n\n/**\n * @typedef {Required<import(\"./index.js\").RenderOptions>} Options\n * @param {import(\"../transformer/index.js\").Tag} node\n * @param {Options} options\n * @returns {Element | DocumentFragment}\n */\nexport default function toDOM(node, { bare }) {\n /** @type {Element | DocumentFragment} */\n let element;\n\n if (node.tag === \"math\" && bare) {\n element = document.createDocumentFragment();\n } else {\n element = document.createElementNS(NS, node.tag);\n }\n\n if (element instanceof Element && node.attrs) {\n for (const [name, value] of Object.entries(node.attrs)) {\n element.setAttribute(name, `${value}`);\n }\n }\n\n if (node.textContent) {\n element.textContent = node.textContent;\n }\n\n if (node.childNodes) {\n for (const childNode of node.childNodes) {\n if (childNode) {\n element.appendChild(toDOM(childNode, { bare: false }));\n }\n }\n }\n\n return element;\n}\n","/**\n * @param {string} str\n * @returns {string}\n */\nfunction 
escapeTextContent(str) {\n return str.replace(/[&<]/g, (c) => {\n if (c === \"&\") {\n return \"&\";\n }\n\n return \"<\";\n });\n}\n\n/**\n * @param {string} str\n * @returns {string}\n */\nfunction escapeAttrValue(str) {\n return str.replace(/\"/g, \""\");\n}\n\n/**\n * @param {import(\"../transformer/index.js\").Tag} node\n * @param {Required<import(\"./index.js\").RenderOptions>} options\n * @returns {string}\n */\nexport default function toString(node, { bare }) {\n const attrString = Object.entries(node.attrs || {})\n .map(([name, value]) => `${name}=\"${escapeAttrValue(`${value}`)}\"`)\n .join(\" \");\n\n const openContent = attrString ? `${node.tag} ${attrString}` : node.tag;\n\n if (node.textContent) {\n const textContent = escapeTextContent(node.textContent);\n return `<${openContent}>${textContent}</${node.tag}>`;\n }\n\n if (node.childNodes) {\n const content = node.childNodes\n .map((child) => (child ? toString(child, { bare: false }) : \"\"))\n .join(\"\");\n\n if (node.tag === \"math\" && bare) {\n return content;\n }\n\n return `<${openContent}>${content}</${node.tag}>`;\n }\n\n return `<${openContent} />`;\n}\n","import toDOM from \"./to-dom.js\";\n\n/**\n * @yields {never}\n */\nfunction* nullIter() {}\n\n/**\n * @template {unknown[]} T - Tuple type with item type of each input iterator\n * @param {{ [K in keyof T]: Iterable<T[K]> }} iterables - The iterators to be\n * zipped\n * @yields {T}\n */\nfunction* zip(iterables) {\n const iterators = iterables.map((iterable) =>\n iterable ? iterable[Symbol.iterator]() : nullIter(),\n );\n\n while (true) {\n const next = iterators.map((iterator) => iterator.next());\n\n if (next.every(({ done }) => done)) {\n return;\n }\n\n yield /** @type {T} */ (next.map(({ value }) => value));\n }\n}\n\n/**\n * @typedef {import(\"../transformer/index.js\").Tag} Tag\n *\n * @param {Element} parent\n * @param {Tag} node\n * @param {Required<import(\"./index.js\").RenderOptions>} options\n * @returns {void}\n */\nexport default function updateDOM(parent, node, options) {\n if (!parent) {\n throw new Error(\"updateDOM called on null\");\n }\n\n if (parent.tagName.toLowerCase() !== node.tag) {\n throw new Error(\"tag name mismatch\");\n }\n\n if (!(node.tag === \"math\" && options.bare)) {\n const desiredAttrs = node.attrs || {};\n const removeAttrs = [];\n\n for (const attr of parent.attributes) {\n const newValue = desiredAttrs[attr.name];\n\n if (!newValue) {\n removeAttrs.push(attr.name);\n } else if (newValue !== attr.value) {\n parent.setAttribute(attr.name, `${newValue}`);\n }\n }\n\n for (const name of removeAttrs) {\n parent.removeAttribute(name);\n }\n\n for (const [name, value] of Object.entries(desiredAttrs)) {\n if (!parent.getAttribute(name)) {\n parent.setAttribute(name, `${value}`);\n }\n }\n }\n\n if ([\"mi\", \"mn\", \"mo\", \"mspace\", \"mtext\"].includes(node.tag)) {\n if (parent.textContent !== node.textContent) {\n parent.textContent = node.textContent ?? 
\"\";\n }\n\n return;\n }\n\n // Collect in arrays to prevent the live updating from interfering\n // with the schedule.\n const appendChilds = [];\n const removeChilds = [];\n const replaceChilds = [];\n\n for (const [child, desired] of zip(\n /** @type {[HTMLCollection, (Tag | null)[]]} */ ([\n parent.children,\n node.childNodes,\n ]),\n )) {\n if (!child && !desired) {\n continue;\n }\n\n if (!desired) {\n // parent.removeChild(child);\n removeChilds.push(child);\n } else if (!child) {\n // parent.appendChild(toDOM(desired, options));\n appendChilds.push(toDOM(desired, options));\n } else if (child.tagName.toLowerCase() !== desired.tag) {\n // parent.replaceChild(toDOM(desired, options), child);\n replaceChilds.push([child, toDOM(desired, options)]);\n } else {\n updateDOM(child, desired, { bare: false });\n }\n }\n\n for (const child of removeChilds) {\n parent.removeChild(child);\n }\n\n for (const child of appendChilds) {\n parent.appendChild(child);\n }\n\n for (const [oldChild, desired] of replaceChilds) {\n parent.replaceChild(desired, oldChild);\n }\n}\n","/**\n * @typedef {import(\"./index.js\").Token} Token\n * @typedef {(char: string) => boolean} LeximeTest\n */\n\nconst LETTER_RE = /^\\p{L}/u;\n\n/** @type {LeximeTest} */\nexport function isAlphabetic(char) {\n if (!char) {\n return false;\n }\n\n return LETTER_RE.test(char);\n}\n\nconst LETTER_NUMBER_RE = /^[\\p{L}\\p{N}]/u;\n\n/** @type {LeximeTest} */\nexport function isAlphanumeric(char) {\n if (!char) {\n return false;\n }\n\n return LETTER_NUMBER_RE.test(char);\n}\n\nconst MARK_RE = /^\\p{M}/u;\n\n/** @type {LeximeTest} */\nexport function isMark(char) {\n if (!char) {\n return false;\n }\n\n return MARK_RE.test(char);\n}\n\n// Duodecimal literals are in the So category.\nconst NUMBER_RE = /^[\\p{N}\\u{218a}-\\u{218b}]/u;\n\n/** @type {LeximeTest} */\nexport function isNumeric(char) {\n if (!char) {\n return false;\n }\n\n return NUMBER_RE.test(char);\n}\n\n// Invisible opperators are in the Cf category.\nconst OPERATOR_RE = /^[\\p{P}\\p{Sm}\\p{So}\\u{2061}-\\u{2064}]/u;\n\n/** @type {LeximeTest} */\nexport function isOperational(char) {\n if (!char) {\n return false;\n }\n\n return OPERATOR_RE.test(char);\n}\n\nconst PUNCT_OPEN_RE = /^\\p{Pe}/u;\n\n/** @type {LeximeTest} */\nexport function isPunctClose(char) {\n if (!char) {\n return false;\n }\n\n return PUNCT_OPEN_RE.test(char);\n}\n\nconst PUNCT_CLOSE_RE = /^\\p{Ps}/u;\n\n/** @type {LeximeTest} */\nexport function isPunctOpen(char) {\n if (!char) {\n return false;\n }\n\n return PUNCT_CLOSE_RE.test(char);\n}\n\nconst FUNCTION_IDENT_ATTRS = { class: \"mathup-function-ident\" };\n\nexport const KNOWN_IDENTS = new Map([\n [\"CC\", { value: \"ℂ\" }],\n [\"Delta\", { value: \"Δ\", attrs: { mathvariant: \"normal\" } }],\n [\"Gamma\", { value: \"Γ\", attrs: { mathvariant: \"normal\" } }],\n [\"Lambda\", { value: \"Λ\", attrs: { mathvariant: \"normal\" } }],\n [\"NN\", { value: \"ℕ\" }],\n [\"O/\", { value: \"∅\" }],\n [\"Omega\", { value: \"Ω\", attrs: { mathvariant: \"normal\" } }],\n [\"Phi\", { value: \"Φ\", attrs: { mathvariant: \"normal\" } }],\n [\"Pi\", { value: \"Π\", attrs: { mathvariant: \"normal\" } }],\n [\"Psi\", { value: \"Ψ\", attrs: { mathvariant: \"normal\" } }],\n [\"QQ\", { value: \"ℚ\" }],\n [\"RR\", { value: \"ℝ\" }],\n [\"Sigma\", { value: \"Σ\", attrs: { mathvariant: \"normal\" } }],\n [\"Theta\", { value: \"Θ\", attrs: { mathvariant: \"normal\" } }],\n [\"Xi\", { value: \"Ξ\", attrs: { mathvariant: \"normal\" } }],\n [\"ZZ\", { value: \"ℤ\" }],\n 
[\"alpha\", { value: \"α\" }],\n [\"beta\", { value: \"β\" }],\n [\"chi\", { value: \"χ\" }],\n [\"cos\", { value: \"cos\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"cosh\", { value: \"cosh\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"cot\", { value: \"cot\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"csc\", { value: \"csc\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"cosec\", { value: \"cosec\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"delta\", { value: \"δ\" }],\n [\"det\", { value: \"det\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"dim\", { value: \"dim\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"epsilon\", { value: \"ɛ\" }],\n [\"eta\", { value: \"η\" }],\n [\"gamma\", { value: \"γ\" }],\n [\"gcd\", { value: \"gcd\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"iota\", { value: \"ι\" }],\n [\"kappa\", { value: \"κ\" }],\n [\"lambda\", { value: \"λ\" }],\n [\"lcm\", { value: \"lcm\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"ln\", { value: \"ln\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"log\", { value: \"log\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"max\", { value: \"max\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"min\", { value: \"min\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"mu\", { value: \"μ\" }],\n [\"nu\", { value: \"ν\" }],\n [\"omega\", { value: \"ω\" }],\n [\"oo\", { value: \"∞\" }],\n [\"phi\", { value: \"φ\" }],\n [\"phiv\", { value: \"ϕ\" }],\n [\"pi\", { value: \"π\" }],\n [\"psi\", { value: \"ψ\" }],\n [\"rho\", { value: \"ρ\" }],\n [\"sec\", { value: \"sec\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"sigma\", { value: \"σ\" }],\n [\"sin\", { value: \"sin\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"sinh\", { value: \"sinh\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"tan\", { value: \"tan\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"tanh\", { value: \"tanh\", attrs: { ...FUNCTION_IDENT_ATTRS } }],\n [\"tau\", { value: \"τ\" }],\n [\"theta\", { value: \"θ\" }],\n [\"upsilon\", { value: \"υ\" }],\n [\"xi\", { value: \"ξ\" }],\n [\"zeta\", { value: \"ζ\" }],\n]);\n\nexport const KNOWN_OPS = new Map([\n [\"-\", { value: \"−\" }],\n [\"!=\", { value: \"≠\" }],\n [\"!==\", { value: \"≢\" }],\n [\"!in\", { value: \"∉\" }],\n [\".$\", { value: \"\\u2061\", attrs: { class: \"mathup-function-application\" } }],\n [\".*\", { value: \"\\u2062\", attrs: { class: \"mathup-invisible-times\" } }],\n [\".+\", { value: \"\\u2064\", attrs: { class: \"mathup-invisible-add\" } }],\n [\".,\", { value: \"\\u2063\", attrs: { class: \"mathup-invisible-separator\" } }],\n [\"'\", { value: \"′\", attrs: { lspace: 0, rspace: 0 } }],\n [\"''\", { value: \"″\", attrs: { lspace: 0, rspace: 0 } }],\n [\"'''\", { value: \"‴\", attrs: { lspace: 0, rspace: 0 } }],\n [\"''''\", { value: \"⁗\", attrs: { lspace: 0, rspace: 0 } }],\n [\"*\", { value: \"·\" }],\n [\"**\", { value: \"∗\" }],\n [\"***\", { value: \"⋆\" }],\n [\"+-\", { value: \"±\" }],\n [\"-+\", { value: \"∓\" }],\n [\"-:\", { value: \"÷\" }],\n [\"-<\", { value: \"≺\" }],\n [\"-<=\", { value: \"⪯\" }],\n [\"-=\", { value: \"≡\" }],\n [\"->\", { value: \"→\" }],\n [\"->>\", { value: \"↠\" }],\n [\"...\", { value: \"…\" }],\n [\"//\", { value: \"⁄\" }],\n [\"/_\", { value: \"∠\" }],\n [\"/_\\\\\", { value: \"△\" }],\n [\":.\", { value: \"∴\" }],\n [\":|:\", { value: \"|\", attrs: { stretchy: true }, sep: true }],\n [\"<-\", { value: \"←\" }],\n [\"<<<\", { value: \"≪\" }],\n [\"<=\", { value: \"≤\" }],\n [\"<=>\", { value: \"⇔\" }],\n [\"<>\", { value: \"⋄\" }],\n [\"<|\", { value: \"⊲\" }],\n [\"==\", { 
value: \"≡\" }],\n [\"=>\", { value: \"⇒\" }],\n [\">-\", { value: \"≻\" }],\n [\">-=\", { value: \"⪰\" }],\n [\">->\", { value: \"↣\" }],\n [\">->>\", { value: \"⤖\" }],\n [\"><|\", { value: \"⋊\" }],\n [\">=\", { value: \"≥\" }],\n [\">>>\", { value: \"≫\" }],\n [\"@\", { value: \"∘\" }],\n [\"AA\", { value: \"∀\" }],\n [\"EE\", { value: \"∃\" }],\n [\"TT\", { value: \"⊤\" }],\n [\"[]\", { value: \"□\" }],\n [\"^^\", { value: \"∧\" }],\n [\"^^^\", { value: \"⋀\" }],\n [\"_|_\", { value: \"⊥\" }],\n [\"aleph\", { value: \"ℵ\" }],\n [\"and\", { value: \"and\" }],\n [\"cdots\", { value: \"⋯\" }],\n [\"darr\", { value: \"↓\" }],\n [\"ddots\", { value: \"⋱\" }],\n [\"del\", { value: \"∂\" }],\n [\"diamond\", { value: \"⋄\" }],\n [\"dint\", { value: \"∬\" }],\n [\"grad\", { value: \"∇\" }],\n [\"hArr\", { value: \"⇔\" }],\n [\"harr\", { value: \"↔\" }],\n [\"if\", { value: \"if\" }],\n [\"iff\", { value: \"⇔\" }],\n [\"in\", { value: \"∈\" }],\n [\"int\", { value: \"∫\" }],\n [\"lArr\", { value: \"⇐\" }],\n [\"larr\", { value: \"←\" }],\n [\"lim\", { value: \"lim\" }],\n [\"mod\", { value: \"mod\" }],\n [\"nn\", { value: \"∩\" }],\n [\"nnn\", { value: \"⋂\" }],\n [\"not\", { value: \"¬\" }],\n [\"o+\", { value: \"⊕\" }],\n [\"o.\", { value: \"⊙\" }],\n [\"oc\", { value: \"∝\" }],\n [\"oint\", { value: \"∮\" }],\n [\"or\", { value: \"or\" }],\n [\"otherwise\", { value: \"otherwise\" }],\n [\"ox\", { value: \"⊗\" }],\n [\"prod\", { value: \"∏\" }],\n [\"prop\", { value: \"∝\" }],\n [\"rArr\", { value: \"⇒\" }],\n [\"rarr\", { value: \"→\" }],\n [\"square\", { value: \"□\" }],\n [\"sub\", { value: \"⊂\" }],\n [\"sube\", { value: \"⊆\" }],\n [\"sum\", { value: \"∑\" }],\n [\"sup\", { value: \"⊃\" }],\n [\"supe\", { value: \"⊇\" }],\n [\"uarr\", { value: \"↑\" }],\n [\"uu\", { value: \"∪\" }],\n [\"uuu\", { value: \"⋃\" }],\n [\"vdots\", { value: \"⋮\" }],\n [\"vv\", { value: \"∨\" }],\n [\"vvv\", { value: \"⋁\" }],\n [\"xx\", { value: \"×\" }],\n [\"|--\", { value: \"⊢\" }],\n [\"|->\", { value: \"↦\" }],\n [\"|==\", { value: \"⊨\" }],\n [\"|>\", { value: \"⊳\" }],\n [\"|><\", { value: \"⋉\" }],\n [\"|><|\", { value: \"⋈\" }],\n [\"||\", { value: \"∥\" }],\n [\"~=\", { value: \"≅\" }],\n [\"~~\", { value: \"≈\" }],\n]);\n\n/** @type {Map<string, Omit<Token, \"type\">>} */\nexport const KNOWN_PARENS_OPEN = new Map([\n [\"(:\", { value: \"⟨\" }],\n [\"<<\", { value: \"⟨\" }],\n [\"{:\", { value: \"\" }],\n [\"|(\", { value: \"|\" }],\n [\"|:\", { value: \"|\" }],\n [\"|__\", { value: \"⌊\" }],\n [\"||(\", { value: \"‖\" }],\n [\"||:\", { value: \"‖\" }],\n [\"|~\", { value: \"⌈\" }],\n [\n \"(mod\",\n {\n value: \"(\",\n attrs: { lspace: \"1.65ex\" },\n extraTokensAfter: [\n { type: \"operator\", value: \"mod\", attrs: { lspace: 0 } },\n ],\n },\n ],\n]);\n\nexport const KNOWN_PARENS_CLOSE = new Map([\n [\")|\", { value: \"|\" }],\n [\")||\", { value: \"‖\" }],\n [\":)\", { value: \"⟩\" }],\n [\":|\", { value: \"|\" }],\n [\":||\", { value: \"‖\" }],\n [\":}\", { value: \"\" }],\n [\">>\", { value: \"⟩\" }],\n [\"__|\", { value: \"⌋\" }],\n [\"~|\", { value: \"⌉\" }],\n]);\n\nexport const KNOWN_PREFIX = new Map([\n // Accents\n [\"bar\", { name: \"over\", accent: \"‾\" }],\n [\"ddot\", { name: \"over\", accent: \"⋅⋅\" }],\n [\"dot\", { name: \"over\", accent: \"⋅\" }],\n [\"hat\", { name: \"over\", accent: \"^\" }],\n [\"obrace\", { name: \"over\", accent: \"⏞\" }],\n [\"obracket\", { name: \"over\", accent: \"⎴\" }],\n [\"oparen\", { name: \"over\", accent: \"⏜\" }],\n [\"oshell\", { name: 
\"over\", accent: \"⏠\" }],\n [\"tilde\", { name: \"over\", accent: \"˜\" }],\n [\"ubrace\", { name: \"under\", accent: \"⏟\" }],\n [\"ubrace\", { name: \"under\", accent: \"⏟\" }],\n [\"ubracket\", { name: \"under\", accent: \"⎵\" }],\n [\"ul\", { name: \"under\", accent: \"_\" }],\n [\"uparen\", { name: \"under\", accent: \"⏝\" }],\n [\"ushell\", { name: \"under\",