antlr-ng
Next generation ANTLR Tool
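// Command-line test rig: loads a generated lexer (and, unless running lexer-only,
// a generated parser), runs them over one or more input files, and optionally
// prints the token stream, the parse tree, trace output, and ambiguity diagnostics.
//
// Example invocation (script name, grammar path, and rule name are placeholders;
// a TypeScript-aware runtime such as tsx is assumed, since the generated .ts
// recognizers are imported directly):
//   tsx TestRig.ts ./generated/MyGrammar expression --tokens --tree input.txt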
import { program } from "commander";
import {
CharStream,
CommonToken,
CommonTokenStream,
DiagnosticErrorListener,
Lexer,
Parser,
PredictionMode
} from "antlr4ng";
import { readFile } from "fs/promises";
import { resolve } from "path";
import { parseBoolean } from "./cli-options.js";
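// Command-line definition: two required positional arguments (the grammar base
// path and the start rule), optional input files, and flags controlling the output.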
program
    .argument("<grammar>", "The path of the grammar with no extension")
    .argument("<startRuleName>", "Name of the start rule")
    .option("--tree", "Print out the parse tree", parseBoolean, false)
    .option("--tokens", "Print out the tokens for each input symbol", parseBoolean, false)
    .option("--trace", "Print out tracing information (rule enter/exit etc.).", parseBoolean, false)
    .option("--diagnostics", "Print out diagnostic information", parseBoolean, false)
    .option("--sll", "Use SLL prediction mode (instead of LL)", parseBoolean, false)
    .argument("[inputFiles...]", "Input files")
    .action((grammar, startRuleName, inputFiles, options) => {
console.log("\nGrammar:", grammar);
console.log("Start Rule:", startRuleName);
console.log("Input Files:", inputFiles);
console.log("Options: ", options);
console.log();
}).parse();
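// Combine the parsed flags and the positional arguments into a single options object.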
const testRigOptions = program.opts();
testRigOptions.grammar = program.args[0];
testRigOptions.startRuleName = program.args[1];
testRigOptions.inputFiles = program.args.slice(2);
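// Drives the generated recognizers over the input files. Using "tokens" as the
// start rule runs the lexer only; no parser class is loaded.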
class TestRig {
static LEXER_START_RULE_NAME = "tokens";
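// Resolves the generated <grammar>Lexer.ts / <grammar>Parser.ts files from the
// grammar base path, imports them dynamically, and feeds each input file
// through process().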
async run() {
const lexerName = resolve(testRigOptions.grammar + "Lexer");
const lexer = await this.loadClass(Lexer, lexerName + ".ts");
let parser;
if (testRigOptions.startRuleName !== TestRig.LEXER_START_RULE_NAME) {
const parserName = resolve(testRigOptions.grammar + "Parser");
parser = await this.loadClass(Parser, parserName + ".ts");
}
const files = testRigOptions.inputFiles ?? [];
for (const inputFile of files) {
const content = await readFile(resolve(inputFile), { encoding: "utf-8" });
const charStream = CharStream.fromString(content);
if (files.length > 1) {
console.log(inputFile);
}
this.process(charStream, lexer, parser);
}
}
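// Tokenizes the character stream and, unless this is a lexer-only run, invokes
// the requested start rule on the parser, honoring the --tokens, --diagnostics,
// --tree, --sll and --trace flags.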
process(input, lexer, parser) {
lexer.inputStream = input;
const tokens = new CommonTokenStream(lexer);
tokens.fill();
if (testRigOptions.tokens) {
for (const tok of tokens.getTokens()) {
if (tok instanceof CommonToken) {
console.log(tok.toString(lexer));
} else {
console.log(tok.toString());
}
}
}
if (testRigOptions.startRuleName === TestRig.LEXER_START_RULE_NAME) {
return;
}
if (!parser) {
throw new Error("Parser is required for non-lexer start rule");
}
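// --diagnostics: report ambiguities via a diagnostic listener and exact-ambiguity LL prediction.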
if (testRigOptions.diagnostics) {
parser.addErrorListener(new DiagnosticErrorListener());
parser.interpreter.predictionMode = PredictionMode.LL_EXACT_AMBIG_DETECTION;
}
if (testRigOptions.tree) {
parser.buildParseTrees = true;
}
if (testRigOptions.sll) {
parser.interpreter.predictionMode = PredictionMode.SLL;
}
parser.tokenStream = tokens;
parser.setTrace(testRigOptions.trace ?? false);
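// Generated parsers expose one method per rule, so the start rule is invoked by name.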
let tree;
if (typeof parser[testRigOptions.startRuleName] === "function") {
tree = parser[testRigOptions.startRuleName]();
} else {
console.error(`Method ${testRigOptions.startRuleName} was not found on the parser or is not a function`);
}
if (testRigOptions.tree && tree) {
console.log(tree.toStringTree(parser));
}
}
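// Dynamically imports the given module and instantiates the first exported class
// that derives from the expected base type t (Lexer or Parser).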
async loadClass(t, fileName) {
try {
const module = await import(fileName);
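// Compare constructor names along the prototype chain instead of using instanceof,
// which also matches when the base class comes from a different copy of antlr4ng.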
const extendsClass = (child, parent) => {
let proto = child.prototype;
while (proto) {
if (proto.constructor.name === parent.prototype.constructor.name) {
return true;
}
proto = Object.getPrototypeOf(proto);
}
return false;
};
const targetClass = Object.values(module).find((candidate) => {
return typeof candidate === "function" && candidate.prototype instanceof Object && candidate !== t && extendsClass(candidate, t);
});
if (!targetClass) {
throw new Error("Could not find a recognizer class in " + fileName);
}
return new targetClass();
} catch (e) {
throw new Error(`Could not load class ${t.name} from ${fileName}: ${e}`);
}
}
}
const testRig = new TestRig();
await testRig.run();
export {
TestRig
};