// antlr-ng
// Version:
// Next generation ANTLR Tool
// 223 lines (222 loc) • 7.71 kB
// JavaScript
// Bundler (esbuild) helpers: `__name` stamps a stable, configurable `name`
// property onto a target so class/function names survive minification.
var __defProp = Object.defineProperty;
var __name = (target, value) => {
  return __defProp(target, "name", { value, configurable: true });
};
import { Token } from "antlr4ng";
import { ANTLRv4Parser } from "../generated/ANTLRv4Parser.js";
import { Constants } from "../Constants.js";
import { LeftRecursiveRuleTransformer } from "../analysis/LeftRecursiveRuleTransformer.js";
import { isTokenName } from "../support/helpers.js";
import { IssueCode } from "../tool/Issues.js";
import { Grammar } from "../tool/Grammar.js";
import { LexerGrammar } from "../tool/LexerGrammar.js";
import { AttributeChecks } from "./AttributeChecks.js";
import { BasicSemanticChecks } from "./BasicSemanticChecks.js";
import { RuleCollector } from "./RuleCollector.js";
import { SymbolChecks } from "./SymbolChecks.js";
import { SymbolCollector } from "./SymbolCollector.js";
import { UseDefAnalyzer } from "./UseDefAnalyzer.js";
class SemanticPipeline {
  /**
   * Drives the semantic analysis passes over a parsed grammar: rule
   * collection, basic semantic checks, left-recursive rule rewriting,
   * symbol collection/checking, and token & channel type assignment.
   *
   * @param g The Grammar instance to analyze.
   */
  constructor(g) {
    this.g = g;
  }
  static {
    // Bundler helper call: keeps the class name stable after minification.
    __name(this, "SemanticPipeline");
  }
  /**
   * Runs all analysis phases in order. Phases that can report errors are
   * followed by an error-count check; the pipeline bails out as soon as a
   * phase added new errors, since later phases assume earlier ones passed.
   */
  process() {
    // Phase 1: collect rules and run basic structural checks.
    const ruleCollector = new RuleCollector(this.g);
    ruleCollector.process(this.g.ast);
    let prevErrors = this.g.tool.errorManager.errors;
    const basics = new BasicSemanticChecks(this.g, ruleCollector);
    basics.process();
    if (this.g.tool.errorManager.errors > prevErrors) {
      return;
    }
    // Phase 2: rewrite left-recursive rules before rules are registered.
    prevErrors = this.g.tool.errorManager.errors;
    const transformer = new LeftRecursiveRuleTransformer(
      this.g.ast,
      Array.from(ruleCollector.nameToRuleMap.values()),
      this.g
    );
    transformer.translateLeftRecursiveRules();
    if (this.g.tool.errorManager.errors > prevErrors) {
      return;
    }
    // Phase 3: register the collected rules, then collect and check symbols.
    for (const r of ruleCollector.nameToRuleMap.values()) {
      this.g.defineRule(r);
    }
    const collector = new SymbolCollector(this.g);
    collector.process(this.g.ast);
    const symbolChecker = new SymbolChecks(this.g, collector);
    symbolChecker.process();
    for (const a of collector.namedActions) {
      this.g.defineAction(a);
    }
    // Link each alternative's AST node back to its Alternative object
    // (alternatives are 1-indexed, hence the loop starting at 1).
    for (const r of this.g.rules.values()) {
      for (let i = 1; i <= r.numberOfAlts; i++) {
        r.alt[i].ast.alt = r.alt[i];
      }
    }
    // Phase 4: assign token and channel types, then run remaining checks.
    this.g.importTokensFromTokensFile();
    if (this.g.isLexer()) {
      this.assignLexerTokenTypes(this.g, collector.tokensDefs);
    } else {
      this.assignTokenTypes(
        this.g,
        collector.tokensDefs,
        collector.tokenIDRefs,
        collector.terminals
      );
    }
    symbolChecker.checkForModeConflicts(this.g);
    symbolChecker.checkForUnreachableTokens(this.g);
    this.assignChannelTypes(this.g, collector.channelDefs);
    symbolChecker.checkRuleArgs(this.g, collector.ruleRefs);
    this.identifyStartRules(collector);
    symbolChecker.checkForQualifiedRuleIssues(this.g, collector.qualifiedRuleRefs);
    if (this.g.tool.getNumErrors() > 0) {
      return;
    }
    // Phase 5: attribute expression checks and use/def analysis of actions.
    AttributeChecks.checkAllAttributeExpressions(this.g);
    UseDefAnalyzer.trackTokenRuleRefsInActions(this.g);
  }
  /**
   * Clears the start-rule flag on every rule that is referenced from some
   * other rule. (Presumably rules default to isStartRule === true when
   * created elsewhere, leaving only unreferenced rules flagged — confirm
   * against the Rule definition.)
   *
   * @param collector The symbol collector holding all rule references.
   */
  identifyStartRules(collector) {
    for (const ref of collector.ruleRefs) {
      const ruleName = ref.getText();
      const r = this.g.getRule(ruleName);
      if (r !== null) {
        r.isStartRule = false;
      }
    }
  }
  /**
   * Assigns token types for a lexer grammar: explicit `tokens{}` entries,
   * non-fragment lexer rules, and string-literal aliases. Literal aliases
   * that conflict with already-registered literals are removed from the
   * outermost grammar's literal maps.
   *
   * @param g The lexer grammar being processed.
   * @param tokensDefs AST nodes from the grammar's `tokens{}` block.
   */
  assignLexerTokenTypes(g, tokensDefs) {
    // Token types always live on the outermost grammar (imports share them).
    const grammar = g.getOutermostGrammar();
    for (const def of tokensDefs) {
      if (isTokenName(def.getText())) {
        grammar.defineTokenName(def.getText());
      }
    }
    // Every non-fragment rule without a type()/more command defines a token.
    for (const r of g.rules.values()) {
      if (!r.isFragment() && !this.hasTypeOrMoreCommand(r)) {
        grammar.defineTokenName(r.name);
      }
    }
    const litAliases = Grammar.getStringLiteralAliasesFromLexerRules(g.ast);
    const conflictingLiterals = /* @__PURE__ */ new Set();
    if (litAliases !== null) {
      for (const [nameAST, litAST] of litAliases) {
        if (!grammar.stringLiteralToTypeMap.has(litAST.getText())) {
          grammar.defineTokenAlias(nameAST.getText(), litAST.getText());
        } else {
          // Same literal aliased more than once: remember it so all of its
          // mappings can be dropped below.
          conflictingLiterals.add(litAST.getText());
        }
      }
      // Remove literal-to-type mappings for conflicting literals, and clear
      // the reverse (type-to-literal) entry when it points at that literal.
      for (const lit of conflictingLiterals) {
        const value = grammar.stringLiteralToTypeMap.get(lit);
        grammar.stringLiteralToTypeMap.delete(lit);
        if (value !== void 0 && value > 0 && value < grammar.typeToStringLiteralList.length && lit === grammar.typeToStringLiteralList[value]) {
          grammar.typeToStringLiteralList[value] = null;
        }
      }
    }
  }
  /**
   * Reports whether a lexer rule contains a `type(...)` or `more` lexer
   * command, in which case the rule does not define its own token type.
   *
   * @param r The rule whose AST is inspected.
   * @returns true if a type() call or `more` command is present.
   */
  hasTypeOrMoreCommand(r) {
    const ast = r.ast;
    const altActionAst = ast.getFirstDescendantWithType(ANTLRv4Parser.LEXER_ALT_ACTION);
    if (altActionAst === null) {
      return false;
    }
    // Child 0 appears to be the alternative itself; commands start at
    // index 1 — confirm against the LEXER_ALT_ACTION AST shape.
    for (let i = 1; i < altActionAst.children.length; i++) {
      const node = altActionAst.children[i];
      if (node.getType() === ANTLRv4Parser.LEXER_ACTION_CALL) {
        if (node.children[0].getText() === "type") {
          return true;
        }
      } else if (node.getText() === "more") {
        return true;
      }
    }
    return false;
  }
  /**
   * Assigns token types for a parser/combined grammar: `tokens{}` entries,
   * implicitly-defined token references, and string literals. Emits grammar
   * errors for reassignments, implicit token definitions, and implicit
   * string definitions.
   *
   * @param g The grammar being processed.
   * @param tokensDefs AST nodes from the `tokens{}` block.
   * @param tokenIDs Token name references found in the grammar body.
   * @param terminals All terminal references (token names and literals).
   */
  assignTokenTypes(g, tokensDefs, tokenIDs, terminals) {
    for (const alias of tokensDefs) {
      if (g.getTokenType(alias.getText()) !== Token.INVALID_TYPE) {
        this.g.tool.errorManager.grammarError(
          IssueCode.TokenNameReassignment,
          g.fileName,
          alias.token,
          alias.getText()
        );
      }
      g.defineTokenName(alias.getText());
    }
    for (const idAST of tokenIDs) {
      if (g.getTokenType(idAST.getText()) === Token.INVALID_TYPE) {
        this.g.tool.errorManager.grammarError(
          IssueCode.ImplicitTokenDefinition,
          g.fileName,
          idAST.token,
          idAST.getText()
        );
      }
      g.defineTokenName(idAST.getText());
    }
    for (const termAST of terminals) {
      if (termAST.getType() !== ANTLRv4Parser.STRING_LITERAL) {
        continue;
      }
      if (g.getTokenType(termAST.getText()) === Token.INVALID_TYPE) {
        this.g.tool.errorManager.grammarError(
          IssueCode.ImplicitStringDefinition,
          g.fileName,
          termAST.token,
          termAST.getText()
        );
      }
    }
    // BUG FIX: Map.prototype.keys() returns an iterator, which
    // JSON.stringify renders as "{}" — spread into an array so the
    // token/string names actually appear in the log output.
    g.tool.logInfo({ component: "semantics", msg: "tokens=" + JSON.stringify([...g.tokenNameToTypeMap.keys()]) });
    g.tool.logInfo({ component: "semantics", msg: "strings=" + JSON.stringify([...g.stringLiteralToTypeMap.keys()]) });
  }
  /**
   * Assign constant values to custom channels defined in a grammar.
   *
   * Emits grammar errors when a channel name collides with a token name,
   * a common constant (e.g. built-in channel names), or a lexer mode name;
   * the channel is still defined on the outermost grammar afterwards.
   *
   * @param g The grammar.
   * @param channelDefs A collection of AST nodes defining individual channels within a `channels{}` block
   * in the grammar.
   */
  assignChannelTypes(g, channelDefs) {
    const outermost = g.getOutermostGrammar();
    for (const channel of channelDefs) {
      const channelName = channel.getText();
      if (g.getTokenType(channelName) !== Token.INVALID_TYPE) {
        this.g.tool.errorManager.grammarError(
          IssueCode.ChannelConflictsWithToken,
          g.fileName,
          channel.token,
          channelName
        );
      }
      if (Constants.COMMON_CONSTANTS.has(channelName)) {
        this.g.tool.errorManager.grammarError(
          IssueCode.ChannelConflictsWithCommonConstants,
          g.fileName,
          channel.token,
          channelName
        );
      }
      // Mode-name conflicts can only occur when the outermost grammar is a
      // lexer grammar (only lexers have modes).
      if (outermost instanceof LexerGrammar) {
        const lexerGrammar = outermost;
        if (lexerGrammar.modes.has(channelName)) {
          this.g.tool.errorManager.grammarError(
            IssueCode.ChannelConflictsWithMode,
            g.fileName,
            channel.token,
            channelName
          );
        }
      }
      outermost.defineChannelName(channel.getText());
    }
  }
}
export {
SemanticPipeline
};