
@abaplint/core: statement_parser.js

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StatementParser = exports.STATEMENT_MAX_TOKENS = void 0;
const Statements = require("./statements");
const Expressions = require("./expressions");
const Tokens = require("../1_lexer/tokens");
const nodes_1 = require("../nodes");
const artifacts_1 = require("../artifacts");
const combi_1 = require("./combi");
const _statement_1 = require("./statements/_statement");
const expand_macros_1 = require("./expand_macros");
const tokens_1 = require("../1_lexer/tokens");
exports.STATEMENT_MAX_TOKENS = 1000;
class StatementMap {
    constructor() {
        this.map = {};
        for (const stat of artifacts_1.ArtifactsABAP.getStatements()) {
            const f = stat.getMatcher().first();
            if (f.length === 0) {
                throw new Error("StatementMap, first must have contents");
            }
            for (const first of f) {
                if (this.map[first]) {
                    this.map[first].push({ statement: stat });
                }
                else {
                    this.map[first] = [{ statement: stat }];
                }
            }
        }
    }
    lookup(str) {
        const res = this.map[str.toUpperCase()];
        if (res === undefined) {
            return [];
        }
        if (res[0].matcher === undefined) {
            for (const r of res) {
                r.matcher = r.statement.getMatcher();
            }
        }
        return res;
    }
}
class WorkArea {
    constructor(file, tokens) {
        this.file = file;
        this.tokens = tokens;
        this.statements = [];
    }
    addUnknown(pre, post, colon) {
        const st = new nodes_1.StatementNode(new _statement_1.Unknown(), colon);
        st.setChildren(this.tokensToNodes(pre, post));
        this.statements.push(st);
    }
    toResult() {
        return { file: this.file, tokens: this.tokens, statements: this.statements };
    }
    tokensToNodes(tokens1, tokens2) {
        const ret = [];
        for (const t of tokens1) {
            ret.push(new nodes_1.TokenNode(t));
        }
        for (const t of tokens2) {
            ret.push(new nodes_1.TokenNode(t));
        }
        return ret;
    }
}
class StatementParser {
    constructor(version, reg) {
        if (!StatementParser.map) {
            StatementParser.map = new StatementMap();
        }
        this.version = version;
        this.reg = reg;
    }
    /** input is one full object */
    run(input, globalMacros) {
        const macros = new expand_macros_1.ExpandMacros(globalMacros, this.version, this.reg);
        const wa = input.map(i => new WorkArea(i.file, i.tokens));
        for (const w of wa) {
            this.process(w);
            this.categorize(w);
            macros.find(w.statements, w.file);
        }
        for (const w of wa) {
            const res = macros.handleMacros(w.statements, w.file);
            w.statements = res.statements;
            if (res.containsUnknown === true) {
                this.lazyUnknown(w);
            }
            this.nativeSQL(w);
        }
        return wa.map(w => w.toResult());
    }
    // todo, refactor, remove method here and only have in WorkArea class
    tokensToNodes(tokens) {
        const ret = [];
        for (const t of tokens) {
            ret.push(new nodes_1.TokenNode(t));
        }
        return ret;
    }
    // tries to split Unknown statements by newlines, when adding/writing a new statement
    // in an editor, adding the statement terminator is typically the last thing to do
    // note: this will not work if the second statement is a macro call, guess this is okay
    lazyUnknown(wa) {
        const result = [];
        for (let statement of wa.statements) {
            // don't use CALL METHOD, when executing lazy, it easily gives a Move for the last statement if lazy logic is evaluated
            if (statement.get() instanceof _statement_1.Unknown) {
                const concat = statement.concatTokens().toUpperCase();
                if (concat.startsWith("CALL METHOD ") === false
                        && concat.startsWith("RAISE EXCEPTION TYPE ") === false
                        && concat.startsWith("READ TABLE ") === false
                        && concat.startsWith("LOOP AT ") === false
                        && concat.startsWith("CALL FUNCTION ") === false) {
                    for (const { first, second } of this.buildSplits(statement.getTokens())) {
                        if (second.length === 1) {
                            continue; // probably punctuation
                        }
                        const s = this.categorizeStatement(new nodes_1.StatementNode(new _statement_1.Unknown()).setChildren(this.tokensToNodes(second)));
                        if (!(s.get() instanceof _statement_1.Unknown) && !(s.get() instanceof _statement_1.Empty)) {
                            result.push(new nodes_1.StatementNode(new _statement_1.Unknown()).setChildren(this.tokensToNodes(first)));
                            statement = s;
                            break;
                        }
                    }
                }
            }
            result.push(statement);
        }
        wa.statements = result;
    }
    buildSplits(tokens) {
        const res = [];
        const before = [];
        let prevRow = tokens[0].getRow();
        for (let i = 0; i < tokens.length; i++) {
            if (tokens[i].getRow() !== prevRow) {
                res.push({ first: [...before], second: [...tokens].splice(i) });
            }
            prevRow = tokens[i].getRow();
            before.push(tokens[i]);
        }
        return res;
    }
    nativeSQL(wa) {
        let sql = false;
        for (let i = 0; i < wa.statements.length; i++) {
            const statement = wa.statements[i];
            const type = statement.get();
            if (type instanceof Statements.ExecSQL
                    || (type instanceof Statements.MethodImplementation && statement.findDirectExpression(Expressions.Language))) {
                sql = true;
            }
            else if (sql === true) {
                if (type instanceof Statements.EndExec || type instanceof Statements.EndMethod) {
                    sql = false;
                }
                else {
                    wa.statements[i] = new nodes_1.StatementNode(new _statement_1.NativeSQL()).setChildren(this.tokensToNodes(statement.getTokens()));
                    if (statement.concatTokens().toUpperCase().endsWith("ENDMETHOD.")) {
                        const tokens = statement.getTokens();
                        const startTokens = this.tokensToNodes(tokens.slice(tokens.length - 2, tokens.length));
                        const endTokens = this.tokensToNodes(tokens.slice(0, tokens.length - 2));
                        wa.statements[i] = new nodes_1.StatementNode(new _statement_1.NativeSQL()).setChildren(endTokens);
                        const item = new nodes_1.StatementNode(new Statements.EndMethod()).setChildren(startTokens);
                        wa.statements.splice(i + 1, 0, item);
                        sql = false;
                    }
                }
            }
        }
    }
    // for each statement, run statement matchers to figure out which kind of statement it is
    categorize(wa) {
        const result = [];
        for (const statement of wa.statements) {
            result.push(this.categorizeStatement(statement));
        }
        wa.statements = result;
    }
    categorizeStatement(input) {
        let statement = input;
        const length = input.getChildren().length;
        const lastToken = input.getLastToken();
        const isPunctuation = lastToken instanceof Tokens.Punctuation;
        if (length === 1 && isPunctuation) {
            const tokens = statement.getTokens();
            statement = new nodes_1.StatementNode(new _statement_1.Empty()).setChildren(this.tokensToNodes(tokens));
        }
        else if (statement.get() instanceof _statement_1.Unknown) {
            if (isPunctuation) {
                statement = this.match(statement);
            }
            else if (length > exports.STATEMENT_MAX_TOKENS) {
                // if the statement contains more than STATEMENT_MAX_TOKENS tokens, just give up
                statement = input;
            }
            else if (length === 1 && lastToken instanceof tokens_1.Pragma) {
                statement = new nodes_1.StatementNode(new _statement_1.Empty(), undefined, [lastToken]);
            }
        }
        return statement;
    }
    removePragma(tokens) {
        const result = [];
        const pragmas = [];
        // skip the last token as it is the punctuation
        for (let i = 0; i < tokens.length - 1; i++) {
            const t = tokens[i];
            if (t instanceof Tokens.Pragma) {
                pragmas.push(t);
            }
            else {
                result.push(t);
            }
        }
        return { tokens: result, pragmas: pragmas };
    }
    match(statement) {
        const tokens = statement.getTokens();
        const { tokens: filtered, pragmas } = this.removePragma(tokens);
        if (filtered.length === 0) {
            return new nodes_1.StatementNode(new _statement_1.Empty()).setChildren(this.tokensToNodes(tokens));
        }
        for (const st of StatementParser.map.lookup(filtered[0].getStr())) {
            const match = combi_1.Combi.run(st.matcher, filtered, this.version);
            if (match) {
                const last = tokens[tokens.length - 1];
                match.push(new nodes_1.TokenNode(last));
                return new nodes_1.StatementNode(st.statement, statement.getColon(), pragmas).setChildren(match);
            }
        }
        // next try the statements without specific keywords
        for (const st of StatementParser.map.lookup("")) {
            const match = combi_1.Combi.run(st.matcher, filtered, this.version);
            if (match) {
                const last = tokens[tokens.length - 1];
                match.push(new nodes_1.TokenNode(last));
                return new nodes_1.StatementNode(st.statement, statement.getColon(), pragmas).setChildren(match);
            }
        }
        return statement;
    }
    // takes care of splitting tokens into statements, also handles chained statements
    // statements are split by "," or "."
    // additional colons/chaining after the first colon are ignored
    process(wa) {
        let add = [];
        let pre = [];
        let colon = undefined;
        for (const token of wa.tokens) {
            if (token instanceof Tokens.Comment) {
                wa.statements.push(new nodes_1.StatementNode(new _statement_1.Comment()).setChildren(this.tokensToNodes([token])));
                continue;
            }
            add.push(token);
            const str = token.getStr();
            if (str.length === 1) {
                if (str === ".") {
                    wa.addUnknown(pre, add, colon);
                    add = [];
                    pre = [];
                    colon = undefined;
                }
                else if (str === "," && pre.length > 0) {
                    wa.addUnknown(pre, add, colon);
                    add = [];
                }
                else if (str === ":" && colon === undefined) {
                    colon = token;
                    add.pop(); // do not add colon token to statement
                    pre.push(...add);
                    add = [];
                }
                else if (str === ":") {
                    add.pop(); // do not add colon token to statement
                }
            }
        }
        if (add.length > 0) {
            wa.addUnknown(pre, add, colon);
        }
    }
}
exports.StatementParser = StatementParser;
//# sourceMappingURL=statement_parser.js.map
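For orientation, a minimal driver sketch follows. Only the StatementParser calls themselves are taken from the code above: the constructor receives a syntax version and a registry, and run() receives an array of {file, tokens} objects (one per lexed source file) plus a list of global macro names, returning {file, tokens, statements} for each input. The require paths for Lexer, MemoryFile, Registry and Version, the exact lexer API, and the Version.v702 value are assumptions about the rest of the package layout and are not confirmed by this file.

// Usage sketch, not part of statement_parser.js; items marked "assumed" are guesses
const { StatementParser } = require("./statement_parser");
const { Lexer } = require("../1_lexer/lexer"); // assumed path and API
const { MemoryFile } = require("../../files/memory_file"); // assumed path
const { Registry } = require("../../registry"); // assumed path
const { Version } = require("../../version"); // assumed path

const reg = new Registry();
const file = new MemoryFile("zreport.prog.abap", "WRITE: 'hello', 'world'.");
// assumed: the lexer returns the {file, tokens} shape that run() expects
const lexed = new Lexer().run(file);

const parser = new StatementParser(Version.v702, reg);
const [result] = parser.run([lexed], []); // second argument: global macro names, none here

for (const statement of result.statements) {
    // statement.get() returns the matched Statement instance; Unknown means no matcher fit
    console.log(statement.get().constructor.name, statement.concatTokens());
}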