UNPKG

rawsql-ts

Version:

[beta] High-performance SQL parser and AST analyzer written in TypeScript. Provides fast parsing and advanced transformation capabilities.

151 lines 4.56 kB
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.CommandTokenReader = exports.joinkeywordParser = void 0; const BaseTokenReader_1 = require("./BaseTokenReader"); const Lexeme_1 = require("../models/Lexeme"); const KeywordTrie_1 = require("../models/KeywordTrie"); const KeywordParser_1 = require("../parsers/KeywordParser"); // Commands are those that require a dedicated parser. // Keywords composed of multiple words are also considered commands. // The exception is "type". Since types can be user-defined and cannot be accurately identified, they are treated as Identifiers. const joinTrie = new KeywordTrie_1.KeywordTrie([ ["join"], ["inner", "join"], ["cross", "join"], ["left", "join"], ["left", "outer", "join"], ["right", "join"], ["right", "outer", "join"], ["full", "join"], ["full", "outer", "join"], ["natural", "join"], ["natural", "inner", "join"], ["natural", "left", "join"], ["natural", "left", "outer", "join"], ["natural", "right", "join"], ["natural", "right", "outer", "join"], ["natural", "full", "join"], ["natural", "full", "outer", "join"], ]); const keywordTrie = new KeywordTrie_1.KeywordTrie([ ["with"], ["recursive"], ["materialized"], ["not", "materialized"], ["select"], ["from"], ["distinct"], ["distinct", "on"], ["where"], ["group", "by"], ["having"], ["order", "by"], ["limit"], ["offset"], ["fetch"], ["first"], ["next"], ["row"], ["row", "only"], ["rows", "only"], ["percent"], ["percent", "with", "ties"], // for ["for"], ["update"], ["share"], ["key", "share"], ["no", "key", "update"], // set operations ["union"], ["union", "all"], ["intersect"], ["intersect", "all"], ["except"], ["except", "all"], // between and ["beteen"], // window functions ["window"], ["over"], ["partition", "by"], ["range"], ["rows"], ["groups"], // window frame ["current", "row"], ["unbounded", "preceding"], ["unbounded", "following"], ["preceding"], ["following"], // table join commands ["on"], ["using"], ["lateral"], // case ["case"], 
["case", "when"], ["when"], ["then"], ["else"], ["end"], // others ["insert", "into"], ["update"], ["delete", "from"], ["merge", "into"], ["matched"], ["not", "matched"], ["update", "set"], ["do", "nothing"], ["values"], ["set"], ["returning"], ["create", "table"], ["create", "temporary", "table"], ["tablesample"], ["array"], // cast ["as"], // odrder ["asc"], ["desc"], ["nulls", "first"], ["nulls", "last"], ]); const keywordParser = new KeywordParser_1.KeywordParser(keywordTrie); exports.joinkeywordParser = new KeywordParser_1.KeywordParser(joinTrie); class CommandTokenReader extends BaseTokenReader_1.BaseTokenReader { tryRead(previous) { if (this.isEndOfInput()) { return null; } const keywordJoin = exports.joinkeywordParser.parse(this.input, this.position); if (keywordJoin !== null) { this.position = keywordJoin.newPosition; return this.createLexeme(Lexeme_1.TokenType.Command, keywordJoin.keyword); } // Check for keyword identifiers const keyword = keywordParser.parse(this.input, this.position); if (keyword !== null) { this.position = keyword.newPosition; return this.createLexeme(Lexeme_1.TokenType.Command, keyword.keyword); } // check hint clause if (this.canRead(2) && this.input[this.position] === '/' && this.input[this.position + 1] === '*' && this.input[this.position + 2] === '+') { this.position += 3; const start = this.position; while (this.position + 1 < this.input.length) { if (this.input[this.position] === '*' && this.input[this.position + 1] === '/') { this.position += 2; return this.createLexeme(Lexeme_1.TokenType.Command, '/*+ ' + this.input.slice(start, this.position - 2).trim() + ' */'); } this.position++; } throw new Error(`Block comment is not closed. position: ${this.position}`); } return null; } } exports.CommandTokenReader = CommandTokenReader; //# sourceMappingURL=CommandTokenReader.js.map