rawsql-ts
Version: beta
High-performance SQL parser and AST analyzer written in TypeScript. Provides fast parsing and advanced transformation capabilities.
45 lines • 1.76 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FunctionTokenReader = void 0;
const BaseTokenReader_1 = require("./BaseTokenReader");
const Lexeme_1 = require("../models/Lexeme");
const stringUtils_1 = require("../utils/stringUtils");
const KeywordTrie_1 = require("../models/KeywordTrie");
const KeywordParser_1 = require("../parsers/KeywordParser");
// Trie of multi-word keyword sequences that should be tokenized as a single
// function token (currently only "grouping sets", as in GROUP BY GROUPING SETS).
const trie = new KeywordTrie_1.KeywordTrie([
    ["grouping", "sets"],
]);
// Shared, module-level parser instance used by FunctionTokenReader below to
// match the multi-word keywords in the trie.
const keywordParser = new KeywordParser_1.KeywordParser(trie);
/**
 * Reads SQL function tokens.
 *
 * A lexeme is classified as a function when it is either a known multi-word
 * function keyword (e.g. "grouping sets") or a regular identifier whose next
 * significant character (skipping whitespace and comments) is an opening
 * parenthesis.
 */
class FunctionTokenReader extends BaseTokenReader_1.BaseTokenReader {
    /**
     * Try to read a function token at the current position.
     *
     * @param previous - The previously read lexeme; unused here, but part of
     *     the shared token-reader interface.
     * @returns A Function-typed lexeme, or null when the input at the current
     *     position is not a function token.
     */
    tryRead(previous) {
        if (this.isEndOfInput()) {
            return null;
        }
        // Multi-word function keywords (e.g. "grouping sets") take priority
        // over plain identifiers.
        const keyword = keywordParser.parse(this.input, this.position);
        if (keyword !== null) {
            this.position = keyword.newPosition;
            return this.createLexeme(Lexeme_1.TokenType.Function, keyword.keyword);
        }
        // Otherwise try to read a regular identifier.
        const result = stringUtils_1.StringUtils.tryReadRegularIdentifier(this.input, this.position);
        if (!result) {
            return null;
        }
        // NOTE(review): this.position is advanced here even when we return
        // null below (identifier not followed by '(') — presumably the caller
        // resets the position on a null result; confirm against the tokenizer.
        this.position = result.newPosition;
        // Peek past whitespace/comments: the identifier only counts as a
        // function name when the next significant character is '('.
        const shift = stringUtils_1.StringUtils.readWhiteSpaceAndComment(this.input, this.position).position - this.position;
        if (this.canRead(shift) && this.input[this.position + shift] === '(') {
            return this.createLexeme(Lexeme_1.TokenType.Function, result.identifier);
        }
        return null;
    }
}
exports.FunctionTokenReader = FunctionTokenReader;
//# sourceMappingURL=FunctionTokenReader.js.map