rawsql-ts
Version:
High-performance SQL parser and AST analyzer written in TypeScript. Provides fast parsing and advanced transformation capabilities.
65 lines • 2.57 kB
JavaScript
import { BaseTokenReader } from './BaseTokenReader';
import { TokenType } from '../models/Lexeme';
import { StringUtils } from '../utils/stringUtils';
import { KeywordTrie } from '../models/KeywordTrie';
import { KeywordParser } from '../parsers/KeywordParser';
// Multi-word SQL type names that must be lexed as a single Type token.
// A KeywordTrie lets the parser greedily match the longest sequence
// (e.g. "timestamp with time zone" rather than just "timestamp").
const MULTI_WORD_TYPE_NAMES = [
    ["double", "precision"],
    ["character", "varying"],
    ["time", "without", "time", "zone"],
    ["time", "with", "time", "zone"],
    ["timestamp", "without", "time", "zone"],
    ["timestamp", "with", "time", "zone"],
];
const trie = new KeywordTrie(MULTI_WORD_TYPE_NAMES);
const typeParser = new KeywordParser(trie);
/**
 * Reads SQL type tokens: multi-word type keywords (e.g. "double precision")
 * and context-dependent single-word types after `as` or `::`.
 */
export class TypeTokenReader extends BaseTokenReader {
    /**
     * Attempt to read a type token at the current position.
     * @param previous The lexeme produced before this one, or null at the start of input.
     * @returns A Type lexeme (possibly also flagged Identifier), or null when no type token is present.
     */
    tryRead(previous) {
        if (this.isEndOfInput()) {
            return null;
        }
        // First try the multi-word type keywords via the shared trie parser.
        const matched = typeParser.parse(this.input, this.position);
        if (matched !== null) {
            this.position = matched.newPosition;
            const token = this.createLexeme(TokenType.Type, matched.keyword);
            // Preserve inline comments that may trail a multi-word type keyword
            // (e.g. "timestamp with time zone -- note").
            if (matched.comments && matched.comments.length > 0) {
                const existing = token.positionedComments != null ? token.positionedComments : [];
                token.positionedComments = existing.concat([
                    { position: 'after', comments: matched.comments.slice() }
                ]);
            }
            return token;
        }
        // A single-word type is only recognizable from context, so the
        // previous token is required from here on.
        if (previous === null) {
            return null;
        }
        const ident = StringUtils.tryReadRegularIdentifier(this.input, this.position);
        if (!ident) {
            return null;
        }
        // NOTE(review): the position is advanced here even on the null-return
        // paths below — presumably the caller restores it on failure; verify.
        this.position = ident.newPosition;
        // `as <name>`: may be a cast target or a plain alias, so flag both kinds.
        if ((previous.type & TokenType.Command) && previous.value === "as") {
            return this.createLexeme(TokenType.Identifier | TokenType.Type, ident.identifier);
        }
        // PostgreSQL cast operator: `expr::<type>`.
        if ((previous.type & TokenType.Operator) && previous.value === "::") {
            return this.createLexeme(TokenType.Type, ident.identifier);
        }
        return null;
    }
}
//# sourceMappingURL=TypeTokenReader.js.map