dt-sql-parser

SQL Parsers for BigData, built with antlr4

import { processTokenCandidates } from '../common/tokenUtils';
import { TrinoSqlLexer } from '../../lib/trino/TrinoSqlLexer';
import { TrinoSqlParser } from '../../lib/trino/TrinoSqlParser';
import { BasicSQL } from '../common/basicSQL';
import { EntityContextType } from '../common/types';
import { TrinoEntityCollector } from './trinoEntityCollector';
import { TrinoErrorListener } from './trinoErrorListener';
import { TrinoSqlSplitListener } from './trinoSplitListener';
import { TrinoSemanticContextCollector } from './trinoSemanticContextCollector';

export { TrinoEntityCollector, TrinoSqlSplitListener };

export class TrinoSQL extends BasicSQL {
    constructor() {
        super(...arguments);
        // Parser rules that code completion should report as entity candidates.
        this.preferredRules = new Set([
            TrinoSqlParser.RULE_catalogRef,
            TrinoSqlParser.RULE_catalogNameCreate,
            TrinoSqlParser.RULE_schemaRef,
            TrinoSqlParser.RULE_schemaNameCreate,
            TrinoSqlParser.RULE_tableRef,
            TrinoSqlParser.RULE_tableNameCreate,
            TrinoSqlParser.RULE_viewRef,
            TrinoSqlParser.RULE_viewNameCreate,
            TrinoSqlParser.RULE_functionName,
            TrinoSqlParser.RULE_functionNameCreate,
            TrinoSqlParser.RULE_columnRef,
            TrinoSqlParser.RULE_columnName,
            TrinoSqlParser.RULE_columnNameCreate,
        ]);
    }
    createLexerFromCharStream(charStreams) {
        return new TrinoSqlLexer(charStreams);
    }
    createParserFromTokenStream(tokenStream) {
        return new TrinoSqlParser(tokenStream);
    }
    get splitListener() {
        return new TrinoSqlSplitListener();
    }
    createErrorListener(_errorListener) {
        const parserContext = this;
        return new TrinoErrorListener(_errorListener, parserContext, this.preferredRules);
    }
    createEntityCollector(input, allTokens, caretTokenIndex) {
        return new TrinoEntityCollector(input, allTokens, caretTokenIndex);
    }
    createSemanticContextCollector(input, caretPosition, allTokens, options) {
        return new TrinoSemanticContextCollector(input, caretPosition, allTokens, options);
    }
    // Maps candidate rules collected at the caret to EntityContextType suggestions,
    // and converts candidate tokens to keyword suggestions.
    processCandidates(candidates, allTokens, caretTokenIndex) {
        const originalSyntaxSuggestions = [];
        const keywords = [];
        for (let candidate of candidates.rules) {
            const [ruleType, candidateRule] = candidate;
            // Tokens from the start of the candidate rule up to (and including) the caret token.
            const tokenRanges = allTokens.slice(candidateRule.startTokenIndex, caretTokenIndex + 1);
            let syntaxContextType = void 0;
            switch (ruleType) {
                case TrinoSqlParser.RULE_catalogRef: {
                    syntaxContextType = EntityContextType.CATALOG;
                    break;
                }
                case TrinoSqlParser.RULE_catalogNameCreate: {
                    syntaxContextType = EntityContextType.CATALOG_CREATE;
                    break;
                }
                case TrinoSqlParser.RULE_schemaRef: {
                    syntaxContextType = EntityContextType.DATABASE;
                    break;
                }
                case TrinoSqlParser.RULE_schemaNameCreate: {
                    syntaxContextType = EntityContextType.DATABASE_CREATE;
                    break;
                }
                case TrinoSqlParser.RULE_tableRef: {
                    syntaxContextType = EntityContextType.TABLE;
                    break;
                }
                case TrinoSqlParser.RULE_tableNameCreate: {
                    syntaxContextType = EntityContextType.TABLE_CREATE;
                    break;
                }
                case TrinoSqlParser.RULE_viewRef: {
                    syntaxContextType = EntityContextType.VIEW;
                    break;
                }
                case TrinoSqlParser.RULE_viewNameCreate: {
                    syntaxContextType = EntityContextType.VIEW_CREATE;
                    break;
                }
                case TrinoSqlParser.RULE_functionName: {
                    syntaxContextType = EntityContextType.FUNCTION;
                    break;
                }
                case TrinoSqlParser.RULE_functionNameCreate: {
                    syntaxContextType = EntityContextType.FUNCTION_CREATE;
                    break;
                }
                case TrinoSqlParser.RULE_columnNameCreate: {
                    syntaxContextType = EntityContextType.COLUMN_CREATE;
                    break;
                }
                case TrinoSqlParser.RULE_columnRef: {
                    syntaxContextType = EntityContextType.COLUMN;
                    break;
                }
                case TrinoSqlParser.RULE_columnName: {
                    // Treat a bare columnName as a column suggestion only inside clauses
                    // where a column reference is expected.
                    if (candidateRule.ruleList.includes(TrinoSqlParser.RULE_groupBy) ||
                        candidateRule.ruleList.includes(TrinoSqlParser.RULE_sortItem) ||
                        candidateRule.ruleList.includes(TrinoSqlParser.RULE_whereClause) ||
                        candidateRule.ruleList.includes(TrinoSqlParser.RULE_havingClause) ||
                        candidateRule.ruleList.includes(TrinoSqlParser.RULE_partitionBy) ||
                        candidateRule.ruleList.includes(TrinoSqlParser.RULE_whenClause) ||
                        candidateRule.ruleList.includes(TrinoSqlParser.RULE_relation)) {
                        syntaxContextType = EntityContextType.COLUMN;
                    }
                }
                default:
                    break;
            }
            if (syntaxContextType) {
                originalSyntaxSuggestions.push({
                    syntaxContextType,
                    wordRanges: tokenRanges,
                });
            }
        }
        const processedKeywords = processTokenCandidates(this._parser, candidates.tokens);
        keywords.push(...processedKeywords);
        return {
            syntax: originalSyntaxSuggestions,
            keywords,
        };
    }
}
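For context, here is a minimal usage sketch of this dialect class through the surface it inherits from BasicSQL. The method names follow the validate and getSuggestionAtCaretPosition API that dt-sql-parser documents for its dialect parsers; the SQL text and caret position are illustrative only, not part of this file.

// Usage sketch (assumed BasicSQL API; example inputs are made up).
import { TrinoSQL } from 'dt-sql-parser';

const parser = new TrinoSQL();

// Syntax validation: returns a list of parse errors (empty when the SQL is valid).
const errors = parser.validate('SELECT id, name FROM orders;');

// Code completion: collect suggestions at a caret position (1-based line/column).
const suggestions = parser.getSuggestionAtCaretPosition('SELECT * FROM ', {
    lineNumber: 1,
    column: 15,
});

// suggestions?.syntax holds the entries built by processCandidates above,
// e.g. { syntaxContextType: EntityContextType.TABLE, wordRanges: [...] },
// while suggestions?.keywords holds the keyword strings from processTokenCandidates.

Each syntaxContextType in the result corresponds to one of the preferredRules declared in the constructor, which is what lets an editor decide whether to offer catalogs, schemas, tables, functions, or columns at the caret.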