rawsql-ts

High-performance SQL parser and AST analyzer written in TypeScript. Provides fast parsing and advanced transformation capabilities.

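The listing that follows is the compiled SequenceParser.js module: CreateSequenceParser and AlterSequenceParser parse CREATE SEQUENCE and ALTER SEQUENCE statements, and both delegate their option clauses to a shared parseSequenceClauses helper. For orientation, here is a reconstructed sketch of the clause objects that helper produces; the kind names are taken directly from the code below, but the type itself is an assumption (the real definitions live in ../models/DDLStatements and may differ):

// Reconstructed sketch only; the actual exported types live in ../models/DDLStatements.
// "value" entries are nodes produced by ValueParser and "target" is a QualifiedName from
// FullNameParser; both are left as unknown here.
type SequenceClause =
    | { kind: "increment" | "start"; value: unknown }                    // INCREMENT [BY] n, START [WITH] n
    | { kind: "minValue" | "maxValue" | "cache"; value?: unknown; noValue?: true } // MINVALUE/MAXVALUE/CACHE n, or NO MINVALUE/MAXVALUE/CACHE
    | { kind: "cycle"; enabled: boolean }                                // CYCLE / NO CYCLE
    | { kind: "restart"; value?: unknown }                               // RESTART [[WITH] n]
    | { kind: "ownedBy"; none?: true; target?: unknown };                // OWNED BY table.column / OWNED BY NONE
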
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.AlterSequenceParser = exports.CreateSequenceParser = void 0; const DDLStatements_1 = require("../models/DDLStatements"); const FullNameParser_1 = require("./FullNameParser"); const ValueComponent_1 = require("../models/ValueComponent"); const SqlTokenizer_1 = require("./SqlTokenizer"); const ValueParser_1 = require("./ValueParser"); const CREATE_SEQUENCE_COMMANDS = new Set([ "create sequence", "create temporary sequence", "create temp sequence" ]); const SEQUENCE_CLAUSE_STARTERS = new Set([ "increment", "start", "minvalue", "maxvalue", "cache", "cycle", "owned", "no" ]); class CreateSequenceParser { static parse(sql) { var _a, _b; const tokenizer = new SqlTokenizer_1.SqlTokenizer(sql); const lexemes = tokenizer.readLexemes(); const result = this.parseFromLexeme(lexemes, 0); // Ensure the statement was fully consumed and no extra tokens remain. if (result.newIndex < lexemes.length) { const unexpected = lexemes[result.newIndex]; const position = (_b = (_a = unexpected.position) === null || _a === void 0 ? void 0 : _a.startPosition) !== null && _b !== void 0 ? _b : 0; throw new Error(`[CreateSequenceParser] Unexpected token "${unexpected.value}" at position ${position}.`); } return result.value; } static parseFromLexeme(lexemes, index) { var _a, _b; let idx = index; // The cursor should start on a CREATE SEQUENCE command (possibly with TEMP/TEMPORARY). const command = (_a = lexemes[idx]) === null || _a === void 0 ? void 0 : _a.value.toLowerCase(); if (!command || !CREATE_SEQUENCE_COMMANDS.has(command)) { throw new Error(`[CreateSequenceParser] Expected CREATE SEQUENCE at index ${idx}.`); } idx++; let ifNotExists = false; // Accept an optional IF NOT EXISTS clause. if (((_b = lexemes[idx]) === null || _b === void 0 ? void 0 : _b.value.toLowerCase()) === "if not exists") { ifNotExists = true; idx++; } // Parse the qualified name of the target sequence. const nameResult = FullNameParser_1.FullNameParser.parseFromLexeme(lexemes, idx); const sequenceName = new ValueComponent_1.QualifiedName(nameResult.namespaces, nameResult.name); idx = nameResult.newIndex; // Gather sequence option clauses that follow the target identifier. const optionsResult = parseSequenceClauses(lexemes, idx); idx = optionsResult.newIndex; return { value: new DDLStatements_1.CreateSequenceStatement({ sequenceName, ifNotExists, clauses: optionsResult.clauses }), newIndex: idx }; } } exports.CreateSequenceParser = CreateSequenceParser; class AlterSequenceParser { static parse(sql) { var _a, _b; const tokenizer = new SqlTokenizer_1.SqlTokenizer(sql); const lexemes = tokenizer.readLexemes(); const result = this.parseFromLexeme(lexemes, 0); // Guard against unexpected trailing tokens after a valid ALTER SEQUENCE statement. if (result.newIndex < lexemes.length) { const unexpected = lexemes[result.newIndex]; const position = (_b = (_a = unexpected.position) === null || _a === void 0 ? void 0 : _a.startPosition) !== null && _b !== void 0 ? _b : 0; throw new Error(`[AlterSequenceParser] Unexpected token "${unexpected.value}" at position ${position}.`); } return result.value; } static parseFromLexeme(lexemes, index) { var _a, _b; let idx = index; // Expect ALTER SEQUENCE as the leading keywords. if (((_a = lexemes[idx]) === null || _a === void 0 ? 
void 0 : _a.value.toLowerCase()) !== "alter sequence") { throw new Error(`[AlterSequenceParser] Expected ALTER SEQUENCE at index ${idx}.`); } idx++; let ifExists = false; // Consume the optional IF EXISTS qualifier. if (((_b = lexemes[idx]) === null || _b === void 0 ? void 0 : _b.value.toLowerCase()) === "if exists") { ifExists = true; idx++; } // Parse the fully qualified sequence identifier. const nameResult = FullNameParser_1.FullNameParser.parseFromLexeme(lexemes, idx); const sequenceName = new ValueComponent_1.QualifiedName(nameResult.namespaces, nameResult.name); idx = nameResult.newIndex; // Capture any option clauses that follow. const optionsResult = parseSequenceClauses(lexemes, idx); idx = optionsResult.newIndex; return { value: new DDLStatements_1.AlterSequenceStatement({ sequenceName, ifExists, clauses: optionsResult.clauses }), newIndex: idx }; } } exports.AlterSequenceParser = AlterSequenceParser; function parseSequenceClauses(lexemes, index) { var _a, _b, _c, _d, _e; let idx = index; // Continue parsing clause-by-clause until a non-clause token stops the loop. const clauses = []; while (idx < lexemes.length) { const token = (_a = lexemes[idx]) === null || _a === void 0 ? void 0 : _a.value.toLowerCase(); // Terminate when there are no more tokens left to consume. if (!token) { break; } // Recognize INCREMENT BY <value> while tolerating the optional BY keyword. if (token === "increment") { idx++; idx = consumeOptionalKeyword(lexemes, idx, "by"); const valueResult = ValueParser_1.ValueParser.parseFromLexeme(lexemes, idx); clauses.push({ kind: "increment", value: valueResult.value }); idx = valueResult.newIndex; continue; } // START WITH <value> clause prefers a WITH keyword but does not require it. if (token === "start") { idx++; idx = consumeOptionalKeyword(lexemes, idx, "with"); const valueResult = ValueParser_1.ValueParser.parseFromLexeme(lexemes, idx); clauses.push({ kind: "start", value: valueResult.value }); idx = valueResult.newIndex; continue; } // MINVALUE <value> clause captures the lower bound directly. if (token === "minvalue") { idx++; const valueResult = ValueParser_1.ValueParser.parseFromLexeme(lexemes, idx); clauses.push({ kind: "minValue", value: valueResult.value }); idx = valueResult.newIndex; continue; } // MAXVALUE <value> clause captures the upper bound directly. if (token === "maxvalue") { idx++; const valueResult = ValueParser_1.ValueParser.parseFromLexeme(lexemes, idx); clauses.push({ kind: "maxValue", value: valueResult.value }); idx = valueResult.newIndex; continue; } // CACHE <value> clause records the in-memory cache size. if (token === "cache") { idx++; const valueResult = ValueParser_1.ValueParser.parseFromLexeme(lexemes, idx); clauses.push({ kind: "cache", value: valueResult.value }); idx = valueResult.newIndex; continue; } // CYCLE enables wrapping behavior. if (token === "cycle") { clauses.push({ kind: "cycle", enabled: true }); idx++; continue; } // RESTART may be followed by an optional WITH keyword and/or a value before the next clause starts. if (token === "restart") { idx++; idx = consumeOptionalKeyword(lexemes, idx, "with"); let restartValue; const nextToken = (_b = lexemes[idx]) === null || _b === void 0 ? 
void 0 : _b.value.toLowerCase(); if (nextToken && !SEQUENCE_CLAUSE_STARTERS.has(nextToken)) { const restartResult = ValueParser_1.ValueParser.parseFromLexeme(lexemes, idx); restartValue = restartResult.value; idx = restartResult.newIndex; } clauses.push({ kind: "restart", value: restartValue }); continue; } // OWNED BY clause can point to a target column or NONE. if (token === "owned") { idx++; if (((_c = lexemes[idx]) === null || _c === void 0 ? void 0 : _c.value.toLowerCase()) !== "by") { throw new Error(`[SequenceParser] Expected BY after OWNED at index ${idx}.`); } idx++; const nextToken = (_d = lexemes[idx]) === null || _d === void 0 ? void 0 : _d.value.toLowerCase(); if (nextToken === "none") { clauses.push({ kind: "ownedBy", none: true }); idx++; continue; } const ownerResult = FullNameParser_1.FullNameParser.parseFromLexeme(lexemes, idx); const ownerName = new ValueComponent_1.QualifiedName(ownerResult.namespaces, ownerResult.name); clauses.push({ kind: "ownedBy", target: ownerName }); idx = ownerResult.newIndex; continue; } // NO {MINVALUE|MAXVALUE|CACHE|CYCLE} disables the respective default clauses. if (token === "no") { const nextToken = (_e = lexemes[idx + 1]) === null || _e === void 0 ? void 0 : _e.value.toLowerCase(); if (nextToken === "minvalue") { clauses.push({ kind: "minValue", noValue: true }); idx += 2; continue; } if (nextToken === "maxvalue") { clauses.push({ kind: "maxValue", noValue: true }); idx += 2; continue; } if (nextToken === "cache") { clauses.push({ kind: "cache", noValue: true }); idx += 2; continue; } if (nextToken === "cycle") { clauses.push({ kind: "cycle", enabled: false }); idx += 2; continue; } } break; } return { clauses, newIndex: idx }; } function consumeOptionalKeyword(lexemes, index, keyword) { var _a; // Skip an optional keyword that may appear before a value (e.g., BY or WITH). if (((_a = lexemes[index]) === null || _a === void 0 ? void 0 : _a.value.toLowerCase()) === keyword) { return index + 1; } return index; } //# sourceMappingURL=SequenceParser.js.map
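
A minimal usage sketch follows. Importing these classes from the "rawsql-ts" entry point is an assumption (only the class and method names are confirmed by the file above); they can otherwise be loaded from the compiled parsers output directly.

// Hedged sketch: the package-level re-export of these classes is assumed, not shown above.
import { AlterSequenceParser, CreateSequenceParser } from "rawsql-ts";

// parse() tokenizes the SQL, parses a single statement, and throws if any tokens remain.
const created = CreateSequenceParser.parse(
    "CREATE SEQUENCE IF NOT EXISTS app.order_id_seq START WITH 100 INCREMENT BY 1 NO CYCLE"
);
const altered = AlterSequenceParser.parse(
    "ALTER SEQUENCE IF EXISTS app.order_id_seq RESTART WITH 1 OWNED BY app.orders.id"
);

// The results are CreateSequenceStatement / AlterSequenceStatement instances built from the
// qualified sequence name and the parsed option clauses (see the constructors above).
console.log(created, altered);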