@rightcapital/phpdoc-parser
TypeScript version of PHPDoc parser with support for intersection types and generics
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TokenIterator = void 0;
const lexer_1 = require("../lexer/lexer");
const parser_exception_1 = require("./parser-exception");
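/**
 * Cursor over the lexer's token stream. Tokens are tuples addressed via
 * Lexer.VALUE_OFFSET, Lexer.TYPE_OFFSET and Lexer.LINE_OFFSET; horizontal
 * whitespace tokens are skipped transparently by default, and save points
 * allow the parser to backtrack.
 */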
class TokenIterator {
    constructor(tokens, index = 0) {
        this.savePoints = [];
        this.skippedTokenTypes = [lexer_1.Lexer.TOKEN_HORIZONTAL_WS];
        this.newline = null;
        this.tokens = tokens;
        this.index = index;
        this.skipIrrelevantTokens();
    }
    getTokens() {
        return this.tokens;
    }
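    /**
     * Concatenates the raw values of the tokens in [startPos, endPos).
     */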
    getContentBetween(startPos, endPos) {
        if (startPos < 0 || endPos > this.tokens.length) {
            throw new Error('LogicException: token range is out of bounds');
        }
        let content = '';
        for (let i = startPos; i < endPos; i++) {
            content += this.tokens[i][lexer_1.Lexer.VALUE_OFFSET];
        }
        return content;
    }
    getTokenCount() {
        return this.tokens.length;
    }
    currentTokenValue() {
        return this.tokens[this.index][lexer_1.Lexer.VALUE_OFFSET];
    }
    currentTokenType() {
        return this.tokens[this.index][lexer_1.Lexer.TYPE_OFFSET];
    }
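    /**
     * Character offset of the current token, computed by summing the lengths
     * of all preceding token values (O(n) in the current index).
     */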
    currentTokenOffset() {
        let offset = 0;
        for (let i = 0; i < this.index; i++) {
            offset += this.tokens[i][lexer_1.Lexer.VALUE_OFFSET].length;
        }
        return offset;
    }
    currentTokenLine() {
        return this.tokens[this.index][lexer_1.Lexer.LINE_OFFSET];
    }
    currentTokenIndex() {
        return this.index;
    }
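    /**
     * Index of the last token before the current one that is not in the
     * skipped set (i.e. not irrelevant whitespace/EOL).
     */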
    endIndexOfLastRelevantToken() {
        let endIndex = this.currentTokenIndex();
        endIndex--;
        while (this.skippedTokenTypes.includes(this.tokens[endIndex][lexer_1.Lexer.TYPE_OFFSET])) {
            if (!this.tokens[endIndex - 1]) {
                break;
            }
            endIndex--;
        }
        return endIndex;
    }
    isCurrentTokenValue(tokenValue) {
        return this.tokens[this.index][lexer_1.Lexer.VALUE_OFFSET] === tokenValue;
    }
    isCurrentTokenType(...tokenType) {
        return tokenType.includes(this.tokens[this.index][lexer_1.Lexer.TYPE_OFFSET]);
    }
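    /**
     * True when the token immediately preceding the current one is
     * horizontal whitespace.
     */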
    isPrecededByHorizontalWhitespace() {
        return (this.tokens[this.index - 1]?.[lexer_1.Lexer.TYPE_OFFSET] ?? -1) ===
            lexer_1.Lexer.TOKEN_HORIZONTAL_WS;
    }
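    /**
     * Asserts that the current token has the given type, records the newline
     * style on the first PHPDoc EOL token seen, then advances past it and any
     * irrelevant tokens. Throws ParserException on mismatch.
     */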
    consumeTokenType(tokenType) {
        if (this.tokens[this.index][lexer_1.Lexer.TYPE_OFFSET] !== tokenType) {
            this.throwError(tokenType);
        }
        if (tokenType === lexer_1.Lexer.TOKEN_PHPDOC_EOL) {
            if (this.newline === null) {
                this.detectNewline();
            }
        }
        this.index++;
        this.skipIrrelevantTokens();
    }
    consumeTokenValue(tokenType, tokenValue) {
        if (this.tokens[this.index][lexer_1.Lexer.TYPE_OFFSET] !== tokenType ||
            this.tokens[this.index][lexer_1.Lexer.VALUE_OFFSET] !== tokenValue) {
            this.throwError(tokenType, tokenValue);
        }
        this.index++;
        this.skipIrrelevantTokens();
    }
    tryConsumeTokenValue(tokenValue) {
        if (this.tokens[this.index][lexer_1.Lexer.VALUE_OFFSET] !== tokenValue) {
            return false;
        }
        this.index++;
        this.skipIrrelevantTokens();
        return true;
    }
    tryConsumeTokenType(tokenType) {
        if (this.tokens[this.index][lexer_1.Lexer.TYPE_OFFSET] !== tokenType) {
            return false;
        }
        if (tokenType === lexer_1.Lexer.TOKEN_PHPDOC_EOL) {
            if (this.newline === null) {
                this.detectNewline();
            }
        }
        this.index++;
        this.skipIrrelevantTokens();
        return true;
    }
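    /**
     * Remembers whether this PHPDoc block uses CRLF or LF line endings,
     * based on the leading characters of the current (EOL) token.
     */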
    detectNewline() {
        const value = this.currentTokenValue();
        if (value.substring(0, 2) === '\r\n') {
            this.newline = '\r\n';
        }
        else if (value.substring(0, 1) === '\n') {
            this.newline = '\n';
        }
    }
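    /**
     * Returns the value of the horizontal-whitespace token skipped just
     * before the current token, or '' if there was none.
     */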
    getSkippedHorizontalWhiteSpaceIfAny() {
        if (this.index > 0 &&
            this.tokens[this.index - 1][lexer_1.Lexer.TYPE_OFFSET] === lexer_1.Lexer.TOKEN_HORIZONTAL_WS) {
            return this.tokens[this.index - 1][lexer_1.Lexer.VALUE_OFFSET];
        }
        return '';
    }
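    /**
     * Concatenates raw token values, advancing the iterator, until a token
     * of one of the given types is reached (that token is not consumed).
     */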
    joinUntil(...tokenType) {
        let s = '';
        while (!tokenType.includes(this.tokens[this.index][lexer_1.Lexer.TYPE_OFFSET])) {
            s += this.tokens[this.index++][lexer_1.Lexer.VALUE_OFFSET];
        }
        return s;
    }
    next() {
        this.index++;
        this.skipIrrelevantTokens();
    }
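    /**
     * Advances past any tokens whose type is currently marked as skipped
     * (horizontal whitespace, and PHPDoc EOL when enabled), stopping at the
     * last token rather than running past the end of the stream.
     */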
    skipIrrelevantTokens() {
        if (this.tokens[this.index] === undefined) {
            return;
        }
        while (this.skippedTokenTypes.includes(this.tokens[this.index][lexer_1.Lexer.TYPE_OFFSET])) {
            if (this.tokens[this.index + 1] === undefined) {
                break;
            }
            this.index++;
        }
    }
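    // The two methods below toggle whether PHPDoc end-of-line tokens are
    // treated as irrelevant and skipped transparently.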
    addEndOfLineToSkippedTokens() {
        this.skippedTokenTypes = [
            lexer_1.Lexer.TOKEN_HORIZONTAL_WS,
            lexer_1.Lexer.TOKEN_PHPDOC_EOL,
        ];
    }
    removeEndOfLineFromSkippedTokens() {
        this.skippedTokenTypes = [lexer_1.Lexer.TOKEN_HORIZONTAL_WS];
    }
    forwardToTheEnd() {
        this.index = this.tokens.length - 1;
    }
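    /**
     * Save points implement backtracking: push before a speculative parse,
     * then drop on success or roll back to the saved index on failure.
     */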
    pushSavePoint() {
        this.savePoints.push(this.index);
    }
    dropSavePoint() {
        this.savePoints.pop();
    }
    rollback() {
        const index = this.savePoints.pop();
        if (index === undefined) {
            throw new Error('LogicException: rollback() called without a save point');
        }
        this.index = index;
    }
    throwError(expectedTokenType, expectedTokenValue = null) {
        throw new parser_exception_1.ParserException(
            this.currentTokenValue(),
            this.currentTokenType(),
            this.currentTokenOffset(),
            expectedTokenType,
            expectedTokenValue,
            this.currentTokenLine()
        );
    }
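    // The two lookaround helpers below scan across horizontal whitespace and
    // PHPDoc EOL tokens only; any other token type ends the search.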
    hasTokenImmediatelyBefore(pos, expectedTokenType) {
        const { tokens } = this;
        pos--;
        for (; pos >= 0; pos--) {
            const token = tokens[pos];
            const type = token[lexer_1.Lexer.TYPE_OFFSET];
            if (type === expectedTokenType) {
                return true;
            }
            if (![lexer_1.Lexer.TOKEN_HORIZONTAL_WS, lexer_1.Lexer.TOKEN_PHPDOC_EOL].includes(type)) {
                break;
            }
        }
        return false;
    }
    hasTokenImmediatelyAfter(pos, expectedTokenType) {
        const { tokens } = this;
        pos++;
        for (let c = tokens.length; pos < c; pos++) {
            const token = tokens[pos];
            const type = token[lexer_1.Lexer.TYPE_OFFSET];
            if (type === expectedTokenType) {
                return true;
            }
            if (![lexer_1.Lexer.TOKEN_HORIZONTAL_WS, lexer_1.Lexer.TOKEN_PHPDOC_EOL].includes(type)) {
                break;
            }
        }
        return false;
    }
    getDetectedNewline() {
        return this.newline;
    }
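    /**
     * True when the token range [startPos, endPos] is wrapped in
     * parentheses, ignoring whitespace/EOL between the parentheses and
     * the range.
     */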
    hasParentheses(startPos, endPos) {
        return (this.hasTokenImmediatelyBefore(startPos, lexer_1.Lexer.TOKEN_OPEN_PARENTHESES) &&
            this.hasTokenImmediatelyAfter(endPos, lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES));
    }
}
exports.TokenIterator = TokenIterator;
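// A minimal usage sketch. It assumes, per the PHP original that this package
// ports, that Lexer exposes tokenize(input) returning [value, type, line]
// tuples and a TOKEN_END sentinel type; the module path and names here are
// illustrative, not confirmed against this package's public API.
//
// const { Lexer } = require('../lexer/lexer');
// const { TokenIterator } = require('./token-iterator');
//
// const lexer = new Lexer();
// const iterator = new TokenIterator(lexer.tokenize('/** @param int $x */'));
// while (!iterator.isCurrentTokenType(Lexer.TOKEN_END)) {
//     console.log(iterator.currentTokenType(), iterator.currentTokenValue());
//     iterator.next();
// }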