@rightcapital/phpdoc-parser
TypeScript version of PHPDoc parser with support for intersection types and generics
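A minimal usage sketch, assuming the package's root export exposes Lexer, TokenIterator, ConstExprParser, TypeParser, and PhpDocParser with the same constructor shapes as the upstream phpstan/phpdoc-parser 1.x API this port follows (check the package README for the exact entry points):

const {
    Lexer,
    TokenIterator,
    ConstExprParser,
    TypeParser,
    PhpDocParser,
} = require('@rightcapital/phpdoc-parser'); // assumed root exports

// Wire the parser together the way the constructor below expects: a TypeParser
// plus a ConstExprParser, with every optional flag left at its default.
const constExprParser = new ConstExprParser();
const typeParser = new TypeParser(constExprParser);
const phpDocParser = new PhpDocParser(typeParser, constExprParser);

// Tokenize a doc block and parse it into a PhpDocNode AST.
const tokens = new TokenIterator(new Lexer().tokenize('/** @param Lorem $a some description */'));
const phpDocNode = phpDocParser.parse(tokens);
console.log(phpDocNode); // PhpDocNode holding the parsed children

The compiled source of the parser follows.
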
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PhpDocParser = void 0;
const assert_tag_method_value_node_1 = require("../ast/php-doc/assert-tag-method-value-node");
const assert_tag_property_value_node_1 = require("../ast/php-doc/assert-tag-property-value-node");
const assert_tag_value_node_1 = require("../ast/php-doc/assert-tag-value-node");
const deprecated_tag_value_node_1 = require("../ast/php-doc/deprecated-tag-value-node");
const extends_tag_value_node_1 = require("../ast/php-doc/extends-tag-value-node");
const generic_tag_value_node_1 = require("../ast/php-doc/generic-tag-value-node");
const implements_tag_value_node_1 = require("../ast/php-doc/implements-tag-value-node");
const invalid_tag_value_node_1 = require("../ast/php-doc/invalid-tag-value-node");
const method_tag_value_node_1 = require("../ast/php-doc/method-tag-value-node");
const method_tag_value_parameter_node_1 = require("../ast/php-doc/method-tag-value-parameter-node");
const mixin_tag_value_node_1 = require("../ast/php-doc/mixin-tag-value-node");
const param_out_tag_value_node_1 = require("../ast/php-doc/param-out-tag-value-node");
const param_tag_value_node_1 = require("../ast/php-doc/param-tag-value-node");
const php_doc_node_1 = require("../ast/php-doc/php-doc-node");
const php_doc_tag_node_1 = require("../ast/php-doc/php-doc-tag-node");
const php_doc_text_node_1 = require("../ast/php-doc/php-doc-text-node");
const property_tag_value_node_1 = require("../ast/php-doc/property-tag-value-node");
const return_tag_value_node_1 = require("../ast/php-doc/return-tag-value-node");
const self_out_tag_value_node_1 = require("../ast/php-doc/self-out-tag-value-node");
const template_tag_value_node_1 = require("../ast/php-doc/template-tag-value-node");
const throws_tag_value_node_1 = require("../ast/php-doc/throws-tag-value-node");
const type_alias_import_tag_value_node_1 = require("../ast/php-doc/type-alias-import-tag-value-node");
const type_alias_tag_value_node_1 = require("../ast/php-doc/type-alias-tag-value-node");
const typeless_param_tag_value_node_1 = require("../ast/php-doc/typeless-param-tag-value-node");
const uses_tag_value_node_1 = require("../ast/php-doc/uses-tag-value-node");
const var_tag_value_node_1 = require("../ast/php-doc/var-tag-value-node");
const identifier_type_node_1 = require("../ast/type/identifier-type-node");
const invalid_type_node_1 = require("../ast/type/invalid-type-node");
const types_1 = require("../ast/types");
const lexer_1 = require("../lexer/lexer");
const DISALLOWED_DESCRIPTION_START_TOKENS = [
lexer_1.Lexer.TOKEN_UNION,
lexer_1.Lexer.TOKEN_INTERSECTION,
];
class PhpDocParser {
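    // typeParser and constantExprParser handle nested type and constant-expression
    // parsing; usedAttributes decides whether start/end line and index attributes
    // are attached to parsed nodes (see enrichWithAttributes below).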
constructor(typeParser, constantExprParser, requireWhitespaceBeforeDescription = false, preserveTypeAliasesWithInvalidTypes = false, usedAttributes = {
lines: false,
indexes: false,
}, parseDoctrineAnnotations = false, textBetweenTagsBelongsToDescription = false) {
var _a, _b;
this.typeParser = typeParser;
this.constantExprParser = constantExprParser;
this.requireWhitespaceBeforeDescription = requireWhitespaceBeforeDescription;
this.preserveTypeAliasesWithInvalidTypes = preserveTypeAliasesWithInvalidTypes;
this.parseDoctrineAnnotations = parseDoctrineAnnotations;
this.textBetweenTagsBelongsToDescription = textBetweenTagsBelongsToDescription;
this.useLinesAttributes = (_a = usedAttributes.lines) !== null && _a !== void 0 ? _a : false;
this.useIndexAttributes = (_b = usedAttributes.indexes) !== null && _b !== void 0 ? _b : false;
}
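    // Parses a whole PHPDoc block (TOKEN_OPEN_PHPDOC .. TOKEN_CLOSE_PHPDOC) into a
    // PhpDocNode. If the closing token cannot be consumed, the error is wrapped in
    // a PhpDocTagNode with an InvalidTagValueNode and the rest of the input is skipped.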
parse(tokens) {
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PHPDOC);
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL);
const children = [];
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PHPDOC)) {
children.push(this.parseChild(tokens));
while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL) &&
!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PHPDOC)) {
children.push(this.parseChild(tokens));
}
}
try {
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PHPDOC);
}
catch (error) {
let name = '';
let startLine = tokens.currentTokenLine();
let startIndex = tokens.currentTokenIndex();
if (children.length > 0) {
const lastChild = children[children.length - 1];
if (lastChild instanceof php_doc_tag_node_1.PhpDocTagNode) {
name = lastChild.name;
startLine = tokens.currentTokenLine();
startIndex = tokens.currentTokenIndex();
}
}
const tag = new php_doc_tag_node_1.PhpDocTagNode(name, this.enrichWithAttributes(tokens, new invalid_tag_value_node_1.InvalidTagValueNode(`${error === null || error === void 0 ? void 0 : error.message}`, error), startLine, startIndex));
tokens.forwardToTheEnd();
return this.enrichWithAttributes(tokens, new php_doc_node_1.PhpDocNode([
this.enrichWithAttributes(tokens, tag, startLine, startIndex),
]), 1, 0);
}
return this.enrichWithAttributes(tokens, new php_doc_node_1.PhpDocNode(children), 1, 0);
}
parseChild(tokens) {
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_PHPDOC_TAG)) {
const startLine = tokens.currentTokenLine();
const startIndex = tokens.currentTokenIndex();
return this.enrichWithAttributes(tokens, this.parseTag(tokens), startLine, startIndex);
}
const startLine = tokens.currentTokenLine();
const startIndex = tokens.currentTokenIndex();
const text = this.parseText(tokens);
return this.enrichWithAttributes(tokens, text, startLine, startIndex);
}
enrichWithAttributes(tokens, node, startLine, startIndex) {
if (this.useLinesAttributes) {
node.setAttribute(types_1.Attribute.START_LINE, startLine);
node.setAttribute(types_1.Attribute.END_LINE, tokens.currentTokenLine());
}
if (this.useIndexAttributes) {
node.setAttribute(types_1.Attribute.START_INDEX, startIndex);
node.setAttribute(types_1.Attribute.END_INDEX, tokens.endIndexOfLastRelevantToken());
}
return node;
}
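    // Collects free-form text, possibly spanning several lines, until the next tag
    // or the end of the doc block; savepoints let the iterator roll back so the
    // trailing newline before a tag is not swallowed.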
parseText(tokens) {
var _a, _b;
let text = '';
let endTokens = [
lexer_1.Lexer.TOKEN_PHPDOC_EOL,
lexer_1.Lexer.TOKEN_CLOSE_PHPDOC,
lexer_1.Lexer.TOKEN_END,
];
if (this.textBetweenTagsBelongsToDescription) {
endTokens = [lexer_1.Lexer.TOKEN_CLOSE_PHPDOC, lexer_1.Lexer.TOKEN_END];
}
let savepoint = false;
while (this.textBetweenTagsBelongsToDescription ||
!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL)) {
const tmpText = tokens.getSkippedHorizontalWhiteSpaceIfAny() +
tokens.joinUntil(lexer_1.Lexer.TOKEN_PHPDOC_EOL, ...endTokens);
text += tmpText;
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL)) {
break;
}
if (this.textBetweenTagsBelongsToDescription) {
if (!savepoint) {
tokens.pushSavePoint();
savepoint = true;
}
else if (tmpText !== '') {
tokens.dropSavePoint();
tokens.pushSavePoint();
}
}
tokens.pushSavePoint();
tokens.next();
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_PHPDOC_TAG, ...endTokens)) {
tokens.rollback();
break;
}
tokens.dropSavePoint();
text += (_a = tokens.getDetectedNewline()) !== null && _a !== void 0 ? _a : '\n';
}
if (savepoint) {
tokens.rollback();
text = text.replace(new RegExp(`[${(_b = tokens.getDetectedNewline()) !== null && _b !== void 0 ? _b : '\n'}]+$`, 'g'), '');
}
return new php_doc_text_node_1.PhpDocTextNode(text.trim());
}
parseTag(tokens) {
const tag = tokens.currentTokenValue();
tokens.next();
const value = this.parseTagValue(tokens, tag);
return new php_doc_tag_node_1.PhpDocTagNode(tag, value);
}
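    // Dispatches on the tag name to the matching tag-value parser. If that parser
    // throws, the token stream is rolled back and the remaining text is returned
    // as an InvalidTagValueNode.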
parseTagValue(tokens, tag) {
const startLine = tokens.currentTokenLine();
const startIndex = tokens.currentTokenIndex();
let tagValue;
try {
tokens.pushSavePoint();
switch (tag) {
case '@param':
case '@phpstan-param':
case '@psalm-param':
tagValue = this.parseParamTagValue(tokens);
break;
case '@var':
case '@phpstan-var':
case '@psalm-var':
tagValue = this.parseVarTagValue(tokens);
break;
case '@return':
case '@phpstan-return':
case '@psalm-return':
tagValue = this.parseReturnTagValue(tokens);
break;
case '@throws':
case '@phpstan-throws':
tagValue = this.parseThrowsTagValue(tokens);
break;
case '@mixin':
tagValue = this.parseMixinTagValue(tokens);
break;
case '@deprecated':
tagValue = this.parseDeprecatedTagValue(tokens);
break;
case '@property':
case '@property-read':
case '@property-write':
case '@phpstan-property':
case '@phpstan-property-read':
case '@phpstan-property-write':
case '@psalm-property':
case '@psalm-property-read':
case '@psalm-property-write':
tagValue = this.parsePropertyTagValue(tokens);
break;
case '@method':
case '@phpstan-method':
case '@psalm-method':
tagValue = this.parseMethodTagValue(tokens);
break;
case '@template':
case '@phpstan-template':
case '@psalm-template':
case '@template-covariant':
case '@phpstan-template-covariant':
case '@psalm-template-covariant':
case '@template-contravariant':
case '@phpstan-template-contravariant':
case '@psalm-template-contravariant':
tagValue = this.parseTemplateTagValue(tokens, true);
break;
case '@extends':
case '@phpstan-extends':
case '@template-extends':
tagValue = this.parseExtendsTagValue('@extends', tokens);
break;
case '@implements':
case '@phpstan-implements':
case '@template-implements':
tagValue = this.parseExtendsTagValue('@implements', tokens);
break;
case '@use':
case '@phpstan-use':
case '@template-use':
tagValue = this.parseExtendsTagValue('@use', tokens);
break;
case '@phpstan-type':
case '@psalm-type':
tagValue = this.parseTypeAliasTagValue(tokens);
break;
case '@phpstan-import-type':
case '@psalm-import-type':
tagValue = this.parseTypeAliasImportTagValue(tokens);
break;
case '@phpstan-assert':
case '@phpstan-assert-if-true':
case '@phpstan-assert-if-false':
case '@psalm-assert':
case '@psalm-assert-if-true':
case '@psalm-assert-if-false':
tagValue = this.parseAssertTagValue(tokens);
break;
case '@phpstan-this-out':
case '@phpstan-self-out':
case '@psalm-this-out':
case '@psalm-self-out':
tagValue = this.parseSelfOutTagValue(tokens);
break;
case '@param-out':
case '@phpstan-param-out':
case '@psalm-param-out':
tagValue = this.parseParamOutTagValue(tokens);
break;
default:
tagValue = new generic_tag_value_node_1.GenericTagValueNode(this.parseOptionalDescription(tokens));
break;
}
tokens.dropSavePoint();
}
catch (e) {
tokens.rollback();
return new invalid_tag_value_node_1.InvalidTagValueNode(this.parseOptionalDescription(tokens), e);
}
return this.enrichWithAttributes(tokens, tagValue, startLine, startIndex);
}
parseParamTagValue(tokens) {
let type = null;
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_REFERENCE, lexer_1.Lexer.TOKEN_VARIADIC, lexer_1.Lexer.TOKEN_VARIABLE)) {
type = this.typeParser.parse(tokens);
}
const isReference = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_REFERENCE);
const isVariadic = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_VARIADIC);
const name = this.parseRequiredVariableName(tokens);
const description = this.parseOptionalDescription(tokens);
if (type !== null) {
return new param_tag_value_node_1.ParamTagValueNode(type, isVariadic, name, description, isReference);
}
return new typeless_param_tag_value_node_1.TypelessParamTagValueNode(isVariadic, name, description, isReference);
}
parseVarTagValue(tokens) {
const type = this.typeParser.parse(tokens);
const name = this.parseOptionalVariableName(tokens);
const description = this.parseOptionalDescription(tokens, name === '');
return new var_tag_value_node_1.VarTagValueNode(type, name, description);
}
parseReturnTagValue(tokens) {
const type = this.typeParser.parse(tokens);
const description = this.parseOptionalDescription(tokens, true);
return new return_tag_value_node_1.ReturnTagValueNode(type, description);
}
parseThrowsTagValue(tokens) {
const type = this.typeParser.parse(tokens);
const description = this.parseOptionalDescription(tokens, true);
return new throws_tag_value_node_1.ThrowsTagValueNode(type, description);
}
parseMixinTagValue(tokens) {
const type = this.typeParser.parse(tokens);
const description = this.parseOptionalDescription(tokens, true);
return new mixin_tag_value_node_1.MixinTagValueNode(type, description);
}
parseDeprecatedTagValue(tokens) {
const description = this.parseOptionalDescription(tokens);
return new deprecated_tag_value_node_1.DeprecatedTagValueNode(description);
}
parsePropertyTagValue(tokens) {
const type = this.typeParser.parse(tokens);
const name = this.parseRequiredVariableName(tokens);
const description = this.parseOptionalDescription(tokens);
return new property_tag_value_node_1.PropertyTagValueNode(type, name, description);
}
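    // Parses "@method [static] [ReturnType] name<T, ...>(parameters) description".
    // A lone identifier is taken as the method name; when it follows "static" with
    // no explicit return type, the return type becomes "static".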
parseMethodTagValue(tokens) {
let isStatic = tokens.tryConsumeTokenValue('static');
let startLine = tokens.currentTokenLine();
let startIndex = tokens.currentTokenIndex();
const returnTypeOrName = this.typeParser.parse(tokens);
let returnType;
let name;
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER)) {
returnType = returnTypeOrName;
name = tokens.currentTokenValue();
tokens.next();
}
else if (returnTypeOrName instanceof identifier_type_node_1.IdentifierTypeNode) {
returnType = isStatic
? this.typeParser.enrichWithAttributes(tokens, new identifier_type_node_1.IdentifierTypeNode('static'), startLine, startIndex)
: null;
name = returnTypeOrName.name;
isStatic = false;
}
else {
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
}
const templateTypes = [];
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_OPEN_ANGLE_BRACKET)) {
do {
startLine = tokens.currentTokenLine();
startIndex = tokens.currentTokenIndex();
const templateType = this.parseTemplateTagValue(tokens, false);
templateTypes.push(this.enrichWithAttributes(tokens, templateType, startLine, startIndex));
} while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA));
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_ANGLE_BRACKET);
}
const parameters = [];
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES);
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES)) {
const parameter = this.parseMethodTagValueParameter(tokens);
parameters.push(parameter);
while (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_COMMA)) {
parameters.push(this.parseMethodTagValueParameter(tokens));
}
}
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES);
const description = this.parseOptionalDescription(tokens);
return new method_tag_value_node_1.MethodTagValueNode(isStatic, returnType, name, parameters, description, templateTypes);
}
parseMethodTagValueParameter(tokens) {
const startLine = tokens.currentTokenLine();
const startIndex = tokens.currentTokenIndex();
let type = null;
switch (tokens.currentTokenType()) {
case lexer_1.Lexer.TOKEN_IDENTIFIER:
case lexer_1.Lexer.TOKEN_OPEN_PARENTHESES:
case lexer_1.Lexer.TOKEN_NULLABLE:
type = this.typeParser.parse(tokens);
break;
default:
type = null;
}
const isReference = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_REFERENCE);
const isVariadic = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_VARIADIC);
const name = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_VARIABLE);
let defaultValue = null;
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_EQUAL)) {
defaultValue = this.constantExprParser.parse(tokens);
}
return this.enrichWithAttributes(tokens, new method_tag_value_parameter_node_1.MethodTagValueParameterNode(type, isReference, isVariadic, name, defaultValue), startLine, startIndex);
}
parseTemplateTagValue(tokens, parseDescription) {
const name = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
let bound = null;
if (tokens.tryConsumeTokenValue('of') ||
tokens.tryConsumeTokenValue('as')) {
bound = this.typeParser.parse(tokens);
}
let defaultValue = null;
if (tokens.tryConsumeTokenValue('=')) {
defaultValue = this.typeParser.parse(tokens);
}
let description = '';
if (parseDescription) {
description = this.parseOptionalDescription(tokens);
}
return new template_tag_value_node_1.TemplateTagValueNode(name, bound, description, defaultValue);
}
parseExtendsTagValue(tagName, tokens) {
const startLine = tokens.currentTokenLine();
const startIndex = tokens.currentTokenIndex();
const baseType = new identifier_type_node_1.IdentifierTypeNode(tokens.currentTokenValue());
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
const type = this.typeParser.parseGeneric(tokens, this.enrichWithAttributes(tokens, baseType, startLine, startIndex));
const description = this.parseOptionalDescription(tokens);
switch (tagName) {
case '@extends':
return new extends_tag_value_node_1.ExtendsTagValueNode(type, description);
case '@implements':
return new implements_tag_value_node_1.ImplementsTagValueNode(type, description);
case '@use':
return new uses_tag_value_node_1.UsesTagValueNode(type, description);
default:
throw new Error('Should not happen');
}
}
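    // Parses "@phpstan-type Alias = Type". With preserveTypeAliasesWithInvalidTypes
    // enabled, a type that fails to parse is preserved as an InvalidTypeNode rather
    // than failing the whole tag.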
parseTypeAliasTagValue(tokens) {
const alias = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_EQUAL);
if (this.preserveTypeAliasesWithInvalidTypes) {
const startLine = tokens.currentTokenLine();
const startIndex = tokens.currentTokenIndex();
try {
const type = this.typeParser.parse(tokens);
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_CLOSE_PHPDOC)) {
if (!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL)) {
throw new Error('Expected end of line');
}
}
return new type_alias_tag_value_node_1.TypeAliasTagValueNode(alias, type);
}
catch (e) {
this.parseOptionalDescription(tokens);
return new type_alias_tag_value_node_1.TypeAliasTagValueNode(alias, this.enrichWithAttributes(tokens, new invalid_type_node_1.InvalidTypeNode(e), startLine, startIndex));
}
}
const type = this.typeParser.parse(tokens);
return new type_alias_tag_value_node_1.TypeAliasTagValueNode(alias, type);
}
parseTypeAliasImportTagValue(tokens) {
const importedAlias = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
tokens.consumeTokenValue(lexer_1.Lexer.TOKEN_IDENTIFIER, 'from');
const identifierStartLine = tokens.currentTokenLine();
const identifierStartIndex = tokens.currentTokenIndex();
const importedFrom = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
const importedFromType = this.enrichWithAttributes(tokens, new identifier_type_node_1.IdentifierTypeNode(importedFrom), identifierStartLine, identifierStartIndex);
let importedAs = null;
if (tokens.tryConsumeTokenValue('as')) {
importedAs = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
}
return new type_alias_import_tag_value_node_1.TypeAliasImportTagValueNode(importedAlias, importedFromType, importedAs);
}
parseAssertTagValue(tokens) {
const isNegated = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_NEGATED);
const isEquality = tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_EQUAL);
const type = this.typeParser.parse(tokens);
const parameter = this.parseAssertParameter(tokens);
const description = this.parseOptionalDescription(tokens);
if (parameter.method) {
return new assert_tag_method_value_node_1.AssertTagMethodValueNode(type, parameter.parameter, parameter.method, isNegated, description, isEquality);
}
if (parameter.property) {
return new assert_tag_property_value_node_1.AssertTagPropertyValueNode(type, parameter.parameter, parameter.property, isNegated, description, isEquality);
}
return new assert_tag_value_node_1.AssertTagValueNode(type, parameter.parameter, isNegated, description, isEquality);
}
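    // Parses the asserted subject: a $variable, optionally followed by "->property"
    // or "->method()"; "$this" must be followed by one of the two.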
parseAssertParameter(tokens) {
let parameter;
let requirePropertyOrMethod;
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_THIS_VARIABLE)) {
parameter = '$this';
requirePropertyOrMethod = true;
tokens.next();
}
else {
parameter = tokens.currentTokenValue();
requirePropertyOrMethod = false;
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_VARIABLE);
}
if (requirePropertyOrMethod ||
tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_ARROW)) {
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_ARROW);
const name = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_IDENTIFIER);
if (tokens.tryConsumeTokenType(lexer_1.Lexer.TOKEN_OPEN_PARENTHESES)) {
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_CLOSE_PARENTHESES);
return {
parameter,
method: name,
};
}
return {
parameter,
property: name,
};
}
return { parameter };
}
parseSelfOutTagValue(tokens) {
const type = this.typeParser.parse(tokens);
const description = this.parseOptionalDescription(tokens);
return new self_out_tag_value_node_1.SelfOutTagValueNode(type, description);
}
parseParamOutTagValue(tokens) {
const type = this.typeParser.parse(tokens);
const name = this.parseRequiredVariableName(tokens);
const description = this.parseOptionalDescription(tokens);
return new param_out_tag_value_node_1.ParamOutTagValueNode(type, name, description);
}
parseOptionalVariableName(tokens) {
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_VARIABLE)) {
const name = tokens.currentTokenValue();
tokens.next();
return name;
}
if (tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_THIS_VARIABLE)) {
const name = '$this';
tokens.next();
return name;
}
return '';
}
parseRequiredVariableName(tokens) {
const name = tokens.currentTokenValue();
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_VARIABLE);
return name;
}
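    // Reads the trailing description. With limitStartToken set, a description that
    // starts with "|" or "&", or one not preceded by whitespace when
    // requireWhitespaceBeforeDescription is on, triggers a parse error via a
    // deliberately impossible consumeTokenType() call.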
parseOptionalDescription(tokens, limitStartToken = false) {
if (limitStartToken) {
for (const token of DISALLOWED_DESCRIPTION_START_TOKENS) {
if (!tokens.isCurrentTokenType(token)) {
continue;
}
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_OTHER);
}
if (this.requireWhitespaceBeforeDescription &&
!tokens.isCurrentTokenType(lexer_1.Lexer.TOKEN_PHPDOC_EOL, lexer_1.Lexer.TOKEN_CLOSE_PHPDOC, lexer_1.Lexer.TOKEN_END) &&
!tokens.isPrecededByHorizontalWhitespace()) {
tokens.consumeTokenType(lexer_1.Lexer.TOKEN_HORIZONTAL_WS);
}
}
return this.parseText(tokens).text;
}
}
exports.PhpDocParser = PhpDocParser;
PhpDocParser.DISALLOWED_DESCRIPTION_START_TOKENS = [
lexer_1.Lexer.TOKEN_UNION,
lexer_1.Lexer.TOKEN_INTERSECTION,
];