buntis
A 100% compliant, self-hosted TypeScript parser that emits an ESTree-compatible abstract syntax tree
import { Token, KeywordDescTable } from './token';
import { nextToken } from './scanner/scan';
import { Errors, report } from './errors';
import { scanTemplateTail } from './scanner/string';
import * as ESTree from './estree';
import { scanJSXIdentifier, scanJSXToken, scanJSXAttributeValue } from './scanner/jsx';
import {
Context,
Flags,
Origin,
BindingKind,
TypeScriptMadness,
CommentCallback,
ErrorCallback,
ParserState,
PropertyKind,
Options,
optionalBit,
consume,
consumeOpt,
consumeSemicolon,
canParseSemicolon,
reinterpretToPattern,
isValidIdentifier,
reinterpretToTypeLiteral,
ModifierKind,
TypeAssertionState,
TypeAliasState,
primaryTypes,
tryScan,
canFollowTypeArgumentsInExpression,
isEqualTagName,
ClassAndFunctionFlags,
isStrictReservedWord,
validateFunctionName,
finishNode
} from './common';
/**
* Create a new parser instance.
*/
export function create(source: string, onError?: ErrorCallback, onComment?: CommentCallback): ParserState {
return {
/**
* The source code to be parsed
*/
source,
/**
* The mutable parser flags, in case any flags need to be passed by reference.
*/
flags: Flags.Empty,
/**
* The current index
*/
index: 0,
/**
* The current line number (1-based)
*/
line: 1,
/**
* The current column (0-based)
*/
column: 0,
/**
* Start position of text of current token
*/
tokenPos: 0,
/**
* Start position of whitespace before current token
*/
startPos: 0,
/**
* Start column position of whitespace before current token
*/
startColumn: 0,
/**
* Start line position of whitespace before current token
*/
startLine: 0,
/**
* End line position of whitespace before current token
*/
endLine: 0,
/**
* End column position of whitespace before current token
*/
endColumn: 0,
/**
* Set to `1` if the current token is preceded by a line break
*/
precedingLineBreak: 0,
/**
* The length of the source code
*/
length: source.length,
/**
* The current token in the stream to consume
*/
token: Token.EndOfSource,
/**
* Holds the scanned token value
*/
tokenValue: undefined,
/**
* Holds the raw text that has been scanned by the lexer
*/
tokenRaw: '',
/**
* Holds the regular expression info that has been collected by the lexer
*/
tokenRegExp: undefined,
/**
* The code point at the current index
*/
nextCodePoint: source.charCodeAt(0),
/**
* Counter for incremental uid
*/
uid: 0,
/**
* Callback for comment attachment
*/
onComment,
/**
* Callback for error recovery
*/
onError
};
}
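/*
 * Illustrative usage sketch (not part of the original source): the returned
 * ParserState is mutated in place by nextToken and the parse* functions below.
 *
 *   const parser = create('let answer = 42;');
 *   // parser.index === 0, parser.line === 1, parser.nextCodePoint === 'l'.charCodeAt(0)
 *   // The token fields keep their defaults until nextToken(parser, context) primes the first token.
 */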
export function parseSource(
source: string,
options: Options | void,
context: Context,
onError?: ErrorCallback
): ESTree.Program {
let onComment: CommentCallback;
if (options != null) {
if (options.onComment != null) onComment = options.onComment;
context |=
(options.module ? Context.Module : 0) |
(options.next ? Context.OptionsNext : 0) |
(options.jsx ? Context.OptionsJSX : 0) |
(options.ranges ? Context.OptionsRanges : 0) |
(options.loc ? Context.OptionsLoc : 0) |
(options.impliedStrict ? Context.Strict : 0) |
(options.directives ? Context.OptionsDirectives | Context.OptionsRaw : 0) |
(options.globalReturn ? Context.OptionsGlobalReturn : 0) |
(options.disableWebCompat ? Context.DisableWebCompat : 0) |
(options.raw ? Context.OptionsRaw : 0) |
(options.ts ? Context.OptionsTS : 0);
}
let body: ESTree.Statement[] = [];
const parser = create(source, onError, onComment);
const isModule = context & Context.Module;
nextToken(parser, context | Context.AllowRegExp);
context |= Context.InGlobal;
body =
context & Context.OptionsTS
? parseDeclarations(parser, context)
: isModule
? parseModuleItemList(parser, context)
: parseStatementList(parser, context);
const node: any = {
type: 'Program',
sourceType: isModule ? 'module' : 'script',
body
};
return node;
}
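/*
 * Usage sketch (illustrative; option names are taken from the Options handling
 * above, and the zero bit mask stands in for an empty Context value):
 *
 *   const ast = parseSource('let x: number = 1;', { ts: true }, 0 as Context);
 *   // ast.type === 'Program', ast.sourceType === 'script',
 *   // ast.body holds the parsed statement list.
 */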
export function parseDeclarations(parser: ParserState, context: Context): ESTree.DeclarationStatement[] {
const statements: ESTree.DeclarationStatement[] = [];
while (parser.token === Token.StringLiteral) {
// "use strict" must be the exact literal without escape sequences or line continuation.
if (parser.index - parser.tokenPos < 13 && parser.tokenValue === 'use strict') {
if ((parser.token & Token.IsAutoSemicolon) === Token.IsAutoSemicolon || parser.precedingLineBreak === 0) {
context |= Context.Strict;
}
}
statements.push(parseStatementListItem(parser, context, parser.tokenPos));
}
while (parser.token !== Token.EndOfSource) {
statements.push(parseDeclarationList(parser, context, /* isModule */ 0, /* declared */ 0, /* isExport */ false));
}
return statements;
}
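/*
 * Directive prologue note (illustrative): the `parser.index - parser.tokenPos < 13`
 * guard above relies on the unescaped literal "use strict" spanning exactly 12
 * characters including its quotes, so escaped spellings are not promoted to strict mode:
 *
 *   "use strict"         // raw span of 12 -> Context.Strict is set
 *   "use\u0020strict"    // same cooked value, longer raw span -> plain directive
 */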
export function parseDeclarationList(
parser: ParserState,
context: Context,
isModule: 0 | 1,
declare: 0 | 1,
isExport: boolean
): any {
const { token } = parser;
let decorators: ESTree.Decorator[] = [];
switch (token) {
// 'declare'
case Token.DeclareKeyword: {
const { tokenValue } = parser;
switch (nextToken(parser, context)) {
case Token.ClassKeyword:
return parseTypescriptClassDeclaration(
parser,
context,
ClassAndFunctionFlags.None,
/* declare */ 1,
/* abstract */ 0
);
case Token.FunctionKeyword:
return parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.None,
Origin.Statement,
/* isAsync */ 0,
/* allowGen */ 1,
/* declare */ true
);
default:
if (
parser.precedingLineBreak === 0 &&
(parser.token & Token.IsDeclarationStatement) === Token.IsDeclarationStatement
) {
return parseDeclarationList(parser, context, isModule, 1, parser.token === Token.ExportKeyword);
}
return parseExpressionOrLabelledStatement(
parser,
context,
parseIdentifierFromValue(parser, context, tokenValue, /* allowAnnotations */ 0),
token,
0
);
}
}
// 'abstract'
case Token.AbstractKeyword:
return parseClassDeclarationOrIdentifier(parser, context, token, /* abstract */ 1, declare);
// 'namespace'
case Token.NamespaceKeyword:
return parseNamespaceOrIdentifier(parser, context, token, declare);
// 'enum'
case Token.EnumKeyword:
return parseEnumDeclarationOrIdentifier(parser, context, token, declare, /* isConst */ 0);
// 'interface'
case Token.InterfaceKeyword:
return parseInterfaceOrIdentifier(parser, context, token, declare);
// 'type'
case Token.TypeKeyword:
return parseTypeAliasOrIdentifier(parser, context, token, declare);
// 'module', 'global'
case Token.GlobalKeyword:
return parseGlobalModuleDeclarationOrIdentifier(parser, context, token, declare);
case Token.ModuleKeyword:
return parseModuleDeclarationOrIdentifier(parser, context, token, declare);
// 'const', 'const enum'
case Token.ConstKeyword:
return parseConstEnumOrVariableStatement(parser, context, declare);
case Token.At:
decorators = parseDecorators(parser, context);
if (parser.token === Token.ClassKeyword) {
return parseTypescriptClassDeclaration(
parser,
context,
ClassAndFunctionFlags.None,
/* declare */ 0,
/* abstract */ 0,
decorators as any,
true
);
}
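// falls through: a decorator list may precede 'export' (e.g. '@dec export class C {}')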
// 'export'
case Token.ExportKeyword: {
if (isExport) report(parser, context, Errors.ExportAssignModifiers, /* early */ 0);
switch (nextToken(parser, context | Context.AllowRegExp)) {
case Token.DefaultKeyword:
return parseExportDefaultDeclaration(parser, context, decorators);
case Token.Assign:
return parseExportAssignment(parser, context);
case Token.DeclareKeyword:
case Token.InterfaceKeyword:
return parseExportNamedDeclaration(parser, context);
case Token.AsKeyword:
return parseNamespaceExportDeclaration(parser, context);
case Token.ImportKeyword:
return parseImportEqualsDeclaration(parser, context, /* isExport */ 1);
default:
return parseTypeScriptExportDeclaration(parser, context, decorators);
}
}
// 'import'
case Token.ImportKeyword: {
if ((context & Context.InGlobal) < 1) {
report(parser, context, Errors.InvalidImportDeclNotGlobal, /* early */ 0);
}
return parseImportDeclaration(parser, context, /* isTS */ 1);
}
default:
return parseStatementListItem(parser, context, 1);
}
}
export function parseStatementList(parser: ParserState, context: Context): any {
const statements: any[] = [];
while (parser.token === Token.StringLiteral) {
// "use strict" must be the exact literal without escape sequences or line continuation.
if (parser.index - parser.tokenPos < 13 && parser.tokenValue === 'use strict') {
if ((parser.token & Token.IsAutoSemicolon) === Token.IsAutoSemicolon || parser.precedingLineBreak === 0) {
context |= Context.Strict;
}
}
statements.push(parseStatementListItem(parser, context, parser.tokenPos));
}
while (parser.token !== Token.EndOfSource) {
statements.push(parseStatementListItem(parser, context, parser.tokenPos));
}
return statements;
}
export function parseStatementListItem(parser: ParserState, context: Context, pos: number): any {
switch (parser.token) {
case Token.FunctionKeyword:
return parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.None,
Origin.Statement,
/* isAsync */ 0,
/* allowGen */ 1,
/* declare */ false
);
case Token.At:
case Token.ClassKeyword:
return context & Context.OptionsTS
? parseTypescriptClassDeclaration(
parser,
context,
ClassAndFunctionFlags.None,
/* declare */ 0,
/* abstract */ 0
)
: parseClassDeclaration(parser, context, ClassAndFunctionFlags.None);
case Token.ConstKeyword:
return parseLexicalDeclaration(parser, context, BindingKind.Const, Origin.None);
case Token.LetKeyword:
return parseLetIdentOrVarDeclarationStatement(parser, context);
case Token.ExportKeyword:
report(parser, context, Errors.Unexpected, 0);
case Token.ImportKeyword:
switch (nextToken(parser, context)) {
case Token.LeftParen:
return parseImportCallDeclaration(parser, context);
default:
report(parser, context, Errors.Unexpected, 0);
}
case Token.AsyncKeyword:
return parseAsyncArrowOrAsyncFunctionDeclaration(parser, context);
default:
return parseStatement(parser, context, pos);
}
}
export function parseAsyncArrowOrAsyncFunctionDeclaration(parser: ParserState, context: Context): any {
const { token } = parser;
let expr: any = parseIdentifier(parser, context);
if (parser.token === Token.Colon) {
return parseLabelledStatement(parser, context, token, expr, 1);
}
const asyncNewLine = parser.precedingLineBreak;
if (!asyncNewLine) {
// async function ...
if (parser.token === Token.FunctionKeyword) {
return parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.None,
Origin.Statement,
/* isAsync */ 1,
/* allowGen */ 1,
/* declare */ false
);
}
// async Identifier => ...
if ((parser.token & Token.IsIdentifier) === Token.IsIdentifier) {
expr = parseArrowFunctionExpression(
parser,
context,
[parseIdentifier(parser, context)],
null,
null,
/* isLHS */ 1,
/* isAsync */ 1
);
if (parser.token === Token.Comma) expr = parseSequenceExpression(parser, context, expr);
return parseExpressionStatement(parser, context, expr);
}
}
let typeParameters: any = null;
if (context & Context.OptionsTS && parser.token === Token.LessThan) {
typeParameters = parseAsyncIdentifierOrParameterInstantiation(parser, context);
if (typeParameters !== undefined && parser.token !== Token.LeftParen)
report(parser, context, Errors.Unexpected, /* early */ 0);
}
if (parser.token === Token.LeftParen) {
expr = parseAsyncArrowOrCallExpression(parser, context, 1, typeParameters, expr);
} else if (parser.token === Token.Arrow) {
expr = parseArrowFunctionExpression(parser, context, expr, null, null, /* isLHS */ 1, /* isAsync */ 1);
}
expr = parseMemberOrUpdateExpression(parser, context, expr);
if (parser.token === Token.Comma) expr = parseSequenceExpression(parser, context, expr);
expr = parseAssignmentExpression(parser, context, expr);
return parseExpressionStatement(parser, context, expr);
}
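/*
 * Illustrative inputs routed through the branches above (sketch, not exhaustive):
 *
 *   async: x;              // 'async' used as a statement label
 *   async function f() {}  // async function declaration
 *   async x => x;          // async arrow with a single identifier parameter
 *   async <T>(x: T) => x;  // TS: speculative type-parameter scan, then arrow
 *   async(a, b);           // plain call expression with callee 'async'
 */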
export function parseAsyncIdentifierOrParameterInstantiation(
parser: ParserState,
context: Context
): ESTree.TypeParameterInstantiation | void {
return tryScan(parser, context, function(): any {
nextToken(parser, context);
const params: any[] = [];
while (parser.token !== Token.GreaterThan) {
params.push(parseTypeParameter(parser, context | Context.Speculative));
if (parser.token !== Token.GreaterThan) {
if (!consumeOpt(parser, context, Token.Comma)) break;
}
}
if (!consumeOpt(parser, context, Token.GreaterThan)) {
return undefined;
}
return params && canFollowTypeArgumentsInExpression(parser)
? {
type: 'TypeParameterDeclaration',
params
}
: undefined;
});
}
export function parseAsyncArrowOrCallExpression(
parser: ParserState,
context: Context,
isLHS: 0 | 1,
typeParameters: any,
callee: ESTree.Identifier | void
): any {
nextToken(parser, context | Context.AllowRegExp);
context = (context | Context.DisallowIn) ^ Context.DisallowIn;
if (consumeOpt(parser, context, Token.RightParen)) {
let returnType: any[] | null = null;
if (context & Context.OptionsTS && parser.token === Token.Colon) {
returnType = parseTypeAnnotation(parser, context, /* restrictedTypes */ 0);
if (parser.token !== Token.Arrow) report(parser, context, Errors.Unexpected, /* early */ 0);
}
if (parser.token === Token.Arrow) {
if (isLHS) {
return parseArrowFunctionExpression(
parser,
context,
[],
typeParameters,
returnType,
/* isLHS */ 1,
/* isAsync */ 1
);
}
report(parser, context, Errors.Unexpected, /* early */ 1);
}
return finishNode({
type: 'CallExpression',
callee,
optional: false,
shortCircuited: false,
arguments: []
} as any);
}
let expr: any = null;
const params: ESTree.Expression[] = [];
while (parser.token !== Token.RightParen) {
const { token } = parser;
if ((token & (Token.IsIdentifier | Token.FutureReserved)) > 0) {
expr = parsePrimaryExpression(parser, context, /* isLHS */ 1, /* inParen */ 1);
if (parser.token === Token.QuestionMark) {
nextToken(parser, context);
if (parser.token === Token.Colon) {
expr.typeAnnotation = parseTypeAnnotation(parser, context, /* restrictedTypes */ 0);
}
return parseOptionalNchanged(parser, context, params, /* isAsync */ 1);
}
if (parser.token === Token.RightParen || parser.token === Token.Comma) {
// TODO
} else {
expr = parseMemberOrUpdateExpression(parser, context, expr);
if (parser.token !== Token.RightParen && parser.token !== Token.Comma) {
expr = parseAssignmentExpression(parser, context, expr);
}
}
} else if (token & Token.IsPatternStart) {
expr =
token === Token.LeftBrace
? parseObjectLiteralOrPattern(parser, context, 0, /* isPattern */ 1, [])
: parseArrayExpressionOrPattern(parser, context, 0, /* isPattern */ 1, []);
if (parser.token !== Token.RightParen && parser.token !== Token.Comma) {
expr = parseMemberOrUpdateExpression(parser, context, expr);
if ((parser.token & Token.IsBinaryOp) === Token.IsBinaryOp) {
expr = parseBinaryExpression(parser, context, 1, token, expr);
}
if (consumeOpt(parser, context | Context.AllowRegExp, Token.QuestionMark)) {
expr = parseConditionalExpression(parser, context, expr);
}
}
} else if (token === Token.Ellipsis) {
expr = parseSpreadOrRestElement(parser, context, Token.RightParen, /* allowTypeAnnotation */ 1, []);
} else {
expr = parseExpression(parser, context);
params.push(expr);
while (consumeOpt(parser, context | Context.AllowRegExp, Token.Comma)) {
params.push(parseExpression(parser, context));
}
consume(parser, context, Token.RightParen);
return finishNode({
type: 'CallExpression',
optional: false,
shortCircuited: false,
callee,
arguments: params
} as any);
}
params.push(expr as ESTree.Expression);
if (!consumeOpt(parser, context | Context.AllowRegExp, Token.Comma)) break;
}
consume(parser, context, Token.RightParen);
let returnType: any[] | null = null;
if (context & Context.Conditional && parser.token === Token.Colon) {
const returnType = tryScan(parser, context, function(): any {
const returnType = parseTypeAnnotation(parser, context | Context.Speculative, /* restrictedTypes */ 0);
if (parser.token === Token.Arrow) return returnType;
return undefined;
});
return returnType
? parseArrowFunctionExpression(parser, context, params, null, returnType, isLHS, /* isAsync */ 1)
: expr;
} else if (parser.token === Token.Colon) {
returnType = parseTypeAnnotation(parser, context, /* restrictedTypes */ 0);
if (parser.token !== Token.Arrow) report(parser, context, Errors.Unexpected, /* early */ 0);
}
if (parser.token === Token.Arrow) {
if (!isLHS) report(parser, context, Errors.Unexpected, /* early */ 1);
return parseArrowFunctionExpression(
parser,
context,
params,
typeParameters,
returnType,
/* isLHS */ 1,
/* isAsync */ 1
);
}
return finishNode({
type: 'CallExpression',
optional: false,
shortCircuited: false,
callee,
arguments: params
} as any);
}
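/*
 * Disambiguation sketch (illustrative): once 'async (' has been consumed, the
 * parenthesized contents decide between a CallExpression and an async arrow head:
 *
 *   async ();                  // empty arguments, no arrow -> CallExpression
 *   async (): T => x;          // TS: empty parameters with a return type annotation
 *   async (a, ...rest) => a;   // parameters including a rest element -> arrow
 *   async (a, b).c;            // CallExpression; the member access is parsed by the caller
 */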
export function parseModuleItemList(parser: ParserState, context: Context): any {
const statements: any[] = [];
while (parser.token !== Token.EndOfSource) {
statements.push(parseModuleItem(parser, context));
}
return statements;
}
export function parseModuleItem(parser: ParserState, context: Context): any {
// ecma262/#prod-ModuleItem
// ModuleItem :
// ImportDeclaration
// ExportDeclaration
// StatementListItem
switch (parser.token) {
case Token.ExportKeyword:
return parseExportDeclaration(parser, context);
case Token.ImportKeyword:
return parseImportDeclaration(parser, context, /* isTS */ 0);
default:
return parseStatementListItem(parser, context, 0);
}
}
function parseImportDeclaration(
parser: ParserState,
context: Context,
isTS: 0 | 1
): ESTree.ImportDeclaration | ESTree.ExpressionStatement {
let source: ESTree.Literal | null = null;
nextToken(parser, context);
const specifiers: (ESTree.ImportSpecifier | ESTree.ImportDefaultSpecifier | ESTree.ImportNamespaceSpecifier)[] = [];
// 'import' ModuleSpecifier ';'
if (parser.token === Token.StringLiteral) {
source = parseLiteral(parser, context);
} else {
if (parser.token & (Token.Keywords | Token.FutureReserved | Token.IsIdentifier)) {
const local = parseIdentifier(parser, context);
if (isTS && parser.token === Token.Assign) {
nextToken(parser, context);
const moduleReference: any = parseModuleReference(parser, context);
consumeSemicolon(parser, context);
return finishNode({
type: 'ImportEqualsDeclaration',
id: local,
moduleReference,
isExport: false
} as any);
}
specifiers.push(
finishNode({
type: 'ImportDefaultSpecifier',
local
})
);
// NameSpaceImport
if (consumeOpt(parser, context, Token.Comma)) {
switch (parser.token) {
case Token.Multiply:
specifiers.push(parseImportNamespaceSpecifier(parser, context));
break;
case Token.LeftBrace:
parseImportSpecifierOrNamedImports(parser, context, specifiers);
break;
default:
report(parser, context, Errors.Unexpected, /* early */ 1);
}
}
} else {
// Parse NameSpaceImport or NamedImports if present
switch (parser.token) {
case Token.Multiply:
specifiers.push(parseImportNamespaceSpecifier(parser, context));
break;
case Token.LeftBrace:
parseImportSpecifierOrNamedImports(parser, context, specifiers);
break;
case Token.LeftParen:
return parseImportCallDeclaration(parser, context);
default:
report(parser, context, Errors.UnexpectedToken, /* early */ 0, KeywordDescTable[parser.token & Token.Type]);
}
}
source = parseModuleSpecifier(parser, context);
}
consumeSemicolon(parser, context | Context.AllowRegExp);
return finishNode({
type: 'ImportDeclaration',
specifiers,
source
} as any);
}
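/*
 * Illustrative import forms handled by the branches above (sketch, not exhaustive):
 *
 *   import 'mod';                        // bare ModuleSpecifier, no specifiers
 *   import foo from 'mod';               // ImportDefaultSpecifier
 *   import foo, * as ns from 'mod';      // default + ImportNamespaceSpecifier
 *   import { a as b } from 'mod';        // NamedImports
 *   import A = require('mod');           // TS only: ImportEqualsDeclaration
 *   import('mod');                       // import call, handled just below
 */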
export function parseImportCallDeclaration(parser: ParserState, context: Context): ESTree.ExpressionStatement {
let expr = parseImportExpression(parser, context);
expr = parseMemberOrUpdateExpression(parser, context, expr);
return parseExpressionStatement(parser, context, expr);
}
/**
* Parse binding identifier
*
* @see [Link](https://tc39.github.io/ecma262/#prod-NameSpaceImport)
*
* @param parser Parser object
* @param context Context masks
* @param specifiers Array of import specifiers
*/
function parseImportNamespaceSpecifier(parser: ParserState, context: Context): ESTree.ImportNamespaceSpecifier {
// NameSpaceImport:
// * as ImportedBinding
nextToken(parser, context);
consume(parser, context, Token.AsKeyword);
return finishNode({
type: 'ImportNamespaceSpecifier',
local: parseIdentifier(parser, context)
});
}
/**
* Parse module specifier
*
* @see [Link](https://tc39.github.io/ecma262/#prod-ModuleSpecifier)
*
* @param parser Parser object
* @param context Context masks
*/
function parseModuleSpecifier(parser: ParserState, context: Context): ESTree.Literal {
// ModuleSpecifier :
// StringLiteral
consumeOpt(parser, context, Token.FromKeyword);
if (parser.token !== Token.StringLiteral) report(parser, context, Errors.Unexpected, 0, 'Import');
return parseLiteral(parser, context);
}
function parseImportSpecifierOrNamedImports(
parser: ParserState,
context: Context,
specifiers: (ESTree.ImportSpecifier | ESTree.ImportDefaultSpecifier | ESTree.ImportNamespaceSpecifier)[]
): (ESTree.ImportSpecifier | ESTree.ImportDefaultSpecifier | ESTree.ImportNamespaceSpecifier)[] {
// NamedImports :
// '{' '}'
// '{' ImportsList '}'
// '{' ImportsList ',' '}'
//
// ImportsList :
// ImportSpecifier
// ImportsList ',' ImportSpecifier
//
// ImportSpecifier :
// BindingIdentifier
// IdentifierName 'as' BindingIdentifier
nextToken(parser, context);
while ((parser.token & (Token.FutureReserved | Token.IdentifierOrKeyword)) !== 0) {
const imported = parseIdentifier(parser, context);
let local: ESTree.Identifier;
if (consumeOpt(parser, context, Token.AsKeyword)) {
local = parseIdentifier(parser, context);
} else {
local = imported;
}
specifiers.push(
finishNode({
type: 'ImportSpecifier',
local,
imported
})
);
if (parser.token !== Token.RightBrace) consume(parser, context, Token.Comma);
}
consume(parser, context, Token.RightBrace);
return specifiers;
}
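/*
 * Illustrative NamedImports accepted by the loop above:
 *
 *   import { a } from 'mod';        // local === imported
 *   import { a as b } from 'mod';   // 'as' renames the local binding
 *   import { a, b, } from 'mod';    // trailing comma before '}' is allowed
 */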
export function parseExportAssignment(parser: ParserState, context: Context): any {
nextToken(parser, context);
const expression = parseExpression(parser, context);
consumeSemicolon(parser, context);
return finishNode({
type: 'ExportAssignment',
expression
});
}
export function parseExportNamedDeclaration(parser: ParserState, context: Context) {
return finishNode({
type: 'ExportNamedDeclaration',
declaration: parseDeclarationList(parser, context, /* isModule */ 1, 1, /* isExport */ false),
specifiers: [],
source: null
});
}
export function parseNamespaceExportDeclaration(parser: ParserState, context: Context) {
nextToken(parser, context);
consumeOpt(parser, context, Token.NamespaceKeyword);
const id = parseIdentifier(parser, context);
consumeSemicolon(parser, context);
return finishNode({
type: 'NamespaceExportDeclaration',
id
});
}
export function parseTypeScriptExportDeclaration(
parser: ParserState,
context: Context,
decorators: ESTree.Decorator[]
) {
if ((context & Context.InGlobal) < 1) report(parser, context, Errors.UnexpectedToken, 0, 'Export');
const specifiers: any[] = [];
let source = null;
let declaration: any = null;
switch (parser.token) {
case Token.NamespaceKeyword:
declaration = parseModuleOrNamespaceDeclaration(parser, context);
break;
case Token.ModuleKeyword:
declaration = parseModuleDeclaration(parser, context, false);
break;
case Token.EnumKeyword:
nextToken(parser, context);
declaration = parseEnumDeclaration(parser, context, 0, 0);
break;
case Token.TypeKeyword:
nextToken(parser, context);
declaration = parseTypeAlias(parser, context, 0);
break;
case Token.AbstractKeyword:
nextToken(parser, context);
if (parser.token !== Token.ClassKeyword) {
report(parser, context, Errors.UnexpectedToken, 0, KeywordDescTable[parser.token & Token.Type]);
}
declaration = parseTypescriptClassDeclaration(
parser,
context,
ClassAndFunctionFlags.Hoisted,
/* declare */ 0,
/* abstract */ 1
);
break;
case Token.Multiply: {
let ecma262PR: 0 | 1 = 0;
nextToken(parser, context); // Skips: '*'
if (context & Context.OptionsNext && consumeOpt(parser, context, Token.AsKeyword)) {
ecma262PR = 1;
specifiers.push(
finishNode({
type: 'ExportNamespaceSpecifier',
specifier: parseIdentifier(parser, context)
})
);
}
consume(parser, context, Token.FromKeyword);
source = parseLiteral(parser, context);
consumeSemicolon(parser, context | Context.AllowRegExp);
return ecma262PR
? {
type: 'ExportNamedDeclaration',
source,
specifiers
}
: ({
type: 'ExportAllDeclaration',
source
} as any);
}
case Token.LeftBrace: {
nextToken(parser, context); // Skips: '{'
while ((parser.token & (Token.FutureReserved | Token.IdentifierOrKeyword)) !== 0) {
const local = parseIdentifier(parser, context);
let exported: ESTree.Identifier | null;
if (parser.token === Token.AsKeyword) {
nextToken(parser, context);
exported = parseIdentifier(parser, context);
} else {
exported = local;
}
specifiers.push(
finishNode({
type: 'ExportSpecifier',
local,
exported
} as any)
);
if (parser.token !== Token.RightBrace) consume(parser, context, Token.Comma);
}
consume(parser, context, Token.RightBrace);
if (consumeOpt(parser, context, Token.FromKeyword)) {
// The left hand side can't be a keyword where there is no
// 'from' keyword since it references a local binding.
if (parser.token !== Token.StringLiteral) report(parser, context, Errors.UnexpectedToken, 0, 'Export');
source = parseLiteral(parser, context);
}
consumeSemicolon(parser, context | Context.AllowRegExp);
break;
}
case Token.At:
case Token.ClassKeyword:
declaration =
context & Context.OptionsTS
? parseTypescriptClassDeclaration(
parser,
context,
ClassAndFunctionFlags.Hoisted,
/* declare */ 0,
/* abstract */ 0,
decorators as any
)
: parseClassDeclaration(parser, context, ClassAndFunctionFlags.Export);
break;
case Token.FunctionKeyword:
declaration = parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.Export,
Origin.TopLevel,
0,
/* allowGen */ 1,
false
);
break;
case Token.LetKeyword:
declaration = parseLexicalDeclaration(parser, context, BindingKind.Let, Origin.Export);
break;
case Token.ConstKeyword:
declaration = parseConstEnumOrVariableStatement(parser, context, 0);
break;
case Token.VarKeyword:
declaration = parseVariableStatement(parser, context, Origin.Export);
break;
case Token.AsyncKeyword:
nextToken(parser, context);
if (parser.precedingLineBreak === 0 && parser.token === Token.FunctionKeyword) {
declaration = parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.Export,
Origin.TopLevel,
1,
/* allowGen */ 1,
false
);
break;
}
// falls through
default:
report(parser, context, Errors.UnexpectedToken, 0, KeywordDescTable[parser.token & Token.Type]);
}
return finishNode({
type: 'ExportNamedDeclaration',
source,
specifiers,
declaration
});
}
export function parseModuleOrNamespaceDeclaration(parser: ParserState, context: Context): any {
nextToken(parser, context);
const id = parseIdentifier(parser, context);
let body: any;
if (consumeOpt(parser, context, Token.Period)) {
body = parseModuleOrNamespaceDeclaration(parser, context);
} else {
body = parseModuleBlock(parser, context);
}
consumeSemicolon(parser, context);
return finishNode({
type: 'ModuleDeclaration',
id,
body
});
}
function parseImportEqualsDeclaration(parser: ParserState, context: Context, isExport: 0 | 1): any {
consume(parser, context, Token.ImportKeyword);
const id = parseIdentifier(parser, context);
consume(parser, context | Context.AllowRegExp, Token.Assign);
const moduleReference: any = parseModuleReference(parser, context);
consumeSemicolon(parser, context);
return finishNode({
type: 'ImportEqualsDeclaration',
id,
isExport: isExport === 1,
moduleReference
});
}
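/*
 * Illustrative TS-only forms (sketch; see parseModuleReference below):
 *
 *   export import A = require('mod');   // ExternalModuleReference
 *   export import A = B.C.D;            // qualified entity-name reference
 *
 * The non-exported `import A = ...` variant is handled inside parseImportDeclaration.
 */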
function parseModuleReference(parser: ParserState, context: Context): any {
if (parser.token === Token.RequireKeyword) {
const { tokenValue } = parser;
nextToken(parser, context);
if (parser.token !== Token.LeftParen) {
// 'Require' as identifier
return parseQualifiedName(
parser,
context,
parseIdentifierFromValue(parser, context, tokenValue, /* allowAnnotations */ 0)
);
}
return parseExternalModuleReference(parser, context);
}
return parseEntityName(parser, context);
}
export function parseExternalModuleReference(parser: ParserState, context: Context): any {
nextToken(parser, context);
const expression = parseLiteral(parser, context);
consume(parser, context, Token.RightParen);
return finishNode({
type: 'ExternalModuleReference',
expression
});
}
export function parseExportDefaultDeclaration(
parser: ParserState,
context: Context,
decorators: ESTree.Decorator[] = []
): any {
let declaration: any = null;
nextToken(parser, context | Context.AllowRegExp);
switch (parser.token) {
// export default HoistableDeclaration[Default]
case Token.FunctionKeyword: {
declaration = parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.Hoisted,
Origin.TopLevel,
0,
/* allowGen */ 1,
false
);
break;
}
case Token.At:
case Token.ClassKeyword:
declaration =
context & Context.OptionsTS
? parseTypescriptClassDeclaration(
parser,
context,
ClassAndFunctionFlags.Hoisted,
/* declare */ 0,
/* abstract */ 0,
decorators as any
)
: parseClassDeclaration(parser, context, ClassAndFunctionFlags.Hoisted);
break;
case Token.AsyncKeyword:
declaration = parseIdentifier(parser, context);
if (parser.precedingLineBreak === 0) {
if (parser.token === Token.FunctionKeyword) {
declaration = parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.Hoisted,
Origin.TopLevel,
1,
/* allowGen */ 1,
false
);
} else {
let typeParameters: any = null;
if (context & Context.OptionsTS && parser.token === Token.LessThan) {
typeParameters = parseTypeParameters(parser, context);
if (parser.token !== Token.LeftParen) report(parser, context, Errors.Unexpected, /* early */ 0);
}
if (parser.token === Token.LeftParen) {
declaration = parseAsyncArrowOrCallExpression(parser, context, 1, typeParameters, declaration);
declaration = parseMemberOrUpdateExpression(parser, context, declaration as any);
declaration = parseAssignmentExpression(parser, context, declaration as any);
} else if ((parser.token & (Token.FutureReserved | Token.IdentifierOrKeyword)) !== 0) {
declaration = parseIdentifier(parser, context);
declaration = parseArrowFunctionExpression(parser, context, [declaration], null, null, 1, /* isAsync */ 1);
}
}
}
break;
case Token.InterfaceKeyword:
if (context & Context.OptionsTS) {
declaration = parseDeclarationList(parser, context, /* isModule */ 1, 1, /* isExport */ false);
break;
}
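// falls through: outside of Context.OptionsTS, 'interface' is handled as an ordinary expression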
default:
declaration = parseExpression(parser, context);
consumeSemicolon(parser, context | Context.AllowRegExp);
}
return finishNode({
type: 'ExportDefaultDeclaration',
declaration
});
}
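/*
 * Illustrative 'export default' forms dispatched above (sketch, not exhaustive):
 *
 *   export default function f() {}    // hoistable declaration
 *   export default class C {}         // class declaration (decorators supported)
 *   export default async () => 0;     // async arrow / call disambiguation
 *   export default interface I {}     // TS only (Context.OptionsTS)
 *   export default a + b;             // any assignment expression followed by ';'
 */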
export function parseExportDeclaration(parser: ParserState, context: Context): any {
nextToken(parser, context | Context.AllowRegExp);
const specifiers: any[] = [];
let declaration: any = null;
let source: ESTree.Literal | null = null;
if (parser.token === Token.DefaultKeyword) {
return parseExportDefaultDeclaration(parser, context) as any;
}
switch (parser.token) {
case Token.Multiply: {
let ecma262PR: 0 | 1 = 0;
nextToken(parser, context); // Skips: '*'
if (context & Context.OptionsNext && consumeOpt(parser, context, Token.AsKeyword)) {
ecma262PR = 1;
specifiers.push(
finishNode({
type: 'ExportNamespaceSpecifier',
specifier: parseIdentifier(parser, context)
})
);
}
consume(parser, context, Token.FromKeyword);
source = parseLiteral(parser, context);
consumeSemicolon(parser, context | Context.AllowRegExp);
return ecma262PR
? {
type: 'ExportNamedDeclaration',
source,
specifiers
}
: ({
type: 'ExportAllDeclaration',
source
} as any);
}
case Token.LeftBrace: {
nextToken(parser, context); // Skips: '{'
while ((parser.token & (Token.FutureReserved | Token.Keywords | Token.IdentifierOrKeyword)) !== 0) {
const local = parseIdentifier(parser, context);
let exported: ESTree.Identifier | null;
if (parser.token === Token.AsKeyword) {
nextToken(parser, context);
exported = parseIdentifier(parser, context);
} else {
exported = local;
}
specifiers.push(
finishNode({
type: 'ExportSpecifier',
local,
exported
} as any)
);
if (parser.token !== Token.RightBrace) consume(parser, context, Token.Comma);
}
consume(parser, context, Token.RightBrace);
if (consumeOpt(parser, context, Token.FromKeyword)) {
// The left hand side can't be a keyword where there is no
// 'from' keyword since it references a local binding.
if (parser.token !== Token.StringLiteral) report(parser, context, Errors.UnexpectedToken, 0, 'Export');
source = parseLiteral(parser, context);
}
consumeSemicolon(parser, context | Context.AllowRegExp);
break;
}
case Token.At:
case Token.ClassKeyword:
declaration =
context & Context.OptionsTS
? parseTypescriptClassDeclaration(
parser,
context,
ClassAndFunctionFlags.None,
/* declare */ 0,
/* abstract */ 0
)
: parseClassDeclaration(parser, context, ClassAndFunctionFlags.None);
break;
case Token.FunctionKeyword:
declaration = parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.Export,
Origin.TopLevel,
0,
/* allowGen */ 1,
false
);
break;
case Token.LetKeyword:
declaration = parseLexicalDeclaration(parser, context, BindingKind.Let, Origin.Export);
break;
case Token.ConstKeyword:
declaration = parseLexicalDeclaration(parser, context, BindingKind.Const, Origin.Export | Origin.Declare);
break;
case Token.VarKeyword:
declaration = parseVariableStatement(parser, context, Origin.Export);
break;
case Token.AsyncKeyword:
nextToken(parser, context);
if (parser.precedingLineBreak === 0 && parser.token === Token.FunctionKeyword) {
declaration = parseFunctionDeclaration(
parser,
context,
ClassAndFunctionFlags.Export,
Origin.TopLevel,
1,
/* allowGen */ 1,
false
);
break;
}
// falls through
default:
report(parser, context, Errors.UnexpectedToken, 0, KeywordDescTable[parser.token & Token.Type]);
}
return finishNode({
type: 'ExportNamedDeclaration',
source,
specifiers,
declaration
});
}
export function parseStatement(parser: ParserState, context: Context, pos: number): any {
switch (parser.token) {
case Token.VarKeyword:
return parseVariableStatement(parser, context, Origin.None);
// [+Return] ReturnStatement[?Yield]
case Token.ReturnKeyword:
return parseReturnStatement(parser, context);
case Token.IfKeyword:
return parseIfStatement(parser, context);
case Token.ForKeyword:
return parseForStatement(parser, context);
// BreakableStatement[Yield, Return]:
// IterationStatement[?Yield, ?Return]
// SwitchStatement[?Yield, ?Return]
case Token.DoKeyword:
return parseDoWhileStatement(parser, context);
case Token.WhileKeyword:
return parseWhileStatement(parser, context);
case Token.SwitchKeyword:
return parseSwitchStatement(parser, context);
case Token.Semicolon:
// EmptyStatement
return parseEmptyStatement(parser, context);
// BlockStatement[?Yield, ?Return]
case Token.LeftBrace:
return parseBlock(parser, context);
// ThrowStatement[?Yield]
case Token.ThrowKeyword:
return parseThrowStatement(parser, context);
case Token.BreakKeyword:
// BreakStatement[?Yield]
return parseBreakStatement(parser, context);
// ContinueStatement[?Yield]
case Token.ContinueKeyword:
return parseContinueStatement(parser, context);
// TryStatement[?Yield, ?Return]
case Token.TryKeyword:
return parseTryStatement(parser, context);
// WithStatement[?Yield, ?Return]
case Token.WithKeyword:
return parseWithStatement(parser, context);
case Token.DebuggerKeyword:
// DebuggerStatement
return parseDebuggerStatement(parser, context);
case Token.AsyncKeyword:
return parseAsyncArrowOrAsyncFunctionDeclaration(parser, context);
case Token.CatchKeyword:
case Token.FinallyKeyword:
case Token.FunctionKeyword:
case Token.ClassKeyword:
report(parser, context, Errors.Unexpected, /* early */ 0);
default:
const { token } = parser;
const expr = parsePrimaryExpression(parser, context, /* isLHS */ 1, /* inParen */ 0);
return parseExpressionOrLabelledStatement(parser, context, expr, token, pos);
}
}
export function parseExpressionOrLabelledStatement(
parser: ParserState,
context: Context,
expr: any,
token: Token,
pos: number
): any {
if (parser.token === Token.Colon) return parseLabelledStatement(parser, context, token, expr, pos);
expr = parseMemberOrUpdateExpression(parser, context, expr);
expr = parseAssignmentExpression(parser, context, expr);
if (parser.token === Token.Comma) expr = parseSequenceExpression(parser, context, expr);
return parseExpressionStatement(parser, context, expr);
}
function parseLetIdentOrVarDeclarationStatement(parser: ParserState, context: Context): any {
const { token } = parser;
let expr: any = parseIdentifier(parser, context);
if (parser.token & (Token.IsIdentifier | Token.IsPatternStart)) {
/* VariableDeclarations ::
* ('let') (Identifier ('=' AssignmentExpression)?)+[',']
*/
const declarations = parseVariableDeclarationList(parser, context, BindingKind.Let, Origin.None);
consumeSemicolon(parser, context | Context.AllowRegExp);
return finishNode({
type: 'VariableDeclaration',
kind: 'let',
declarations
});
}
// 'Let' as identifier
if (parser.token === Token.Colon) {
return parseLabelledStatement(parser, context, token, expr, 1);
}
if (parser.token === Token.Arrow) {
expr = parseArrowFunctionExpression(parser, context, [expr], null, null, 1, /* isAsync */ 0);
} else {
expr = parseMemberOrUpdateExpression(parser, context, expr);
expr = parseAssignmentExpression(parser, context, expr);
}
// Sequence expression
if (parser.token === Token.Comma) {
expr = parseSequenceExpression(parser, context, expr);
}
return parseExpressionStatement(parser, context, expr);
}
export function parseReturnStatement(parser: ParserState, context: Context): ESTree.ReturnStatement {
// ReturnStatement ::
// 'return' [no line terminator] Expression? ';'
nextToken(parser, context | Context.AllowRegExp);
const argument =
parser.precedingLineBreak || parser.token & Token.IsAutoSemicolon ? null : parseExpressions(parser, context);
consumeSemicolon(parser, context | Context.AllowRegExp);
return finishNode({
type: 'ReturnStatement',
argument
});
}
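/*
 * ASI sketch (illustrative): a line break right after 'return' terminates the
 * statement, so the argument becomes null and the rest parses separately:
 *
 *   return x + 1;   // ReturnStatement with argument `x + 1`
 *
 *   return
 *   x + 1;          // ReturnStatement with argument null, then an ExpressionStatement
 */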
export function parseEmptyStatement(parser: ParserState, context: Context): ESTree.EmptyStatement {
nextToken(parser, context | Context.AllowRegExp);
return finishNode({
type: 'EmptyStatement'
});
}
export function parseThrowStatement(parser: ParserState, context: Context): any {
// ThrowStatement ::
// 'throw' Expression ';'
nextToken(parser, context | Context.AllowRegExp);
const argument: ESTree.Expression = parseExpressions(parser, context);
consumeSemicolon(parser, context | Context.AllowRegExp);
return finishNode({
type: 'ThrowStatement',
argument
} as any);
}
export function parseIfStatement(parser: ParserState, context: Context): ESTree.IfStatement {
// IfStatement ::
// 'if' '(' Expression ')' Statement ('else' Statement)?
nextToken(parser, context);
consume(parser, context | Context.AllowRegExp, Token.LeftParen);
const test = parseExpressions(parser, context);
consume(parser, context | Context.AllowRegExp, Token.RightParen);
const consequent = parseConsequentOrAlternative(parser, context);
let alternate: ESTree.Statement | null = null;
if (parser.token === Token.ElseKeyword) {
nextToken(parser, context | Context.AllowRegExp);
alternate = parseConsequentOrAlternative(parser, context);
}
return finishNode({
type: 'IfStatement',
test,
consequent,
alternate
});
}
export function parseForStatement(
parser: ParserState,
context: Context
): ESTree.ForStatement | ESTree.ForInStatement | ESTree.ForOfStatement {
nextToken(parser, context);
const forAwait = (context & Context.InAwaitContext) > 0 && consumeOpt(parser, context, Token.AwaitKeyword);
consume(parser, context | Context.AllowRegExp, Token.LeftParen);
let test: ESTree.Expression | null = null;
let update: ESTree.Expression | null = null;
let init = null;
let isVarDecl =
parser.token === Token.VarKeyword || parser.token === Token.LetKeyword || parser.token === Token.ConstKeyword;
let right;
const { token } = parser;
if (isVarDecl) {
if (token === Token.LetKeyword) {
init = parseIdentifier(parser, context);
if (parser.token & (Token.IsIdentifier | Token.IsPatternStart)) {
if (parser.token === Token.InKeyword) {
if (context & Context.Strict) report(parser, context, Errors.Unexpected, /* early */ 0);
} else {
init = {
type: 'VariableDeclaration',
kind: 'let',
declarations: parseVariableDeclarationList(
parser,
context | Context.DisallowIn,
BindingKind.Let,
Origin.ForStatement
)
};
}
} else if (context & Context.Strict) {
report(parser, context, Errors.Unexpected, /* early */ 0);
} else {
isVarDecl = false;
init = parseMemberOrUpdateExpression(parser, context, init);
// `for of` only allows LeftHandSideExpressions which do not start with `let`, and no other production matches
if (parser.token === Token.OfKeyword) report(parser, context, Errors.Unexpected, /* early */ 1);
}
} else {
nextToken(parser, context);
init =
token === Token.VarKeyword
? {
type: 'VariableDeclaration',
kind: 'var',
declarations: parseVariableDeclarationList(
parser,
context | Context.DisallowIn,
BindingKind.Variable,
Origin.ForStatement
)
}
: {
type: 'VariableDeclaration',
kind: 'const',
declarations: parseVariableDeclarationList(
parser,
context | Context.DisallowIn,
BindingKind.Const,
Origin.ForStatement
)
};
}
} else if (token === Token.Semicolon) {
if (forAwait) report(parser, context, Errors.Unexpected, /* early */ 1);
} else if ((token & Token.IsPatternStart) === Token.IsPatternStart) {
init =
token === Token.LeftBrace
? parseObjectLiteralOrPattern(parser, context, 0, /* isPattern */ 0, [])
: parseArrayExpressionOrPattern(parser, context, 0, /* isPattern */ 0, []);
init = parseMemberOrUpdateExpression(parser, context | Context.DisallowIn, init as ESTree.Expression);
} else {
init = parseLeftHandSideExpression(parser, context | Context.DisallowIn, /* isLHS */ 1);
}
if (parser.token === Token.OfKeyword || parser.token === Token.InKeyword) {
if (parser.token === Token.OfKeyword) {
reinterpretToPattern(parser, init);
nextToken(parser, context | Context.AllowRegExp);
// IterationStatement:
// for(LeftHandSideExpression of AssignmentExpression) Statement
// for await (LeftHandSideExpression of AssignmentExpression) Statement
right = parseExpression(parser, context);
consume(parser, context | Context.AllowRegExp, Token.RightParen);
const body = parseIterationStatementBody(parser, context);
return finishNode({
type: 'ForOfStatement',
body,
left: init,
right,
await: forAwait
});
}
reinterpretToPattern(parser, init);
nextToken(parser, context | Context.AllowRegExp);
// IterationStatement:
// for(LeftHandSideExpression in Expression) Statement
right = parseExpressions(parser, context);
consume(parser, context | Context.AllowRegExp, Token.RightParen);
const body = parseIterationStatementBody(parser, context);
return finishNode({
type: 'ForInStatement',
body,
left: init,
right
});
}
if (!isVarDecl) {
init = parseAssignmentExpression(parser, context | Context.DisallowIn, init);
}
if (parser.token === Token.Comma) init = parseSequenceExpression(parser, context, init);
consume(parser, context | Context.AllowRegExp, Token.Semicolon);
if (parser.token !== Token.Semicolon) test = parseExpressions(parser, context);
consume(parser, context | Context.AllowRegExp, Token.Semicolon);