/**
 * xast — AST parsing library (compiled JavaScript output).
 * Registers the default tokenizers on a lexer instance.
 */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.addDefaultTokenizers = void 0;
const TokenKind_1 = require("./TokenKind");
const createToken_1 = require("./createToken");
const readName_1 = require("./tokenizers/readName");
const readString_1 = require("./tokenizers/readString");
const readNumber_1 = require("./tokenizers/readNumber");
const readComment_1 = require("./tokenizers/readComment");
/**
 * Builds a tokenizer for a fixed single-character token of the given kind.
 *
 * @param tokenKind - the TokenKind to emit when the character is matched.
 * @returns a tokenizer `(lexer, position) => Token` spanning exactly one character.
 */
const createTokenizer = (tokenKind) => (lexer, position) => {
    const start = position;
    const end = start + 1;
    return (0, createToken_1.createToken)(lexer, tokenKind, start, end);
};
/**
 * Registers the default set of tokenizers on a lexer: comments (`#`, `//`),
 * strings (", ', `), numbers (leading `-` and digits), names (`_`, a-z, A-Z),
 * and the single/multi-character punctuators defined in TokenKind.
 *
 * @param lexer - lexer exposing `add(charCode, tokenizer)` and `source.body`
 *   (the raw input string). NOTE(review): exact lexer shape inferred from
 *   usage here — confirm against the Lexer definition.
 */
const addDefaultTokenizers = (lexer) => {
    // '#' starts a comment.
    lexer.add(0x0023, (lexer, position, code) => (0, readComment_1.readComment)(lexer, position, code));
    // '/' starts a comment only when followed by a second '/'.
    lexer.add(0x002f, (lexer, position, code) => {
        // Fix: use String#charCodeAt instead of indexing the string. When a
        // lone '/' is the last character of the input, body[position + 1] is
        // undefined and `.charCodeAt(0)` on it throws a TypeError; charCodeAt
        // returns NaN out of range, so the comparison safely fails instead.
        // (Matches the lookahead style used for SPREAD below.)
        if (lexer.source.body.charCodeAt(position + 1) === 0x002f) {
            return (0, readComment_1.readComment)(lexer, position + 1, code);
        }
    });
    // String literals: double quote, single quote, backtick.
    lexer.add(0x0022, readString_1.readString);
    lexer.add(0x0027, readString_1.readString);
    lexer.add(0x0060, readString_1.readString);
    // Numbers: leading minus sign and digits 0-9.
    lexer.add(0x002d, readNumber_1.readNumber);
    for (let code = 0x0030; code <= 0x0039; code++) {
        lexer.add(code, readNumber_1.readNumber);
    }
    // Names: underscore, a-z, A-Z.
    lexer.add(0x005f, readName_1.readName);
    for (let code = 0x0061; code <= 0x007a; code++) {
        lexer.add(code, readName_1.readName);
    }
    for (let code = 0x0041; code <= 0x005a; code++) {
        lexer.add(code, readName_1.readName);
    }
    // Single-character punctuators.
    lexer.add(0x0021, createTokenizer(TokenKind_1.TokenKind.BANG));
    lexer.add(0x0024, createTokenizer(TokenKind_1.TokenKind.DOLLAR));
    lexer.add(0x0026, createTokenizer(TokenKind_1.TokenKind.AMP));
    lexer.add(0x0028, createTokenizer(TokenKind_1.TokenKind.PAREN_L));
    lexer.add(0x0029, createTokenizer(TokenKind_1.TokenKind.PAREN_R));
    lexer.add(0x002c, createTokenizer(TokenKind_1.TokenKind.COMMA));
    // '.' is either a three-dot SPREAD or a single DOT.
    lexer.add(0x002e, (lexer, position) => {
        const body = lexer.source.body;
        if (body.charCodeAt(position + 1) === 0x002e &&
            body.charCodeAt(position + 2) === 0x002e) {
            return (0, createToken_1.createToken)(lexer, TokenKind_1.TokenKind.SPREAD, position, position + 3);
        }
        return (0, createToken_1.createToken)(lexer, TokenKind_1.TokenKind.DOT, position, position + 1);
    });
    lexer.add(0x003a, createTokenizer(TokenKind_1.TokenKind.COLON));
    lexer.add(0x003b, createTokenizer(TokenKind_1.TokenKind.SEMICOLON));
    lexer.add(0x003d, createTokenizer(TokenKind_1.TokenKind.EQUALS));
    lexer.add(0x0040, createTokenizer(TokenKind_1.TokenKind.AT));
    lexer.add(0x005b, createTokenizer(TokenKind_1.TokenKind.BRACKET_L));
    lexer.add(0x005d, createTokenizer(TokenKind_1.TokenKind.BRACKET_R));
    lexer.add(0x007b, createTokenizer(TokenKind_1.TokenKind.BRACE_L));
    lexer.add(0x007c, createTokenizer(TokenKind_1.TokenKind.PIPE));
    lexer.add(0x007d, createTokenizer(TokenKind_1.TokenKind.BRACE_R));
    lexer.add(0x003f, createTokenizer(TokenKind_1.TokenKind.QUESTION_MARK));
};
// Re-assign the real implementation over the `void 0` placeholder above
// (standard tsc CommonJS export pattern).
exports.addDefaultTokenizers = addDefaultTokenizers;
//# sourceMappingURL=addDefaultTokenizers.js.map