UNPKG

parser-combinator

Version: (not captured in this extract)
51 lines (36 loc) · 5.2 kB
'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); /* * Parsec * https://github.com/d-plaindoux/parsec * * Copyright (c) 2016 Didier Plaindoux * Licensed under the LGPL2 license. */ exports.default = function (keywords) { return new Tokenizer(keywords); }; var _genlex = require('./genlex'); var _genlex2 = _interopRequireDefault(_genlex); var _token = require('./token'); var _token2 = _interopRequireDefault(_token); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var Tokenizer = function () { // [String] -> Tokenizer function Tokenizer(keywords) { _classCallCheck(this, Tokenizer); this.parser = _genlex2.default.generator(keywords).tokens(_token2.default.builder); } // Stream char -> Try [Token] _createClass(Tokenizer, [{ key: 'tokenize', value: function tokenize(charstream) { return this.parser.parse(charstream, 0).toTry(); } }]); return Tokenizer; }(); //# sourceMappingURL=tokenizer.js.map