/*
 * parser-combinator — tokenizer tests (compiled output)
 * Parser combinators
 */
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _index = require('../../lib/stream/index');
var _index2 = _interopRequireDefault(_index);
var _tokenizer = require('../../lib/genlex/tokenizer');
var _tokenizer2 = _interopRequireDefault(_tokenizer);
var _token = require('../../lib/genlex/token');
var _token2 = _interopRequireDefault(_token);
// Normalize a required module so `.default` access works uniformly:
// ES-module namespaces pass through untouched; CommonJS exports get
// wrapped as `{ default: exports }`.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
var tkBuilder = _token2.default.builder;
/*
======== A Handy Little Nodeunit Reference ========
https://github.com/caolan/nodeunit
Test methods:
test.expect(numAssertions)
test.done()
Test assertions:
test.ok(value, [message])
test.equal(actual, expected, [message])
test.notEqual(actual, expected, [message])
test.deepEqual(actual, expected, [message])
test.notDeepEqual(actual, expected, [message])
test.strictEqual(actual, expected, [message])
test.notStrictEqual(actual, expected, [message])
test.throws(block, [error], [message])
test.doesNotThrow(block, [error], [message])
test.ifError(value)
*/
exports.default = {
setUp: function setUp(done) {
done();
},
'tokeniser is a success': function tokeniserIsASuccess(test) {
test.expect(1);
// tests here
test.ok((0, _tokenizer2.default)([':', '->']).tokenize(_index2.default.ofString('type f : a -> b')).isSuccess(), 'should be a success.');
test.done();
},
'tokeniser return a list of tokens': function tokeniserReturnAListOfTokens(test) {
test.expect(1);
// tests here
test.deepEqual((0, _tokenizer2.default)(['let', 'in', '=', '->']).tokenize(_index2.default.ofString('let f = \'a\' in "aa"')).success(), [tkBuilder.keyword('let'), tkBuilder.ident('f'), tkBuilder.keyword('='), tkBuilder.char('a'), tkBuilder.keyword('in'), tkBuilder.string('aa')], 'should be a a list of tokens.');
test.done();
}
};
//# sourceMappingURL=tokenizer_test.js.map