tokenize-json
Streaming, environment-agnostic JSON tokenizer.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const test = require("tape-promise/tape");
const token_1 = require("./token");
const tokenize_1 = require("./tokenize");
// Demo: stream package.json from disk and count how many objects it contains.
// The promise is caught so a tokenizer error cannot become an unhandled rejection.
readPackage().catch((err) => console.error(err));
async function readPackage() {
    const stream = fs.createReadStream("package.json", "utf8");
    const tokens = tokenize_1.tokenize(stream);
    let objectCount = 0;
    for await (const token of tokens) {
        if (token.type === token_1.TokenType.ObjectOpen) {
            objectCount++;
        }
    }
    console.log(`found ${objectCount} objects`);
}
test("object", async (t) => {
t.deepEqual(await toTokenList("{}{}"), [
{ type: token_1.TokenType.ObjectOpen, value: "{" },
{ type: token_1.TokenType.ObjectClose, value: "}" },
{ type: token_1.TokenType.ObjectOpen, value: "{" },
{ type: token_1.TokenType.ObjectClose, value: "}" },
]);
});
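// Note: the input need not be a single JSON document; two back-to-back values
// ("{}{}") tokenize without error, so enforcing a single top-level value is
// left to the consumer.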
test("array", async (t) => {
t.deepEqual(await toTokenList("[][]"), [
{ type: token_1.TokenType.ArrayOpen, value: "[" },
{ type: token_1.TokenType.ArrayClose, value: "]" },
{ type: token_1.TokenType.ArrayOpen, value: "[" },
{ type: token_1.TokenType.ArrayClose, value: "]" },
]);
});
test("nested array", async (t) => {
t.deepEqual(await toTokenList("[[],[[]],{}]"), [
{ type: token_1.TokenType.ArrayOpen, value: "[" },
{ type: token_1.TokenType.ArrayOpen, value: "[" },
{ type: token_1.TokenType.ArrayClose, value: "]" },
{ type: token_1.TokenType.Comma, value: "," },
{ type: token_1.TokenType.ArrayOpen, value: "[" },
{ type: token_1.TokenType.ArrayOpen, value: "[" },
{ type: token_1.TokenType.ArrayClose, value: "]" },
{ type: token_1.TokenType.ArrayClose, value: "]" },
{ type: token_1.TokenType.Comma, value: "," },
{ type: token_1.TokenType.ObjectOpen, value: "{" },
{ type: token_1.TokenType.ObjectClose, value: "}" },
{ type: token_1.TokenType.ArrayClose, value: "]" },
]);
});
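// The token stream is flat: nesting is conveyed only by the pairing of
// open/close tokens, so consumers track depth themselves (readPackage above
// just counts every ObjectOpen, nested or not).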
test("whitespace", async (t) => {
t.deepEqual(await toTokenList(" { } "), [
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.ObjectOpen, value: "{" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.ObjectClose, value: "}" },
{ type: token_1.TokenType.Whitespace, value: " " },
]);
});
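// Whitespace is emitted as tokens rather than discarded, so concatenating the
// value of every token reproduces the input verbatim, which makes lossless
// rewriting possible.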
test("string", async (t) => {
t.deepEqual(await toTokenList(" \"a\\\"bc\" \"\\u1234\\uffff\" "), [
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.StringOpen, value: "\"" },
{ type: token_1.TokenType.StringChunk, value: "a\\\"bc" },
{ type: token_1.TokenType.StringClose, value: "\"" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.StringOpen, value: "\"" },
{ type: token_1.TokenType.StringChunk, value: "\\u1234\\uffff" },
{ type: token_1.TokenType.StringClose, value: "\"" },
{ type: token_1.TokenType.Whitespace, value: " " },
]);
});
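// Escape sequences such as \" and \uXXXX are passed through verbatim inside
// StringChunk tokens; decoding them is left to the consumer (see the sketch
// after toTokenList below).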
test("keyword", async (t) => {
t.deepEqual(await toTokenList(" false true null "), [
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.False, value: "false" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.True, value: "true" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.Null, value: "null" },
{ type: token_1.TokenType.Whitespace, value: " " },
]);
});
test("object with members", async (t) => {
t.deepEqual(await toTokenList("{\"a\":true}"), [
{ type: token_1.TokenType.ObjectOpen, value: "{" },
{ type: token_1.TokenType.StringOpen, value: "\"" },
{ type: token_1.TokenType.StringChunk, value: "a" },
{ type: token_1.TokenType.StringClose, value: "\"" },
{ type: token_1.TokenType.Colon, value: ":" },
{ type: token_1.TokenType.True, value: "true" },
{ type: token_1.TokenType.ObjectClose, value: "}" },
]);
});
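// JSON forbids leading zeros, so "-000.1" in the next test cannot be a single
// number token: the expected tokens show a new Number starting wherever the
// previous one could not be validly extended ("-0", "0", "0.1").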
test("number", async (t) => {
t.deepEqual(await toTokenList("10 -2 -000.1 10e2 10E-2"), [
{ type: token_1.TokenType.Number, value: "10" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.Number, value: "-2" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.Number, value: "-0" },
{ type: token_1.TokenType.Number, value: "0" },
{ type: token_1.TokenType.Number, value: "0.1" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.Number, value: "10e2" },
{ type: token_1.TokenType.Whitespace, value: " " },
{ type: token_1.TokenType.Number, value: "10E-2" },
]);
});
// Runs the tokenizer over the given input and collects every token into an array.
async function toTokenList(chunks) {
    const tokens = tokenize_1.tokenize(chunks);
    const list = [];
    for await (const token of tokens) {
        list.push(token);
    }
    return list;
}
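// Sketch (not part of the suite): one way a consumer might decode string
// values from the token stream, assuming (per the "string" test above) that
// StringChunk carries the raw, still-escaped source text. JSON.parse is
// borrowed to resolve the escapes.
async function stringValues(chunks) {
    const values = [];
    let raw = null;
    for (const token of await toTokenList(chunks)) {
        if (token.type === token_1.TokenType.StringOpen) {
            raw = "";
        } else if (token.type === token_1.TokenType.StringChunk) {
            raw += token.value;
        } else if (token.type === token_1.TokenType.StringClose) {
            values.push(JSON.parse(`"${raw}"`));
            raw = null;
        }
    }
    return values;
}
// Example: stringValues("\"a\\\"bc\"") resolves to ['a"bc'].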
//# sourceMappingURL=tokenize.spec.js.map