/*
 * Diffusion JavaScript client.
 */
/*eslint valid-jsdoc: "off"*/
var Tokeniser = require('cbor/tokeniser');
var consts = require('cbor/consts');
var tokens = consts.tokens,
types = consts.types;
// Recursive token-parsing function; will evaluate tokens and build up complete object representations in order.
/*eslint complexity: ["error", 20]*/
/**
 * Recursively decode a single CBOR value from a token stream.
 *
 * @param {Object} token - the token that begins the value (already read from `tokeniser`)
 * @param {Object} tokeniser - source of subsequent tokens for container/chunked values
 * @returns {*} the fully-constructed JavaScript value
 * @throws {Error} on premature end-of-stream or an unexpected token type
 */
function decode(token, tokeniser) {
    switch (token.type) {
    case tokens.VALUE :
        // Primitive value; nothing further to read.
        return token.value;
    case tokens.MAP_START :
        var obj = {};
        for (;;) {
            var field = tokeniser.nextToken();
            if (field === null) {
                throw new Error('Unexpected EOF (reading: map key)');
            }
            if (field.type === tokens.MAP_END) {
                break;
            }
            var value = tokeniser.nextToken();
            // A MAP_END here means the map had an odd number of items (a key
            // with no value), which is treated the same as truncation.
            if (value === null || value.type === tokens.MAP_END) {
                throw new Error('Unexpected EOF (reading: map value)');
            }
            obj[decode(field, tokeniser)] = decode(value, tokeniser);
        }
        return obj;
    case tokens.ARRAY_START :
        var arr = [];
        for (;;) {
            var element = tokeniser.nextToken();
            if (element === null) {
                throw new Error('Unexpected EOF (reading: array value)');
            }
            if (element.type === tokens.ARRAY_END) {
                break;
            }
            arr.push(decode(element, tokeniser));
        }
        return arr;
    case tokens.STRING_START :
        // Indefinite-length string: collect chunks until STRING_END, then
        // join them according to the major type (bytes vs text).
        var chunks = [];
        for (;;) {
            var chunk = tokeniser.nextToken();
            if (chunk === null) {
                // Fixed: message previously had an unbalanced parenthesis.
                throw new Error('Unexpected EOF (reading: indefinite-length string)');
            }
            if (chunk.type === tokens.STRING_END) {
                break;
            }
            // RFC 8949 requires every chunk to share the enclosing string's
            // major type; a mismatch indicates a malformed stream.
            if (chunk.header.type !== token.header.type) {
                throw new Error('Unexpected chunk type (' + chunk.header.type + ') within string');
            }
            chunks.push(chunk.value);
        }
        var joined;
        if (token.header.type === types.BYTES) {
            joined = Buffer.concat(chunks);
        }
        if (token.header.type === types.STRING) {
            joined = chunks.join('');
        }
        return joined;
    default :
        throw new Error('Unexpected token: ' + JSON.stringify(token));
    }
}
/**
* Provide a higher-level interface around Tokeniser, to enable the reading of
* complex values as represented in arrays/objects etc.
*/
module.exports = function Decoder(initial, offset, length) {
var tokeniser = (Buffer.isBuffer(initial)) ? new Tokeniser(initial, offset, length) : initial;
this.hasRemaining = tokeniser.hasRemaining;
/**
* Return the next value, parsed recursively.
* <P>
* If no tokens can be read, a read error will be thrown.
*/
this.nextValue = function () {
return this.nextValueExpecting(module.exports.isAny);
};
/**
* Return the next value, iff its CBOR type is acceptible to `cborTypePredicate`
* <P>
* `cborTypePredicate` must return true if the CBOR type is acceptible, false otherwise.
*/
this.nextValueExpecting = function (cborTypePredicate) {
if (tokeniser.hasRemaining()) {
var token = tokeniser.nextToken();
if (cborTypePredicate(token.header.type)) {
return decode(token, tokeniser);
} else {
throw new Error("Unexpected CBOR type");
}
} else {
throw new Error('Token stream exhausted');
}
};
};
/**
 * Predicate: true iff the CBOR type is a floating-point value.
 */
module.exports.isDouble = function (cborType) {
    return cborType === types.FLOAT;
};
/**
 * Predicate: true iff the CBOR type is an integer (unsigned or negative).
 */
module.exports.isInt64 = function (cborType) {
    switch (cborType) {
    case types.UINT :
    case types.INT :
        return true;
    default :
        return false;
    }
};
/**
 * Predicate: true iff the CBOR type is a text string.
 */
module.exports.isString = function (cborType) {
    return cborType === types.STRING;
};
/**
 * Predicate that accepts every CBOR type; the default used by `nextValue`.
 */
module.exports.isAny = function () {
    return true;
};