"use strict";
/**
* @module diffusion.datatypes
*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __values = (this && this.__values) || function(o) {
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
if (m) return m.call(o);
if (o && typeof o.length === "number") return {
next: function () {
if (o && i >= o.length) o = void 0;
return { value: o && o[i++], done: !o };
}
};
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SpanParser = void 0;
var errors_1 = require("./../../../errors/errors");
var consts = require("./../../cbor/consts");
var tokeniser_1 = require("./../../cbor/tokeniser");
var json_pointer_1 = require("./../../data/json/json-pointer");
/**
* An abstract accumulator that accumulates JSON values from tokens and passes
* the result to a {@link SpanParserConsumer}.
*/
var AbstractAccumulator = /** @class */ (function () {
/**
* Create an AbstractAccumulator
*
* @param tokeniser the CBOR tokeniser
* @param base the base JSON pointer to accumulate from
* @param start the offset of the start token
* @param next the offset of the next token
*/
function AbstractAccumulator(tokeniser, base, start, next) {
/**
* The token ranges of the accumulated tokens
*/
this.tokenRange = [];
/**
* The number of accumulated tokens
*/
this.accumulatedTokens = 0;
/**
* The total number of tokens encountered
*/
this.totalTokens = 0;
this.tokeniser = tokeniser;
this.base = base;
this.startOffset = start;
this.nextOffset = next;
}
/**
* Get the total number of tokens encountered
*
* @return the total number of tokens encountered
*/
AbstractAccumulator.prototype.total = function () {
return this.totalTokens;
};
/**
* Get the number of accumulated tokens
*
* @return the number of accumulated tokens
*/
AbstractAccumulator.prototype.accumulated = function () {
return this.accumulatedTokens;
};
/**
* Create a new {@link ArrayAccumulator} from the current token position
*
* @param start the offset of the start token
* @param next the offset of the next token
* @return a new array accumulator
*/
AbstractAccumulator.prototype.newArray = function (start, next) {
/* tslint:disable-next-line:no-use-before-declare */
return new ArrayAccumulator(this.tokeniser, this.currentPointer(this.base, this.totalTokens), start, next);
};
/**
* Create a new {@link ObjectAccumulator} from the current token position
*
* @param start the offset of the start token
* @param next the offset of the next token
* @return a new object accumulator
*/
AbstractAccumulator.prototype.newObject = function (start, next) {
/* tslint:disable-next-line:no-use-before-declare */
return new ObjectAccumulator(this.tokeniser, this.currentPointer(this.base, this.totalTokens), start, next);
};
/**
* Check if the offset splits the accumulator content
*
* @param offset the offset to check
* @return `true` if the accumulator has encountered any tokens and
* the offset is greater than the starting offset
*/
AbstractAccumulator.prototype.notEmptyAndSplitBy = function (offset) {
return this.startOffset < offset && this.totalTokens !== 0;
};
/**
* Add a token to the accumulator
*
* @param next the offset up to which to add the token
* @return `false`; subclasses may return `true` to indicate that the token
*         was consumed as an object key and its value is still pending
*/
AbstractAccumulator.prototype.add = function (next) {
this.tokenRange[this.accumulatedTokens] = [this.nextOffset, next];
this.nextOffset = next;
++this.accumulatedTokens;
++this.totalTokens;
return false;
};
/**
* Increase the next offset without adding tokens
*
* @param offset the new next offset
*/
AbstractAccumulator.prototype.setNextStart = function (offset) {
this.nextOffset = offset;
};
/**
* Skip one token
*/
AbstractAccumulator.prototype.skip = function () {
++this.totalTokens;
this.accumulatedTokens = 0;
};
/**
* Take all tokens. Passes all accumulated tokens to the abstract {@link take}
* method.
*
* @param consumer the consumer that will receive the tokens
*/
AbstractAccumulator.prototype.takeAll = function (consumer) {
var next = this.totalTokens;
var begin = next - this.accumulatedTokens;
for (var i = 0; i < this.accumulatedTokens; ++i) {
var range = this.tokenRange[i];
var start = range[0];
var end = range[1];
this.take(consumer, this.base, begin, begin + i, start, end - start);
}
this.accumulatedTokens = 0;
};
/**
* Signal the end of a split structure
*
* @param consumer the consumer that will receive the end notification
*/
AbstractAccumulator.prototype.splitStructureEnd = function (consumer) {
consumer.splitStructureEnd(this.base, this.totalTokens, this.startOffset, this.nextOffset + 1 - this.startOffset);
};
/**
* Convert the accumulator to a string
*
* @returns a string representation of the accumulator
*/
AbstractAccumulator.prototype.toString = function () {
return this.base.toString();
};
return AbstractAccumulator;
}());
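/*
* Illustrative sketch of the accumulator lifecycle (an assumption drawn from
* the methods above, not part of the original source; the offsets and the
* `consumer` object are hypothetical):
*
* accumulator.add(4);            // records the byte range [nextOffset, 4)
* accumulator.add(9);            // records the byte range [4, 9)
* accumulator.takeAll(consumer); // calls consumer.accept once per recorded
*                                // range, with a pointer derived from `base`
*/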
/**
* An implementation of the {@link AbstractAccumulator} that accumulates array
* values
*/
var ArrayAccumulator = /** @class */ (function (_super) {
__extends(ArrayAccumulator, _super);
/**
* Create an ArrayAccumulator
*
* @param tokeniser the CBOR tokeniser
* @param base the base JSON pointer to accumulate from
* @param start the offset of the start token
* @param next the offset of the next token
*/
function ArrayAccumulator(tokeniser, base, start, next) {
return _super.call(this, tokeniser, base, start, next) || this;
}
/**
* @inheritdoc
*/
ArrayAccumulator.prototype.take = function (consumer, base, firstAccumulatedIndex, index, start, length) {
consumer.accept(base.withIndex(index), start, length);
};
/**
* @inheritdoc
*/
ArrayAccumulator.prototype.currentPointer = function (base, total) {
return base.withIndex(total);
};
return ArrayAccumulator;
}(AbstractAccumulator));
/**
* An implementation of the {@link AbstractAccumulator} that accumulates object
* values
*/
var ObjectAccumulator = /** @class */ (function (_super) {
__extends(ObjectAccumulator, _super);
/**
* Create an ObjectAccumulator
*
* @param tokeniser the CBOR tokeniser
* @param base the base JSON pointer to accumulate from
* @param start the offset of the start token
* @param next the offset of the next token
*/
function ObjectAccumulator(tokeniser, base, start, next) {
var _this = _super.call(this, tokeniser, base, start, next) || this;
/**
* The pending object key
*/
_this.pendingKeyName = '';
/**
* Flag indicating that there is a pending key
*/
_this.pendingKey = true;
/**
* An array of object keys
*/
_this.keys = [];
return _this;
}
/**
* @inheritdoc
*/
ObjectAccumulator.prototype.add = function (tokenEnd) {
if (this.pendingKey) {
this.setNextStart(tokenEnd);
this.pendingKeyName = this.tokeniser.getToken().value;
this.pendingKey = false;
}
else {
this.keys[this.accumulated()] = this.pendingKeyName;
this.pendingKey = true;
_super.prototype.add.call(this, tokenEnd);
}
return !this.pendingKey;
};
/**
* @inheritdoc
*/
ObjectAccumulator.prototype.take = function (consumer, base, firstAccumulatedIndex, index, start, length) {
consumer.accept(base.withKey(this.keys[index - firstAccumulatedIndex]), start, length);
};
/**
* @inheritdoc
*/
ObjectAccumulator.prototype.currentPointer = function (base) {
return base.withKey(this.pendingKeyName);
};
return ObjectAccumulator;
}(AbstractAccumulator));
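/*
* Illustrative sketch of the key/value alternation in ObjectAccumulator.add
* (an assumption drawn from the override above, not part of the original
* source; `keyEnd`, `valueEnd` and `consumer` are hypothetical). For an
* object such as {"a": 1}:
*
* objectAccumulator.add(keyEnd);    // consumes the key "a", returns `true`
* objectAccumulator.add(valueEnd);  // records the value span, returns `false`
* objectAccumulator.takeAll(consumer);
* // consumer.accept(base.withKey('a'), start, length) is called once
*/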
/**
* An implementation of the {@link AbstractAccumulator} that accumulates the
* JSON root value
*/
var RootAccumulator = /** @class */ (function (_super) {
__extends(RootAccumulator, _super);
/**
* Create a RootAccumulator
*
* @param tokeniser the CBOR tokeniser
*/
function RootAccumulator(tokeniser) {
return _super.call(this, tokeniser, json_pointer_1.ROOT, -1, 0) || this;
}
/**
* @inheritdoc
*/
RootAccumulator.prototype.take = function (consumer, base, firstAccumulatedIndex, index, start, length) {
if (this.total() > 1) {
throw new errors_1.InvalidDataError('Invalid JSON: multiple values found');
}
consumer.accept(base, start, length);
};
/**
* @inheritdoc
*/
RootAccumulator.prototype.currentPointer = function (base) {
return base;
};
return RootAccumulator;
}(AbstractAccumulator));
/**
* A SpanParser calculates JSON pointers for processing {@link JSONDelta}.
* See {@link DeltaVisitor} for a more detailed discussion.
*/
var SpanParser = /** @class */ (function () {
/**
* Create a span parser for the given JSON data
*
* @param json the buffer containing the JSON data
*/
function SpanParser(json) {
/**
* The current stack of accumulators for {@link JSONPointer}s
*/
this.parentStack = [];
this.tokeniser = new tokeniser_1.Tokeniser(json.$buffer, json.$offset, json.$length);
this.structure = new RootAccumulator(this.tokeniser);
this.startOffset = this.tokeniser.offset();
this.json = json;
}
/**
* Parse the JSON data up to a given offset position. If the current reading
* position is less than the offset, this is guaranteed to read at least one
* value.
*
* @param offset the position up to which to read
* @param result the consumer that records the JSON pointers
* @return the change in the height of the parent stack
* @throws an {@link InvalidDataError} if there was an error parsing the tokens
*/
SpanParser.prototype.spanToNext = function (offset, result) {
return this.spanTo(offset, result, true);
};
/**
* Parse the JSON data up to a given offset position.
*
* @param offset the position up to which to read
* @param result the consumer that records the JSON pointers
* @param atLeastOne if `true` and if the current reading position is less
* than the offset, this is guaranteed to read at least
* one value
* @return the change in the height of the parent stack
* @throws an {@link InvalidDataError} if there was an error parsing the next token
*/ // eslint-disable-next-line complexity
SpanParser.prototype.spanTo = function (offset, result, atLeastOne) {
var e_1, _a;
if (atLeastOne === void 0) { atLeastOne = false; }
var start = this.nextByte();
if (start >= offset) {
return 0;
}
var lastHeight = this.parentStack.length;
var consumeFirstValue = atLeastOne;
var next = start;
var t;
do {
t = this.tokeniser.nextToken();
var tokenStart = next;
next = this.nextByte();
if (t === null) {
if (this.parentStack.length !== 0) {
throw new errors_1.InvalidDataError('Invalid structure');
}
break;
}
else if (t.type === consts.tokens.VALUE) {
consumeFirstValue = this.structure.add(next);
}
else if (t.type === consts.tokens.ARRAY_START) {
this.parentStack.push(this.structure);
this.structure = this.structure.newArray(tokenStart, next);
}
else if (t.type === consts.tokens.MAP_START) {
this.parentStack.push(this.structure);
this.structure = this.structure.newObject(tokenStart, next);
}
else if (consts.isStructEnd(t.type)) {
var parent_1 = this.parentStack.pop();
if (this.structure.notEmptyAndSplitBy(start)) {
this.structure.takeAll(result);
this.structure.splitStructureEnd(result);
parent_1.skip();
parent_1.setNextStart(next);
}
else {
parent_1.add(next);
}
this.structure = parent_1;
consumeFirstValue = false;
}
} while (consumeFirstValue || next < offset || this.nextTokenIsStructEnd(t, next));
try {
for (var _b = __values(this.parentStack), _c = _b.next(); !_c.done; _c = _b.next()) {
var accumulator = _c.value;
accumulator.takeAll(result);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_c && !_c.done && (_a = _b.return)) _a.call(_b);
}
finally { if (e_1) throw e_1.error; }
}
this.structure.takeAll(result);
return this.parentStack.length - lastHeight;
};
/**
* Determine if the next token ends an array, map, or string
*
* @param t the current token
* @param next the current reading position
* @return `true` if the next token ends a structure
*/
SpanParser.prototype.nextTokenIsStructEnd = function (t, next) {
var context = this.tokeniser.getContext();
if (consts.isStructStart(t.type)) {
return context.expected() === 0;
}
return context.acceptsBreakMarker() &&
next < this.json.$length &&
this.json.$buffer[this.json.$offset + next] === consts.BYTE_BREAK ||
!context.hasRemaining();
};
/**
* Get the position of the next byte
*
* @return the position of the next byte
*/
SpanParser.prototype.nextByte = function () {
return this.tokeniser.offset() - this.startOffset;
};
/**
* Convert the span parser to a string
*
* @returns a string representation of the span parser
*/
SpanParser.prototype.toString = function () {
var parts = ['SpanParser', ' next=', this.nextByte().toString(), ' ['];
this.parentStack.forEach(function (x) {
parts.push(x.toString());
parts.push(', ');
});
parts.push(this.structure.toString());
parts.push(']');
return parts.join('');
};
return SpanParser;
}());
exports.SpanParser = SpanParser;
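/*
* Minimal usage sketch (not part of the original source). It assumes the
* argument passed to the constructor is a CBOR-encoded buffer view exposing
* `$buffer`, `$offset` and `$length`, and that the result consumer implements
* `accept(pointer, start, length)` and `splitStructureEnd(pointer, index,
* start, length)` as invoked by the accumulators above. `cborView` and
* `offset` are hypothetical.
*
* var parser = new SpanParser(cborView);
* var consumer = {
*     accept: function (pointer, start, length) {
*         // record the JSON pointer and byte span of one complete value
*     },
*     splitStructureEnd: function (pointer, index, start, length) {
*         // record the close of a structure split by the requested offset
*     }
* };
* // parse up to the requested byte offset; the return value is the change
* // in the depth of the parent stack
* var depthChange = parser.spanToNext(offset, consumer);
*/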