// ton3-core — TON low-level BoC (Bag of Cells) serializer (compiled output).
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.deserializeFift = exports.deserialize = exports.serialize = void 0;
const builder_1 = require("./builder");
const cell_1 = require("./cell");
const helpers_1 = require("../utils/helpers");
const bits_1 = require("../utils/bits");
const numbers_1 = require("../utils/numbers");
const checksum_1 = require("../utils/checksum");
const REACH_BOC_MAGIC_PREFIX = (0, helpers_1.hexToBytes)('B5EE9C72');
const LEAN_BOC_MAGIC_PREFIX = (0, helpers_1.hexToBytes)('68FF65F3');
const LEAN_BOC_MAGIC_PREFIX_CRC = (0, helpers_1.hexToBytes)('ACC3A728');
// Parses a fift-style hex dump (lines of the form "  x{ABCD_}") into cells.
// Indentation depth of each line encodes ref nesting relative to the
// previous lines. Throws when the input is empty or contains no x{...}
// groups. Returns an array of root cells.
const deserializeFift = (data) => {
    if (!data) {
        throw new Error('Can\'t deserialize. Empty fift hex.');
    }
    const re = /((\s*)x{([0-9a-zA-Z_]+)}\n?)/gmi;
    const matches = [...data.matchAll(re)] || [];
    if (!matches.length) {
        throw new Error('Can\'t deserialize. Bad fift hex.');
    }
    // Expands one x{...} payload into an array of 0/1 bits; a trailing "_"
    // marks an augmented payload, so the final "1 0*" padding tail is dropped.
    const parseFiftHex = (fift) => {
        if (fift === '_') {
            return [];
        }
        let binary = '';
        for (const symbol of fift) {
            binary += symbol === '_' ? symbol : (0, helpers_1.hexToBits)(symbol).join('');
        }
        return binary
            .replace(/1[0]*_$/, '')
            .split('')
            .map(digit => parseInt(digit, 10));
    };
    // Single line: one flat cell, no nesting to resolve.
    if (matches.length === 1) {
        return [new cell_1.Cell({ bits: parseFiftHex(matches[0][3]) })];
    }
    // True while the top stack entry is at least as deep as the current line
    // (and is not a root at indent 0), i.e. its subtree is finished.
    const isLastNested = (stack, indent) => {
        const lastIndent = stack[stack.length - 1].indent;
        return lastIndent !== 0 && lastIndent >= indent;
    };
    const stack = [];
    matches.forEach((match, index) => {
        const [, , spaces, fift] = match;
        const isLast = index === matches.length - 1;
        const indent = spaces.length;
        const builder = new builder_1.Builder()
            .storeBits(parseFiftHex(fift));
        // Fold every finished subtree into its parent as a ref.
        while (stack.length && isLastNested(stack, indent)) {
            const popped = stack.pop();
            stack[stack.length - 1].builder.storeRef(popped.builder.cell());
        }
        if (isLast) {
            stack[stack.length - 1].builder.storeRef(builder.cell());
        }
        else {
            stack.push({ indent, builder });
        }
    });
    return stack.map(entry => entry.builder.cell());
};
exports.deserializeFift = deserializeFift;
// Parses and strips the BoC header from `bytes` (consumed in place via
// splice). Recognizes the indexed magic B5EE9C72 (followed by a flags
// byte) plus the two lean magics (68FF65F3 without crc, ACC3A728 with
// crc). Validates the trailing crc32c checksum when present. Throws on
// bad magic, truncated input, checksum mismatch, or trailing bytes.
// Returns the header record, including raw cells_data for the caller.
const deserializeHeader = (bytes) => {
    // Need at least the 4-byte magic and one flags/size byte.
    if (bytes.length < 4 + 1) {
        throw new Error('Not enough bytes for magic prefix');
    }
    // The checksum covers everything except the trailing 4 crc bytes.
    const crcbytes = Uint8Array.from(bytes.slice(0, bytes.length - 4));
    const prefix = bytes.splice(0, 4);
    const [flags_byte] = bytes.splice(0, 1);
    // Defaults match the lean layouts, where the byte after the magic is
    // the ref-index size itself rather than a flags byte.
    const header = {
        has_index: true,
        hash_crc32: null,
        has_cache_bits: false,
        flags: 0,
        size_bytes: flags_byte,
        offset_bytes: null,
        cells_num: null,
        roots_num: null,
        absent_num: null,
        tot_cells_size: null,
        root_list: null,
        cells_data: null
    };
    if ((0, helpers_1.bytesCompare)(prefix, REACH_BOC_MAGIC_PREFIX)) {
        // Flags byte layout: has_index(0x80), crc32(0x40), cache_bits(0x20),
        // 2-bit flags(0x18), ref-index size in bytes (low 3 bits).
        header.has_index = (flags_byte & 128) !== 0;
        header.has_cache_bits = (flags_byte & 32) !== 0;
        // NOTE(review): this yields bit4*32 + bit3*8, not the plain 2-bit
        // flags value; matches upstream ton3-core — confirm against spec.
        header.flags = (flags_byte & 16) * 2 + (flags_byte & 8);
        header.size_bytes = flags_byte % 8;
        header.hash_crc32 = flags_byte & 64;
    }
    else if ((0, helpers_1.bytesCompare)(prefix, LEAN_BOC_MAGIC_PREFIX)) {
        header.hash_crc32 = 0;
    }
    else if ((0, helpers_1.bytesCompare)(prefix, LEAN_BOC_MAGIC_PREFIX_CRC)) {
        header.hash_crc32 = 1;
    }
    else {
        throw new Error('Bad magic prefix');
    }
    // 1 byte offset size + 3 counters of size_bytes each; the remaining
    // size_bytes terms over-reserve for tot_cells_size, whose real width
    // (offset_bytes) is not known until the next byte is read.
    if (bytes.length < 1 + 5 * header.size_bytes) {
        throw new Error('Not enough bytes for encoding cells counters');
    }
    const [offset_bytes] = bytes.splice(0, 1);
    header.cells_num = (0, helpers_1.bytesToUint)(bytes.splice(0, header.size_bytes));
    header.roots_num = (0, helpers_1.bytesToUint)(bytes.splice(0, header.size_bytes));
    header.absent_num = (0, helpers_1.bytesToUint)(bytes.splice(0, header.size_bytes));
    header.tot_cells_size = (0, helpers_1.bytesToUint)(bytes.splice(0, offset_bytes));
    header.offset_bytes = offset_bytes;
    if (bytes.length < header.roots_num * header.size_bytes) {
        throw new Error('Not enough bytes for encoding root cells hashes');
    }
    // Root cell indexes, one per root, each size_bytes wide.
    header.root_list = [...Array(header.roots_num)].reduce((acc) => {
        const refIndex = (0, helpers_1.bytesToUint)(bytes.splice(0, header.size_bytes));
        return acc.concat([refIndex]);
    }, []);
    if (header.has_index) {
        if (bytes.length < header.offset_bytes * header.cells_num) {
            throw new Error('Not enough bytes for index encoding');
        }
        // The per-cell offset index is not needed for parsing; skip it.
        Object.keys([...Array(header.cells_num)])
            .forEach(() => bytes.splice(0, header.offset_bytes));
    }
    if (bytes.length < header.tot_cells_size) {
        throw new Error('Not enough bytes for cells data');
    }
    header.cells_data = bytes.splice(0, header.tot_cells_size);
    if (header.hash_crc32) {
        if (bytes.length < 4) {
            throw new Error('Not enough bytes for crc32c hashsum');
        }
        const result = (0, checksum_1.crc32cBytesLe)(crcbytes);
        if (!(0, helpers_1.bytesCompare)(result, bytes.splice(0, 4))) {
            throw new Error('Crc32c hashsum mismatch');
        }
    }
    // Anything left after the (optional) checksum is a malformed BoC.
    if (bytes.length) {
        throw new Error('Too much bytes in BoC serialization');
    }
    return header;
};
// Consumes one serialized cell from `remainder` (mutated in place via
// splice). Returns { pointer, remainder } where pointer holds the cell
// type, a Builder pre-loaded with the payload bits, and the numeric
// indexes of its refs (resolved later by the caller). Throws on
// truncation, absent cells, more than 4 refs, or an exotic cell whose
// type byte decodes as Ordinary.
const deserializeCell = (remainder, refIndexSize) => {
    // Every cell starts with the two descriptor bytes d1 and d2.
    if (remainder.length < 2) {
        throw new Error('BoC not enough bytes to encode cell descriptors');
    }
    // d1: level (top 3 bits), hashes flag (0x10), exotic flag (0x08),
    // refs count (low 3 bits).
    const [refsDescriptor] = remainder.splice(0, 1);
    const level = refsDescriptor >> 5;
    const totalRefs = refsDescriptor & 7;
    const hasHashes = (refsDescriptor & 16) !== 0;
    const isExotic = (refsDescriptor & 8) !== 0;
    const isAbsent = totalRefs === 7 && hasHashes;
    if (isAbsent) {
        throw new Error(`BoC can't deserialize absent cell`);
    }
    if (totalRefs > 4) {
        throw new Error(`BoC cell can't has more than 4 refs ${totalRefs}`);
    }
    // d2: payload byte count; the low bit marks an augmented (padded)
    // payload, which occupies one extra byte.
    const [bitsDescriptor] = remainder.splice(0, 1);
    const isAugmented = (bitsDescriptor & 1) !== 0;
    const dataSize = (bitsDescriptor >> 1) + Number(isAugmented);
    const hashesSize = hasHashes ? (level + 1) * 32 : 0;
    const depthSize = hasHashes ? (level + 1) * 2 : 0;
    if (remainder.length < hashesSize + depthSize + dataSize + refIndexSize * totalRefs) {
        throw new Error('BoC not enough bytes to encode cell data');
    }
    // Stored hashes/depths are skipped; they are recomputed when the cell
    // is rebuilt from the Builder.
    if (hasHashes) {
        remainder.splice(0, hashesSize + depthSize);
    }
    // rollback() strips the trailing "1 0*" padding of augmented payloads.
    const bits = isAugmented
        ? (0, bits_1.rollback)((0, helpers_1.bytesToBits)(remainder.splice(0, dataSize)))
        : (0, helpers_1.bytesToBits)(remainder.splice(0, dataSize));
    if (isExotic && bits.length < 8) {
        throw new Error('BoC not enough bytes for an exotic cell type');
    }
    // For exotic cells the first payload byte is the cell-type tag.
    const type = isExotic
        ? (0, numbers_1.bitsToIntUint)(bits.slice(0, 8), { type: 'int' })
        : cell_1.CellType.Ordinary;
    if (isExotic && type === cell_1.CellType.Ordinary) {
        throw new Error(`BoC an exotic cell can't be of ordinary type`);
    }
    const pointer = {
        type,
        builder: new builder_1.Builder(bits.length).storeBits(bits),
        // Each ref is stored as a refIndexSize-byte cell index.
        refs: Array.from({ length: totalRefs }).map(() => (0, helpers_1.bytesToUint)(remainder.splice(0, refIndexSize)))
    };
    return {
        pointer,
        remainder
    };
};
// Deserializes a standard BoC byte representation into its root cells.
// When `checkMerkleProofs` is truthy, throws unless at least one
// MerkleProof/MerkleUpdate cell is present anywhere in the bag.
const deserialize = (data, checkMerkleProofs) => {
    let hasMerkleProofs = false;
    const bytes = Array.from(data);
    const { cells_num, size_bytes, cells_data, root_list } = deserializeHeader(bytes);
    // First pass: split cells_data into flat pointers with unresolved
    // ref indexes.
    const pointers = [];
    let remainder = cells_data;
    for (let i = 0; i < cells_num; i += 1) {
        const deserialized = deserializeCell(remainder, size_bytes);
        pointers.push(deserialized.pointer);
        remainder = deserialized.remainder;
    }
    // Second pass, back to front: children are serialized after their
    // parents, so every ref index must exceed the referencing cell's index.
    for (let pointerIndex = pointers.length - 1; pointerIndex >= 0; pointerIndex -= 1) {
        const pointer = pointers[pointerIndex];
        const { builder: cellBuilder, type: cellType } = pointer;
        pointer.refs.forEach((refIndex) => {
            const { builder: refBuilder, type: refType } = pointers[refIndex];
            if (refIndex < pointerIndex) {
                throw new Error('Topological order is broken');
            }
            if (refType === cell_1.CellType.MerkleProof || refType === cell_1.CellType.MerkleUpdate) {
                hasMerkleProofs = true;
            }
            cellBuilder.storeRef(refBuilder.cell(refType));
        });
        if (cellType === cell_1.CellType.MerkleProof || cellType === cell_1.CellType.MerkleUpdate) {
            hasMerkleProofs = true;
        }
        pointer.cell = cellBuilder.cell(cellType);
    }
    if (checkMerkleProofs && !hasMerkleProofs) {
        throw new Error('BOC does not contain Merkle Proofs');
    }
    return root_list.map(refIndex => pointers[refIndex].cell);
};
exports.deserialize = deserialize;
// Orders cells depth-first, starting from a synthetic wrapper cell that
// refs every root. A duplicate cell (same hash) is moved to its latest
// position: splice(index, 1, null) swaps the old slot for a null
// placeholder — keeping all earlier indexes stable — and the entry is
// re-pushed at the end. Returns { cells, hashmap } where hashmap maps a
// cell's hash to its final index in `cells`.
const depthFirstSort = (root) => {
    // Synthetic root so all top-level cells are traversed uniformly.
    const stack = [{ cell: new cell_1.Cell({ refs: root }), children: root.length, scanned: 0 }];
    const cells = [];
    const hashIndexes = new Map();
    // Visits the next unscanned ref of `node`: records it (or moves the
    // existing entry to the back) and descends into it.
    const process = (node) => {
        const ref = node.cell.refs[node.scanned++];
        const hash = ref.hash();
        const index = hashIndexes.get(hash);
        const length = index !== undefined
            ? cells.push(cells.splice(index, 1, null)[0])
            : cells.push({ cell: ref, hash });
        stack.push({ cell: ref, children: ref.refs.length, scanned: 0 });
        hashIndexes.set(hash, length - 1);
    };
    while (stack.length) {
        let current = stack[stack.length - 1];
        if (current.children !== current.scanned) {
            process(current);
        }
        else {
            // Unwind fully-scanned frames, then resume with the first
            // ancestor that still has refs left to visit.
            while (stack.length && current && current.children === current.scanned) {
                stack.pop();
                current = stack[stack.length - 1];
            }
            if (current !== undefined) {
                process(current);
            }
        }
    }
    // Drop null placeholders left by moved duplicates and index the rest.
    const result = cells
        .filter(el => el !== null)
        .reduce((acc, { cell, hash }, i) => {
        acc.cells.push(cell);
        acc.hashmap.set(hash, i);
        return acc;
    }, { cells: [], hashmap: new Map() });
    return result;
};
// Orders cells breadth-first, level by level, starting from the root
// cells themselves. A duplicate cell (same hash) is moved to the back
// via a null placeholder so that the indexes of earlier entries stay
// valid. Returns { cells, hashmap } where hashmap maps a cell's hash to
// its final index in `cells`.
const breadthFirstSort = (root) => {
    const stack = [...root];
    const cells = root.map(el => ({ cell: el, hash: el.hash() }));
    const hashIndexes = new Map(cells.map((el, i) => ([el.hash, i])));
    // Appends `node` to the ordering (or moves an already-seen cell to
    // the back) and queues it so its own refs are scanned next level.
    const process = (node) => {
        const hash = node.hash();
        const index = hashIndexes.get(hash);
        const length = index !== undefined
            ? cells.push(cells.splice(index, 1, null)[0])
            : cells.push({ cell: node, hash });
        stack.push(node);
        hashIndexes.set(hash, length - 1);
    };
    while (stack.length) {
        const { length } = stack;
        // forEach only visits elements present when iteration starts, so
        // the nodes pushed by process() are handled on the next pass.
        stack.forEach((node) => {
            node.refs.forEach(ref => process(ref));
        });
        // Discard the level that was just scanned.
        stack.splice(0, length);
    }
    // Drop null placeholders left by moved duplicates and index the rest.
    const result = cells
        .filter(el => el !== null)
        .reduce((acc, { cell, hash }, i) => {
        acc.cells.push(cell);
        acc.hashmap.set(hash, i);
        return acc;
    }, { cells: [], hashmap: new Map() });
    return result;
};
// Serializes a single cell into a flat bit array: refs descriptor,
// bits descriptor, augmented payload bits, then one refIndexSize-bit
// big-endian index per ref (looked up by hash in `hashmap`).
const serializeCell = (cell, hashmap, refIndexSize) => {
    let serialized = [].concat(cell.getRefsDescriptor(), cell.getBitsDescriptor(), cell.getAugmentedBits());
    for (const ref of cell.refs) {
        const refIndex = hashmap.get(ref.hash());
        // Emit the index bits most-significant first.
        const indexBits = [];
        for (let bit = refIndexSize - 1; bit >= 0; bit -= 1) {
            indexBits.push((refIndex >> bit) & 1);
        }
        serialized = serialized.concat(indexBits);
    }
    return serialized;
};
// Serializes root cells into the standard BoC byte format (B5EE9C72
// magic). Options: has_index, has_cache_bits, hash_crc32 (default true),
// topological_order ('breadth-first' by default, anything else means
// depth-first), flags. Returns a Uint8Array; when hash_crc32 is set a
// 4-byte crc32c checksum is appended.
const serialize = (root, options = {}) => {
    const { has_index = false, has_cache_bits = false, hash_crc32 = true, topological_order = 'breadth-first', flags = 0 } = options;
    const { cells: cells_list, hashmap } = topological_order === 'breadth-first'
        ? breadthFirstSort(root)
        : depthFirstSort(root);
    const cells_num = cells_list.length;
    // Whole bytes needed to address any cell index (at least 1).
    const size = cells_num.toString(2).length;
    const size_bytes = Math.max(Math.ceil(size / 8), 1);
    // Serialize every cell, also tracking each cell's byte length for the
    // optional offset index.
    const [cells_bits, size_index] = cells_list.reduce((acc, cell) => {
        const bits = serializeCell(cell, hashmap, size_bytes * 8);
        acc[0] = acc[0].concat(bits);
        acc[1].push(bits.length / 8);
        return acc;
    }, [[], []]);
    const full_size = cells_bits.length / 8;
    const offset_bits = full_size.toString(2).length;
    const offset_bytes = Math.max(Math.ceil(offset_bits / 8), 1);
    // Bit budget: magic(32) + 3 flag bits + flags(2) + size_bytes(3) +
    // offset_bytes(8), plus counters, payload, and the optional index.
    const builder_size = (32 + 3 + 2 + 3 + 8)
        + (cells_bits.length)
        + ((size_bytes * 8) * 4)
        + (offset_bytes * 8)
        + (has_index ? (cells_list.length * (offset_bytes * 8)) : 0);
    const result = new builder_1.Builder(builder_size);
    result.storeBytes(REACH_BOC_MAGIC_PREFIX)
        .storeBit(Number(has_index))
        .storeBit(Number(hash_crc32))
        .storeBit(Number(has_cache_bits))
        .storeUint(flags, 2)
        .storeUint(size_bytes, 3)
        .storeUint(offset_bytes, 8)
        .storeUint(cells_num, size_bytes * 8)
        .storeUint(root.length, size_bytes * 8)
        .storeUint(0, size_bytes * 8) // absent cells count (always 0 here)
        .storeUint(full_size, offset_bytes * 8)
        // NOTE(review): a single root index 0 is written even when
        // root.length > 1 — verify multi-root support against the spec.
        .storeUint(0, size_bytes * 8);
    if (has_index) {
        // Per-cell byte sizes, each offset_bytes wide.
        cells_list.forEach((_, index) => {
            result.storeUint(size_index[index], offset_bytes * 8);
        });
    }
    // Pad the bit stream to a whole number of bytes ("1 0*" augmentation).
    const augmentedBits = (0, bits_1.augment)(result.storeBits(cells_bits).bits);
    const bytes = (0, helpers_1.bitsToBytes)(augmentedBits);
    if (hash_crc32) {
        const hashsum = (0, checksum_1.crc32cBytesLe)(bytes);
        return new Uint8Array([...bytes, ...hashsum]);
    }
    return bytes;
};
exports.serialize = serialize;
//# sourceMappingURL=serializer.js.map