@bsv/sdk
BSV Blockchain Software Development Kit
JavaScript
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Beef = exports.TX_DATA_FORMAT = exports.ATOMIC_BEEF = exports.BEEF_V2 = exports.BEEF_V1 = void 0;
const MerklePath_js_1 = __importDefault(require("./MerklePath.js"));
const BeefTx_js_1 = __importDefault(require("./BeefTx.js"));
const utils_js_1 = require("../primitives/utils.js");
const Hash_js_1 = require("../primitives/Hash.js");
function verifyTruthy(v) {
if (v == null)
throw new Error('Expected a valid value, but got undefined.');
return v;
}
exports.BEEF_V1 = 4022206465; // 0100BEEF in LE order
exports.BEEF_V2 = 4022206466; // 0200BEEF in LE order
exports.ATOMIC_BEEF = 0x01010101; // 01010101
var TX_DATA_FORMAT;
(function (TX_DATA_FORMAT) {
TX_DATA_FORMAT[TX_DATA_FORMAT["RAWTX"] = 0] = "RAWTX";
TX_DATA_FORMAT[TX_DATA_FORMAT["RAWTX_AND_BUMP_INDEX"] = 1] = "RAWTX_AND_BUMP_INDEX";
TX_DATA_FORMAT[TX_DATA_FORMAT["TXID_ONLY"] = 2] = "TXID_ONLY";
})(TX_DATA_FORMAT || (exports.TX_DATA_FORMAT = TX_DATA_FORMAT = {}));
/*
* BEEF standard: BRC-62: Background Evaluation Extended Format (BEEF) Transactions
* https://github.com/bsv-blockchain/BRCs/blob/master/transactions/0062.md
*
* BUMP standard: BRC-74: BSV Unified Merkle Path (BUMP) Format
* https://github.com/bsv-blockchain/BRCs/blob/master/transactions/0074.md
*
* BRC-95: Atomic BEEF Transactions
* https://github.com/bsv-blockchain/BRCs/blob/master/transactions/0095.md
*
* The Atomic BEEF format is supported by the binary deserialization static method `fromBinary`.
*
* BRC-96: BEEF V2, Txid Only Extension
* https://github.com/bsv-blockchain/BRCs/blob/master/transactions/0096.md
*
* A valid serialized BEEF is the cornerstone of Simplified Payment Verification (SPV):
* it is exchanged between two non-trusting parties to establish the validity of a
* newly constructed bitcoin transaction and of its inputs from prior transactions.
*
* A `Beef` is fundamentally a list of `BUMP`s and a list of transactions.
*
* A `BUMP` is a partial merkle tree for a 'mined' bitcoin block.
* It can therefore be used to prove the validity of transaction data
* for each transaction txid whose merkle path is included in the tree.
*
* To be valid, the list of transactions must be sorted in dependency order:
* oldest transaction first;
* and each transaction must either
* have a merkle path in one of the BUMPs, or
* have all of its input transactions included in the list of transactions.
*
* The `Beef` class supports the construction of valid BEEFs by allowing BUMPs
* (merkle paths) and transactions to be merged sequentially.
*
* The `Beef` class also extends the standard by supporting 'known' transactions.
* A 'known' transaction is represented solely by its txid.
* For the `Beef` to become valid, every 'known' transaction must be replaced by a
* full transaction and, where it is mined, its merkle path.
*
* 'Known' transactions are supported because one or both parties generating and
* exchanging BEEFs often already possess partial knowledge of valid transactions
* from their shared history.
*
* A valid `Beef` is only required when sent to a party with no shared history,
* such as a transaction processor.
*
* IMPORTANT NOTE:
* It is fundamental to the BEEF value proposition that only valid transactions and valid
* merkle path (BUMP) data be added to it. Merging invalid data breaks the `verify` and `isValid`
* functions. There is no support for removing invalid data. A `Beef` that becomes invalid
* must be discarded.
*/
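/*
 * Example (an illustrative sketch, not part of this module): assuming `tx` is a
 * `Transaction` from this SDK whose inputs carry `sourceTransaction`s and whose
 * mined ancestors carry `merklePath`s, a valid BEEF can be assembled and checked:
 *
 *   const beef = new Beef()            // defaults to BEEF_V2
 *   beef.mergeTransaction(tx)          // recursively merges inputs and their bumps
 *   if (!beef.isValid()) throw new Error('structurally invalid BEEF')
 *   const bin = beef.toBinary()        // serialized in dependency-sorted order
 *   const roundTrip = Beef.fromBinary(bin)
 */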
class Beef {
constructor(version = exports.BEEF_V2) {
this.bumps = [];
this.txs = [];
this.version = exports.BEEF_V2;
this.atomicTxid = undefined;
this.version = version;
}
/**
* @param txid of `beefTx` to find
* @returns `BeefTx` in `txs` with `txid`.
*/
findTxid(txid) {
return this.txs.find((tx) => tx.txid === txid);
}
/**
* Replaces `BeefTx` for this txid with txidOnly.
*
* Replacement is done so that a `clone()` can be
* updated by this method without affecting the
* original.
*
* @param txid
* @returns undefined if txid is unknown.
*/
makeTxidOnly(txid) {
const i = this.txs.findIndex((tx) => tx.txid === txid);
if (i === -1)
return undefined;
let btx = this.txs[i];
if (btx.isTxidOnly) {
return btx;
}
this.txs.splice(i, 1);
btx = this.mergeTxidOnly(txid);
return btx;
}
/**
* @returns `MerklePath` with level zero hash equal to txid or undefined.
*/
findBump(txid) {
return this.bumps.find((b) => b.path[0].some((leaf) => leaf.hash === txid));
}
/**
* Finds a Transaction in this `Beef`
* and adds any missing input SourceTransactions from this `Beef`.
*
* The result is suitable for signing.
*
* @param txid The id of the target transaction.
* @returns Transaction with all available input `SourceTransaction`s from this Beef.
*/
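/*
 * Example (sketch; assumes unlocking script templates are already attached to the
 * transaction's inputs, which is not shown here):
 *   const tx = beef.findTransactionForSigning(txid)
 *   if (tx != null) {
 *     // every input whose source tx is in this Beef now has sourceTransaction set
 *     await tx.sign()
 *   }
 */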
findTransactionForSigning(txid) {
const beefTx = this.findTxid(txid);
if ((beefTx == null) || (beefTx.tx == null))
return undefined; // Ensure beefTx.tx exists before using it
for (const i of beefTx.tx.inputs) {
if (i.sourceTransaction == null) {
const itx = this.findTxid(verifyTruthy(i.sourceTXID)); // Ensure sourceTXID is valid
if (itx != null) {
i.sourceTransaction = itx.tx;
}
}
}
return beefTx.tx;
}
/**
* Builds the proof tree rooted at a specific `Transaction`.
*
* To succeed, the Beef must contain all the required transaction and merkle path data.
*
* @param txid The id of the target transaction.
* @returns Transaction with input `SourceTransaction` and `MerklePath` populated from this Beef.
*/
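/*
 * Example (sketch): rebuild the proof tree for a transaction this Beef fully covers:
 *   const atomicTx = beef.findAtomicTransaction(txid)
 *   // atomicTx.merklePath is set when the tx itself is mined; otherwise each
 *   // input's sourceTransaction chain carries the merkle paths found in this Beef
 */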
findAtomicTransaction(txid) {
const beefTx = this.findTxid(txid);
if ((beefTx == null) || (beefTx.tx == null))
return undefined; // Ensure beefTx.tx exists before using it
const addInputProof = (beef, tx) => {
const mp = beef.findBump(tx.id('hex'));
if (mp != null) {
tx.merklePath = mp;
}
else {
for (const i of tx.inputs) {
if (i.sourceTransaction == null) {
const itx = beef.findTxid(verifyTruthy(i.sourceTXID)); // Ensure sourceTXID is valid
if (itx != null) {
i.sourceTransaction = itx.tx;
}
}
if (i.sourceTransaction != null) {
const mp = beef.findBump(i.sourceTransaction.id('hex'));
if (mp != null) {
i.sourceTransaction.merklePath = mp;
}
else {
addInputProof(beef, i.sourceTransaction);
}
}
}
}
};
addInputProof(this, beefTx.tx); // Safe because we checked that beefTx.tx exists
return beefTx.tx;
}
/**
* Merge a MerklePath that is assumed to be fully valid.
* @param bump
* @returns index of merged bump
*/
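/*
 * Example (sketch; `bumpHex` is a hypothetical BRC-74 hex string, parsed here with
 * `MerklePath.fromHex`, assumed to be available alongside `fromReader`):
 *   const index = beef.mergeBump(MerklePath.fromHex(bumpHex))
 *   // any tx already in the Beef whose txid appears at level zero of this bump
 *   // now has bumpIndex === index
 */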
mergeBump(bump) {
let bumpIndex;
// If this proof is identical to another one previously added, we use that first. Otherwise, we try to merge it with proofs from the same block.
for (let i = 0; i < this.bumps.length; i++) {
const b = this.bumps[i];
if (b === bump) {
// Literally the same
return i;
}
if (b.blockHeight === bump.blockHeight) {
// Probably the same...
const rootA = b.computeRoot();
const rootB = bump.computeRoot();
if (rootA === rootB) {
// Definitely the same... combine them to save space
b.combine(bump);
bumpIndex = i;
break;
}
}
}
// if the proof is not yet added, add a new path.
if (bumpIndex === undefined) {
bumpIndex = this.bumps.length;
this.bumps.push(bump);
}
// Review if any transactions are proven by this bump
const b = this.bumps[bumpIndex];
for (const tx of this.txs) {
const txid = tx.txid;
if (tx.bumpIndex == null) { // == null covers both null and undefined
for (const n of b.path[0]) {
if (n.hash === txid) {
tx.bumpIndex = bumpIndex;
n.txid = true;
break;
}
}
}
}
return bumpIndex;
}
/**
* Merge a serialized transaction.
*
* Replaces any existing transaction with the same txid.
*
* @param rawTx
* @param bumpIndex Optional. If a number, must be a valid index into the bumps array.
* @returns the merged `BeefTx`
*/
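/*
 * Example (sketch; `rawTx` is a number[] of serialized transaction bytes):
 *   const beefTx = beef.mergeRawTx(rawTx)
 *   // or, when bump 0 (already merged) proves this transaction:
 *   const provenBeefTx = beef.mergeRawTx(rawTx, 0)
 */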
mergeRawTx(rawTx, bumpIndex) {
const newTx = new BeefTx_js_1.default(rawTx, bumpIndex);
this.removeExistingTxid(newTx.txid);
this.txs.push(newTx);
this.tryToValidateBumpIndex(newTx);
return newTx;
}
/**
* Merge a `Transaction` and any referenced `merklePath` and `sourceTransaction`, recursively.
*
* Replaces existing transaction with same txid.
*
* Attempts to match an existing bump to the new transaction.
*
* @param tx
* @returns the merged `BeefTx`
*/
mergeTransaction(tx) {
const txid = tx.id('hex');
this.removeExistingTxid(txid);
let bumpIndex;
if (tx.merklePath != null) {
bumpIndex = this.mergeBump(tx.merklePath);
}
const newTx = new BeefTx_js_1.default(tx, bumpIndex);
this.txs.push(newTx);
this.tryToValidateBumpIndex(newTx);
bumpIndex = newTx.bumpIndex;
if (bumpIndex === undefined) {
for (const input of tx.inputs) {
if (input.sourceTransaction != null) {
this.mergeTransaction(input.sourceTransaction);
}
}
}
return newTx;
}
/**
* Removes an existing transaction from the BEEF, given its TXID
* @param txid TXID of the transaction to remove
*/
removeExistingTxid(txid) {
const existingTxIndex = this.txs.findIndex((t) => t.txid === txid);
if (existingTxIndex >= 0) {
this.txs.splice(existingTxIndex, 1);
}
}
mergeTxidOnly(txid) {
let tx = this.txs.find((t) => t.txid === txid);
if (tx == null) {
tx = new BeefTx_js_1.default(txid);
this.txs.push(tx);
this.tryToValidateBumpIndex(tx);
}
return tx;
}
mergeBeefTx(btx) {
let beefTx = this.findTxid(btx.txid);
if (btx.isTxidOnly && (beefTx == null)) {
beefTx = this.mergeTxidOnly(btx.txid);
}
else if ((btx._tx != null) && ((beefTx == null) || beefTx.isTxidOnly)) {
beefTx = this.mergeTransaction(btx._tx);
}
else if ((btx._rawTx != null) && ((beefTx == null) || beefTx.isTxidOnly)) {
beefTx = this.mergeRawTx(btx._rawTx);
}
if (beefTx == null) {
throw new Error(`Failed to merge BeefTx for txid: ${btx.txid}`);
}
return beefTx;
}
mergeBeef(beef) {
const b = Array.isArray(beef) ? Beef.fromBinary(beef) : beef;
for (const bump of b.bumps) {
this.mergeBump(bump);
}
for (const tx of b.txs) {
this.mergeBeefTx(tx);
}
}
/**
* Sorts `txs` and checks structural validity of beef.
*
* Does NOT verify merkle roots.
*
* Validity requirements:
* 1. No 'known' txids, unless `allowTxidOnly` is true.
* 2. All transactions have bumps or their inputs chain back to bumps (or are known).
* 3. Order of transactions satisfies dependencies before dependents.
* 4. No transactions with duplicate txids.
*
* @param allowTxidOnly optional. If true, txid-only transactions are assumed valid
*/
isValid(allowTxidOnly) {
return this.verifyValid(allowTxidOnly).valid;
}
/**
* Sorts `txs` and confirms validity of transaction data contained in beef
* by validating structure of this beef and confirming computed merkle roots
* using `chainTracker`.
*
* Validity requirements:
* 1. No 'known' txids, unless `allowTxidOnly` is true.
* 2. All transactions have bumps or their inputs chain back to bumps (or are known).
* 3. Order of transactions satisfies dependencies before dependents.
* 4. No transactions with duplicate txids.
*
* @param chainTracker Used to verify computed merkle path roots for all bump txids.
* @param allowTxidOnly optional. If true, txid-only transactions are assumed valid
*/
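/*
 * Example (sketch; `chainTracker` is assumed to implement
 * `isValidRootForHeight(root, height)` as required by this method):
 *   const ok = await beef.verify(chainTracker)
 *   if (!ok) {
 *     // either the structure is invalid or a computed merkle root was rejected
 *   }
 */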
async verify(chainTracker, allowTxidOnly) {
const r = this.verifyValid(allowTxidOnly);
if (!r.valid)
return false;
for (const height of Object.keys(r.roots)) {
const isValid = await chainTracker.isValidRootForHeight(r.roots[height], Number(height));
if (!isValid) {
return false;
}
}
return true;
}
/**
* Sorts `txs` and confirms validity of transaction data contained in beef
* by validating structure of this beef.
*
* Returns block heights and merkle root values to be confirmed by a chaintracker.
*
* Validity requirements:
* 1. No 'known' txids, unless `allowTxidOnly` is true.
* 2. All transactions have bumps or their inputs chain back to bumps (or are known).
* 3. Order of transactions satisfies dependencies before dependents.
* 4. No transactions with duplicate txids.
*
* @param allowTxidOnly optional. If true, txid-only transactions are assumed valid
* @returns {{valid: boolean, roots: Record<number, string>}}
* `valid` is true iff this Beef is structurally valid.
* `roots` is a record where keys are block heights and values are the corresponding merkle roots to be validated.
*/
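/*
 * Example (sketch): validate the structure locally, then confirm the returned
 * roots against a chain tracker or header source of your choosing:
 *   const { valid, roots } = beef.verifyValid()
 *   // roots maps block height -> merkle root computed from the bumps
 */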
verifyValid(allowTxidOnly) {
const r = {
valid: false,
roots: {}
};
const sr = this.sortTxs();
if (sr.missingInputs.length > 0 ||
sr.notValid.length > 0 ||
(sr.txidOnly.length > 0 && allowTxidOnly !== true) ||
sr.withMissingInputs.length > 0) {
return r;
}
// valid txids: txid-only entries (when allowed), bump-proven txids, then txids whose inputs are all already valid
const txids = {};
for (const tx of this.txs) {
if (tx.isTxidOnly) {
if (allowTxidOnly !== true)
return r;
txids[tx.txid] = true;
}
}
const confirmComputedRoot = (b, txid) => {
const root = b.computeRoot(txid);
if (r.roots[b.blockHeight] === undefined || r.roots[b.blockHeight] === '') {
// accept the root as valid for this block and reuse for subsequent txids
r.roots[b.blockHeight] = root;
}
if (r.roots[b.blockHeight] !== root) {
return false;
}
return true;
};
for (const b of this.bumps) {
for (const n of b.path[0]) {
if (n.txid === true && typeof n.hash === 'string' && n.hash.length > 0) {
txids[n.hash] = true;
// All txid hashes in all bumps must agree on computed merkle path roots
if (!confirmComputedRoot(b, n.hash)) {
return r;
}
}
}
}
// All txs with a bumpIndex have matching txid leaf at level zero of BUMP.
for (const t of this.txs) {
if (t.bumpIndex !== undefined) {
const leaf = this.bumps[t.bumpIndex].path[0].find(l => l.hash === t.txid);
if (leaf == null) {
return r;
}
}
}
for (const t of this.txs) {
// all input txids must be included before they are referenced
for (const i of t.inputTxids) {
if (!txids[i])
return r;
}
txids[t.txid] = true;
}
r.valid = true;
return r;
}
/**
* Serializes this data to `writer`
* @param writer
*/
toWriter(writer) {
writer.writeUInt32LE(this.version);
writer.writeVarIntNum(this.bumps.length);
for (const b of this.bumps) {
writer.write(b.toBinary());
}
writer.writeVarIntNum(this.txs.length);
for (const tx of this.txs) {
tx.toWriter(writer, this.version);
}
}
/**
* Returns a binary array representing the serialized BEEF
* @returns A binary array representing the BEEF
*/
toBinary() {
// Always serialize in dependency sorted order.
this.sortTxs();
const writer = new utils_js_1.Writer();
this.toWriter(writer);
return writer.toArray();
}
/**
* Serialize this Beef as AtomicBEEF.
*
* `txid` must exist
*
* After sorting, if `txid` is not the last txid, a clone is created and newer txs are removed before serialization.
*
* @param txid
* @returns serialized contents of this Beef with AtomicBEEF prefix.
*/
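/*
 * Example (sketch): serialize the subject transaction together with exactly the
 * proof data it depends on, per BRC-95:
 *   const atomicBytes = beef.toBinaryAtomic(subjectTxid) // subjectTxid must exist in this Beef
 *   const parsed = Beef.fromBinary(atomicBytes)
 *   // parsed.atomicTxid === subjectTxid
 */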
toBinaryAtomic(txid) {
this.sortTxs();
const tx = this.findTxid(txid);
if (tx == null) {
throw new Error(`${txid} does not exist in this Beef`);
}
// If the transaction is not the last one, clone and modify
const beef = (this.txs[this.txs.length - 1] === tx) ? this : this.clone();
if (beef !== this) {
const i = this.txs.findIndex((t) => t.txid === txid);
beef.txs.splice(i + 1);
}
const writer = new utils_js_1.Writer();
writer.writeUInt32LE(exports.ATOMIC_BEEF);
writer.writeReverse((0, utils_js_1.toArray)(txid, 'hex'));
beef.toWriter(writer);
return writer.toArray();
}
/**
* Returns a hex string representing the serialized BEEF
* @returns A hex string representing the BEEF
*/
toHex() {
return (0, utils_js_1.toHex)(this.toBinary());
}
static fromReader(br) {
let version = br.readUInt32LE();
let atomicTxid;
if (version === exports.ATOMIC_BEEF) {
// Read the atomic subject txid, then read the actual BEEF version
atomicTxid = (0, utils_js_1.toHex)(br.readReverse(32));
version = br.readUInt32LE();
}
if (version !== exports.BEEF_V1 && version !== exports.BEEF_V2) {
throw new Error(`Serialized BEEF must start with ${exports.BEEF_V1} or ${exports.BEEF_V2} but starts with ${version}`);
}
const beef = new Beef(version);
const bumpsLength = br.readVarIntNum();
for (let i = 0; i < bumpsLength; i++) {
const bump = MerklePath_js_1.default.fromReader(br, false);
beef.bumps.push(bump);
}
const txsLength = br.readVarIntNum();
for (let i = 0; i < txsLength; i++) {
const beefTx = BeefTx_js_1.default.fromReader(br, version);
beef.txs.push(beefTx);
}
beef.atomicTxid = atomicTxid;
return beef;
}
/**
* Constructs an instance of the Beef class based on the provided binary array
* @param bin The binary array from which to construct BEEF
* @returns An instance of the Beef class constructed from the binary data
*/
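/*
 * Example (sketch; `bin` is a number[] received from a counterparty):
 *   const beef = Beef.fromBinary(bin)
 *   console.log(beef.toLogString()) // human-readable summary of bumps and txs
 */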
static fromBinary(bin) {
const br = new utils_js_1.Reader(bin);
return Beef.fromReader(br);
}
/**
* Constructs an instance of the Beef class based on the provided string
* @param s The string value from which to construct BEEF
* @param enc The encoding of the string value from which BEEF should be constructed
* @returns An instance of the Beef class constructed from the string
*/
static fromString(s, enc = 'hex') {
const bin = (0, utils_js_1.toArray)(s, enc);
const br = new utils_js_1.Reader(bin);
return Beef.fromReader(br);
}
/**
* Try to validate newTx.bumpIndex by looking for an existing bump
* that proves newTx.txid
*
* @param newTx A new `BeefTx` that has been added to this.txs
* @returns true if a bump was found, false otherwise
*/
tryToValidateBumpIndex(newTx) {
if (newTx.bumpIndex !== undefined) {
return true;
}
const txid = newTx.txid;
for (let i = 0; i < this.bumps.length; i++) {
const j = this.bumps[i].path[0].findIndex((b) => b.hash === txid);
if (j >= 0) {
newTx.bumpIndex = i;
this.bumps[i].path[0][j].txid = true;
return true;
}
}
return false;
}
/**
* Sorts `txs` into dependency order:
* - transactions with missing inputs first,
* - then transactions that neither have a proof nor chain back to one,
* - then txid-only transactions,
* - then valid transactions, dependencies before dependents (newest last).
*
* @returns `{ missingInputs, notValid, valid, withMissingInputs, txidOnly }`
*/
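/*
 * Example (sketch): inspect why a Beef is not yet valid:
 *   const { missingInputs, withMissingInputs, notValid, txidOnly, valid } = beef.sortTxs()
 *   // missingInputs lists txids that must be merged (as rawTx or txid-only) before
 *   // the transactions listed in withMissingInputs can become valid
 */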
sortTxs() {
// Hashtable of valid txids (with proof or all inputs chain to proof)
const validTxids = {};
// Hashtable of all transaction txids to transaction
const txidToTx = {};
// queue of unsorted transactions ...
let queue = [];
// sorted transactions: proven (hasProof) first, then dependents in dependency order
const result = [];
const txidOnly = [];
for (const tx of this.txs) {
txidToTx[tx.txid] = tx;
tx.isValid = tx.hasProof;
if (tx.isValid) {
validTxids[tx.txid] = true;
result.push(tx);
}
else if (tx.isTxidOnly && tx.inputTxids.length === 0) {
validTxids[tx.txid] = true;
txidOnly.push(tx);
}
else {
queue.push(tx);
}
}
// Hashtable of unknown input txids used to fund transactions without their own proof.
const missingInputs = {};
// transactions with one or more missing inputs
const txsMissingInputs = [];
const possiblyMissingInputs = queue;
queue = [];
// all tx are isValid false, hasProof false.
// if isTxidOnly then has inputTxids
for (const tx of possiblyMissingInputs) {
let hasMissingInput = false;
// For all the unproven transactions,
// link their inputs that exist in this beef,
// make a note of missing inputs.
for (const inputTxid of tx.inputTxids) {
if (txidToTx[inputTxid] === undefined) { // Explicitly check for undefined
missingInputs[inputTxid] = true;
hasMissingInput = true;
}
}
if (hasMissingInput) {
txsMissingInputs.push(tx);
}
else {
queue.push(tx);
}
}
// As long as we have unsorted transactions...
while (queue.length > 0) {
const oldQueue = queue;
queue = [];
// all tx are isValid false, hasProof false.
// if isTxidOnly then has inputTxids
for (const tx of oldQueue) {
if (tx.inputTxids.every((txid) => validTxids[txid])) {
validTxids[tx.txid] = true;
result.push(tx);
}
else {
queue.push(tx);
}
}
if (oldQueue.length === queue.length) {
break;
}
}
// transactions that don't have proofs and don't chain to proofs
const txsNotValid = queue;
// New order of txs is unsortable (missing inputs or depends on missing inputs), txidOnly, sorted (so newest sorted is last)
this.txs = txsMissingInputs
.concat(txsNotValid)
.concat(txidOnly)
.concat(result);
return {
missingInputs: Object.keys(missingInputs),
notValid: txsNotValid.map((tx) => tx.txid),
valid: Object.keys(validTxids),
withMissingInputs: txsMissingInputs.map((tx) => tx.txid),
txidOnly: txidOnly.map((tx) => tx.txid)
};
}
/**
* @returns a shallow copy of this beef
*/
clone() {
const c = new Beef();
c.version = this.version;
c.bumps = Array.from(this.bumps);
c.txs = Array.from(this.txs);
return c;
}
/**
* Removes txid-only entries whose txid is included in `knownTxids`, since the recipient already knows them.
* @param knownTxids
*/
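/*
 * Example (sketch; `knownTxids` is a string[] of txids the recipient already trusts):
 *   beef.trimKnownTxids(knownTxids)
 *   // txid-only entries the recipient already knows are removed before sending
 */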
trimKnownTxids(knownTxids) {
for (let i = 0; i < this.txs.length;) {
const tx = this.txs[i];
if (tx.isTxidOnly && knownTxids.includes(tx.txid)) {
this.txs.splice(i, 1);
}
else {
i++;
}
}
// TODO: bumps could be trimmed to eliminate unreferenced proofs.
}
/**
* @returns array of transaction txids that either have a proof or whose inputs chain back to a proven transaction.
*/
getValidTxids() {
const r = this.sortTxs();
return r.valid;
}
/**
* @returns Summary of `Beef` contents as multi-line string.
*/
toLogString() {
let log = '';
log += `BEEF with ${this.bumps.length} BUMPS and ${this.txs.length} Transactions, isValid ${this.isValid().toString()}\n`;
let i = -1;
for (const b of this.bumps) {
i++;
log += ` BUMP ${i}\n block: ${b.blockHeight}\n txids: [\n${b.path[0]
.filter((n) => n.txid === true) // only leaves flagged as txid leaves
.map((n) => ` '${n.hash ?? ''}'`)
.join(',\n')}\n ]\n`;
}
i = -1;
for (const t of this.txs) {
i++;
log += ` TX ${i}\n txid: ${t.txid}\n`;
if (t.bumpIndex !== undefined) {
log += ` bumpIndex: ${t.bumpIndex}\n`;
}
if (t.isTxidOnly) {
log += ' txidOnly\n';
}
else {
log += ` rawTx length=${t.rawTx?.length ?? 0}\n`;
}
if (t.inputTxids.length > 0) {
log += ` inputs: [\n${t.inputTxids
.map((it) => ` '${it}'`)
.join(',\n')}\n ]\n`;
}
}
return log;
}
/**
* In some circumstances it may be helpful for the BUMP MerklePaths to include
* leaves that can be computed from row zero.
*/
addComputedLeaves() {
const hash = (m) => (0, utils_js_1.toHex)((0, Hash_js_1.hash256)((0, utils_js_1.toArray)(m, 'hex').reverse()).reverse());
for (const bump of this.bumps) {
for (let row = 1; row < bump.path.length; row++) {
for (const leafL of bump.path[row - 1]) {
if (typeof leafL.hash === 'string' && (leafL.offset & 1) === 0) {
const leafR = bump.path[row - 1].find((l) => l.offset === leafL.offset + 1);
const offsetOnRow = leafL.offset >> 1;
if (leafR !== undefined &&
typeof leafR.hash === 'string' &&
bump.path[row].every((l) => l.offset !== offsetOnRow)) {
// Computable leaf is missing... add it.
bump.path[row].push({
offset: offsetOnRow,
// Hashes are reversed-byte hex strings, so the right leaf's hex is concatenated before the left leaf's hex; `hash` re-reverses and computes hash256(left || right)
hash: hash(leafR.hash + leafL.hash)
});
}
}
}
}
}
}
}
exports.Beef = Beef;
//# sourceMappingURL=Beef.js.map