/*
 * @iden3/js-merkletree — JavaScript sparse merkle tree library.
 * Bundled CommonJS build (1,466 lines / 1,440 loc, 41.9 kB).
 */
"use strict";
// Bundler (esbuild) CommonJS interop helpers — generated code.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines every entry of `all` on `target` as an enumerable getter (live binding).
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except` and
// any property `to` already owns; preserves the source's enumerability.
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
// Tags the exports object as an ES module and copies all exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts
// Public API surface of the package: every name below is re-exported through
// module.exports via the generated __export/__toCommonJS helpers.
// (Note: `siblignsFroomProof` is a typo preserved for API compatibility.)
var index_exports = {};
__export(index_exports, {
CircomProcessorProof: () => CircomProcessorProof,
CircomVerifierProof: () => CircomVerifierProof,
DATA_LEN: () => DATA_LEN,
DATA_LEN_BYTES: () => DATA_LEN_BYTES,
Data: () => Data,
ELEM_BYTES_LEN: () => ELEM_BYTES_LEN,
EMPTY_NODE_STRING: () => EMPTY_NODE_STRING,
EMPTY_NODE_VALUE: () => EMPTY_NODE_VALUE,
ElemBytes: () => ElemBytes,
Entry: () => Entry,
ErrEntryIndexAlreadyExists: () => ErrEntryIndexAlreadyExists,
ErrInvalidDBValue: () => ErrInvalidDBValue,
ErrInvalidNodeFound: () => ErrInvalidNodeFound,
ErrInvalidProofBytes: () => ErrInvalidProofBytes,
ErrKeyNotFound: () => ErrKeyNotFound,
ErrNodeBytesBadSize: () => ErrNodeBytesBadSize,
ErrNodeKeyAlreadyExists: () => ErrNodeKeyAlreadyExists,
ErrNotFound: () => ErrNotFound,
ErrNotWritable: () => ErrNotWritable,
ErrReachedMaxLevel: () => ErrReachedMaxLevel,
FIELD_SIZE: () => FIELD_SIZE,
HASH_BYTES_LENGTH: () => HASH_BYTES_LENGTH,
Hash: () => Hash,
InMemoryDB: () => InMemoryDB,
IndexedDBStorage: () => IndexedDBStorage,
LocalStorageDB: () => LocalStorageDB,
MAX_NUM_IN_FIELD: () => MAX_NUM_IN_FIELD,
Merkletree: () => Merkletree,
NODE_TYPE_EMPTY: () => NODE_TYPE_EMPTY,
NODE_TYPE_LEAF: () => NODE_TYPE_LEAF,
NODE_TYPE_MIDDLE: () => NODE_TYPE_MIDDLE,
NODE_VALUE_BYTE_ARR_LENGTH: () => NODE_VALUE_BYTE_ARR_LENGTH,
NOT_EMPTIES_LEN: () => NOT_EMPTIES_LEN,
NodeEmpty: () => NodeEmpty,
NodeLeaf: () => NodeLeaf,
NodeMiddle: () => NodeMiddle,
PROOF_FLAG_LEN: () => PROOF_FLAG_LEN,
Proof: () => Proof,
ZERO_HASH: () => ZERO_HASH,
bigIntToUINT8Array: () => bigIntToUINT8Array,
bigint2Array: () => bigint2Array,
bytes2BinaryString: () => bytes2BinaryString,
bytes2Hex: () => bytes2Hex,
bytesEqual: () => bytesEqual,
checkBigIntInField: () => checkBigIntInField,
checkEntryInField: () => checkEntryInField,
circomSiblingsFromSiblings: () => circomSiblingsFromSiblings,
elemBytesToBigInts: () => elemBytesToBigInts,
getPath: () => getPath,
hashElems: () => hashElems,
hashElemsKey: () => hashElemsKey,
newBigIntFromBytes: () => newBigIntFromBytes,
newDataFromBytes: () => newDataFromBytes,
newHashFromBigInt: () => newHashFromBigInt,
newHashFromHex: () => newHashFromHex,
newHashFromString: () => newHashFromString,
rootFromProof: () => rootFromProof,
setBitBigEndian: () => setBitBigEndian,
siblignsFroomProof: () => siblignsFroomProof,
siblings2Bytes: () => siblings2Bytes,
str2Bytes: () => str2Bytes,
swapEndianness: () => swapEndianness,
testBit: () => testBit,
testBitBigEndian: () => testBitBigEndian,
verifyProof: () => verifyProof
});
module.exports = __toCommonJS(index_exports);
// src/constants/hash.ts
// Size in bytes of every hash / node key / tree root.
var HASH_BYTES_LENGTH = 32;
// src/constants/node.ts
// Type tags stored as the first byte of a serialized node.
var NODE_TYPE_MIDDLE = 0;
var NODE_TYPE_LEAF = 1;
var NODE_TYPE_EMPTY = 2;
// Serialized node size: 1 type byte + two 32-byte hashes.
var NODE_VALUE_BYTE_ARR_LENGTH = 65;
var EMPTY_NODE_VALUE = new Uint8Array(NODE_VALUE_BYTE_ARR_LENGTH);
var EMPTY_NODE_STRING = "empty";
// src/constants/data.ts
// An Entry's Data holds DATA_LEN elements of ELEM_BYTES_LEN bytes each
// (first 4 = index half, last 4 = value half).
var ELEM_BYTES_LEN = 32;
var DATA_LEN = 8;
var DATA_LEN_BYTES = DATA_LEN * ELEM_BYTES_LEN;
// src/constants/proof.ts
// Serialized proof prefix: 2 flag bytes followed by a 30-byte siblings bitmap.
var PROOF_FLAG_LEN = 2;
var NOT_EMPTIES_LEN = ELEM_BYTES_LEN - PROOF_FLAG_LEN;
// src/constants/field.ts
// Prime modulus of the finite field every stored value must lie in.
var qString = "21888242871839275222246405745257275088548364400416034343698204186575808495617";
var FIELD_SIZE = BigInt(qString);
var MAX_NUM_IN_FIELD = FIELD_SIZE - BigInt("1");
// src/lib/utils/crypto.ts
// True when bigNum fits inside the finite field (strictly below FIELD_SIZE).
// NOTE(review): negative bigints also pass this check — presumably callers
// only ever supply non-negative values; confirm upstream.
var checkBigIntInField = (bigNum) => bigNum < FIELD_SIZE;
// src/lib/utils/bytes.ts
/**
 * Element-wise equality of two byte arrays.
 * BUGFIX: without a length check, a b1 that is a strict prefix of b2
 * (e.g. [1] vs [1, 2]) compared as equal, because every() only visits
 * b1's own indices. Equal-length inputs behave exactly as before.
 */
var bytesEqual = (b1, b2) => {
  return b1.length === b2.length && b1.every((ele, idx) => ele === b2[idx]);
};
/** Returns a reversed copy of `bytes`; the caller's array is left untouched. */
var swapEndianness = (bytes) => {
  const copy = bytes.slice();
  copy.reverse();
  return copy;
};
/** Renders bytes as a "0b…"-prefixed binary literal string, 8 bits per byte. */
var bytes2BinaryString = (bytes) => {
  let bits = "";
  bytes.forEach((b) => {
    bits += b.toString(2).padStart(8, "0");
  });
  return "0b" + bits;
};
/**
 * Tests bit n of a little-endian bitmap: byte floor(n / 8), bit n % 8.
 * FIX: replaced the parseInt((n / 8).toString()) float/string round-trip
 * with Math.floor — identical result for the non-negative indices used here.
 */
var testBit = (bitMap, n) => {
  return (bitMap[Math.floor(n / 8)] & 1 << n % 8) !== 0;
};
/**
 * Tests bit n of a big-endian bitmap (bit 0 lives in the LAST byte).
 * FIX: replaced parseInt(`${n / 8}`) with Math.floor — identical result for
 * the non-negative indices used here, without the string round-trip.
 */
var testBitBigEndian = (bitMap, n) => {
  const byteIdx = bitMap.length - Math.floor(n / 8) - 1;
  return (bitMap[byteIdx] & 1 << n % 8) !== 0;
};
/**
 * Sets bit n of a big-endian bitmap in place (bit 0 lives in the LAST byte).
 * FIX: replaced parseInt(`${n / 8}`) with Math.floor — identical result for
 * the non-negative indices used here, without the string round-trip.
 */
var setBitBigEndian = (bitMap, n) => {
  bitMap[bitMap.length - Math.floor(n / 8) - 1] |= 1 << n % 8;
};
// Lowercase hex digit lookup table.
var hexTable = "0123456789abcdef";
/**
 * Hex-encodes a byte array (two lowercase digits per byte).
 * FIX: dropped the pointless parseInt((v >> 4).toString(10)) round-trips —
 * v >> 4 and v & 15 are already integers usable as indices.
 */
var bytes2Hex = (u) => {
  let out = "";
  u.forEach((v) => {
    out += hexTable[v >> 4];
    out += hexTable[v & 15];
  });
  return out;
};
/**
 * Interprets exactly 32 big-endian bytes as a BigInt and validates that the
 * result lies inside the finite field. Throws plain strings, matching the
 * error convention used throughout this file.
 */
var newBigIntFromBytes = (bytes) => {
  if (bytes.length !== HASH_BYTES_LENGTH) {
    throw `Expected 32 bytes, found ${bytes.length} bytes`;
  }
  const num = BigInt(bytes2BinaryString(bytes));
  if (!checkBigIntInField(num)) {
    throw "NewBigIntFromHashBytes: Value not inside the Finite Field";
  }
  return num;
};
var str2Bytes = (str) => new Uint8Array(str.length * 2).map((_, i) => str.charCodeAt(i));
// src/lib/utils/merkletree.ts
/**
 * Builds the left/right path for key bytes k: path[lvl] is the boolean value
 * of bit `lvl` of k, one entry per tree level.
 */
var getPath = (numLevels, k) => {
  const path = [];
  for (let lvl = 0; lvl < numLevels; lvl += 1) {
    path.push(testBit(k, lvl));
  }
  return path;
};
/** Packs the siblings' 32-byte values back to back into one Uint8Array. */
var siblings2Bytes = (siblings) => {
  const out = new Uint8Array(HASH_BYTES_LENGTH * siblings.length);
  let offset = 0;
  for (const sibling of siblings) {
    out.set(sibling.value, offset);
    offset += HASH_BYTES_LENGTH;
  }
  return out;
};
// src/lib/utils/bigint.ts
/**
 * Splits bigNum's textual representation in the given radix (default 10)
 * into an array of digit values.
 * BUGFIX: parseInt was called without a radix, so for radix > 10 the letter
 * digits (a-f, …) parsed as NaN. Behavior for radix <= 10 is unchanged.
 */
var bigint2Array = (bigNum, radix) => {
  const base = radix ? radix : 10;
  return bigNum
    .toString(base)
    .split("")
    .map((digit) => parseInt(digit, base));
};
/**
 * Serializes a non-negative BigInt into 32 big-endian bytes (zero-padded on
 * the left). Values wider than 32 bytes silently drop their high bytes, as
 * before (out-of-range typed-array writes are ignored).
 */
var bigIntToUINT8Array = (bigNum) => {
  const base = BigInt(256);
  const out = new Uint8Array(HASH_BYTES_LENGTH);
  let rest = bigNum;
  for (let pos = HASH_BYTES_LENGTH - 1; rest > BigInt(0); pos -= 1) {
    out[pos] = Number(rest % base);
    rest = rest / base;
  }
  return out;
};
// src/lib/hash/hash.ts
var import_js_crypto = require("@iden3/js-crypto");
// Hash wraps the 32-byte value of a node key / tree root.
var Hash = class _Hash {
// little endian
bytes;
// Accepts an optional 32-byte array; with no (or empty) argument the hash is all zeroes.
constructor(_bytes) {
if (_bytes?.length) {
if (_bytes.length !== HASH_BYTES_LENGTH) {
throw new Error(`Expected ${HASH_BYTES_LENGTH} bytes, found ${_bytes.length} bytes`);
}
this.bytes = _bytes;
} else {
this.bytes = new Uint8Array(HASH_BYTES_LENGTH);
}
}
// returns a new copy, in little endian
// NOTE(review): despite the comment above, this returns the internal array
// itself, not a copy — mutating the result mutates the Hash.
get value() {
return this.bytes;
}
// bytes should be in big-endian
// (the setter byte-swaps into the internal little-endian layout; throws a string on bad length)
set value(bytes) {
if (bytes.length !== HASH_BYTES_LENGTH) {
throw `Expected 32 bytes, found ${bytes.length} bytes`;
}
this.bytes = swapEndianness(bytes);
}
// Decimal-string form of the big-int value.
string() {
return this.bigInt().toString(10);
}
// Hex of the internal (little-endian) bytes.
hex() {
return bytes2Hex(this.bytes);
}
equals(hash) {
return bytesEqual(this.value, hash.value);
}
// Interprets the byte-swapped (big-endian) bytes as a BigInt.
bigInt() {
const bytes = swapEndianness(this.value);
return BigInt(bytes2BinaryString(bytes));
}
// Accepts either a decimal big-int string or a JSON-serialized Hash
// ({ bytes: {...} }); BigInt(s) throwing on the latter triggers the JSON fallback.
static fromString(s) {
try {
return _Hash.fromBigInt(BigInt(s));
} catch (e) {
const deserializedHash = JSON.parse(s);
const bytes = Uint8Array.from(Object.values(deserializedHash.bytes));
return new _Hash(bytes);
}
}
// Converts a field element to a Hash; rejects values outside the finite field.
static fromBigInt(i) {
if (!checkBigIntInField(i)) {
throw new Error("NewBigIntFromHashBytes: Value not inside the Finite Field");
}
const bytes = bigIntToUINT8Array(i);
return new _Hash(swapEndianness(bytes));
}
// Decodes a hex string into hash bytes; empty/undefined input maps to ZERO_HASH.
static fromHex(h) {
if (!h) {
return ZERO_HASH;
}
return new _Hash(import_js_crypto.Hex.decodeString(h));
}
// Serialized as the decimal string form (see Proof.reduceSiblings, which relies on this).
toJSON() {
return this.string();
}
};
// Singleton all-zero hash, used as the "empty" marker throughout —
// often compared by reference (===), not just by content.
var ZERO_HASH = new Hash();
// Free-function wrappers around the Hash static constructors, kept for the public API.
var newHashFromBigInt = (bigNum) => {
return Hash.fromBigInt(bigNum);
};
var newHashFromHex = (h) => {
return Hash.fromHex(h);
};
var newHashFromString = (decimalString) => {
return Hash.fromString(decimalString);
};
/** Poseidon-hashes the field elements `e` and wraps the digest in a Hash. */
var hashElems = (e) => {
  const digest = import_js_crypto.poseidon.hash(e);
  return Hash.fromBigInt(digest);
};
/** Like hashElems, but appends the extra element k after the elements of e. */
var hashElemsKey = (k, e) => {
  const digest = import_js_crypto.poseidon.hash([...e, k]);
  return Hash.fromBigInt(digest);
};
// Pads `siblings` IN PLACE with ZERO_HASH up to `levels` entries and returns
// the same array (presumably because circom circuits expect a fixed-length
// input — confirm). NOTE(review): the mutation is observable to callers; both
// call sites in this file use the return value.
var circomSiblingsFromSiblings = (siblings, levels) => {
for (let i = siblings.length; i < levels; i += 1) {
siblings.push(ZERO_HASH);
}
return siblings;
};
// src/lib/db/inMemory.ts
// Volatile tree storage backed by a plain object map.
var InMemoryDB = class {
  // byte prefix namespacing this tree's keys
  prefix;
  // map from stringified (prefixed) key bytes to the stored node
  _kvMap;
  // current root hash
  _currentRoot;
  constructor(_prefix) {
    this.prefix = _prefix;
    this._kvMap = {};
    this._currentRoot = ZERO_HASH;
  }
  /** Looks up a node by key bytes; resolves to undefined when absent. */
  async get(k) {
    const fullKey = new Uint8Array([...this.prefix, ...k]).toString();
    const stored = this._kvMap[fullKey];
    return stored ? stored : void 0;
  }
  /** Stores node n under the (prefixed) key bytes k. */
  async put(k, n) {
    const fullKey = new Uint8Array([...this.prefix, ...k]).toString();
    this._kvMap[fullKey] = n;
  }
  async getRoot() {
    return this._currentRoot;
  }
  async setRoot(r) {
    this._currentRoot = r;
  }
};
// src/lib/utils/node.ts
// Key of a leaf node: Poseidon hash of [k, v] with a trailing 1 marker element.
var leafKey = async (k, v) => hashElemsKey(BigInt(1), [k.bigInt(), v.bigInt()]);
/**
 * Serializes a node: [type byte | 32 bytes of a | 32 bytes of b] (65 bytes).
 * BUGFIX: the second copy loop ran `idx <= NODE_VALUE_BYTE_ARR_LENGTH`,
 * attempting to write bytes[65] = vBytes[32] — both out of range. Typed
 * arrays silently ignore such writes, so the output is unchanged, but the
 * off-by-one is now gone; Uint8Array.set replaces both manual loops.
 */
var nodeValue = (type, a, b) => {
  const bytes = new Uint8Array(NODE_VALUE_BYTE_ARR_LENGTH);
  bytes[0] = type;
  bytes.set(bigIntToUINT8Array(a.bigInt()), 1);
  bytes.set(bigIntToUINT8Array(b.bigInt()), 33);
  return bytes;
};
// src/lib/node/node.ts
// Leaf node: holds the (key, value) entry pair.
var NodeLeaf = class {
  type;
  entry;
  // cache used to avoid recalculating key
  _key;
  constructor(k, v) {
    this.type = NODE_TYPE_LEAF;
    this.entry = [k, v];
    this._key = ZERO_HASH;
  }
  /**
   * Poseidon key of this leaf, computed lazily.
   * BUGFIX: the computed key is now stored in the cache field — previously it
   * was recomputed on every call because the result was never assigned, so
   * _key stayed ZERO_HASH forever. (leafKey always returns a fresh Hash, never
   * the ZERO_HASH singleton, so the reference check stays valid.)
   */
  async getKey() {
    if (this._key === ZERO_HASH) {
      this._key = await leafKey(this.entry[0], this.entry[1]);
    }
    return this._key;
  }
  /** 65-byte serialized form: [type | entry key | entry value]. */
  get value() {
    return nodeValue(this.type, this.entry[0], this.entry[1]);
  }
  get string() {
    return `Leaf I:${this.entry[0]} D:${this.entry[1]}`;
  }
};
// Middle node: holds the keys of its left and right children.
var NodeMiddle = class {
  type;
  childL;
  childR;
  // cache used to avoid recalculating key
  _key;
  constructor(cL, cR) {
    this.type = NODE_TYPE_MIDDLE;
    this.childL = cL;
    this.childR = cR;
    this._key = ZERO_HASH;
  }
  /**
   * Poseidon key of this node, computed lazily from the two child keys.
   * BUGFIX: the computed key is now cached — previously the result was never
   * assigned to _key, so every call re-hashed. (hashElems always returns a
   * fresh Hash, never the ZERO_HASH singleton, so the reference check holds.)
   */
  async getKey() {
    if (this._key === ZERO_HASH) {
      this._key = await hashElems([this.childL.bigInt(), this.childR.bigInt()]);
    }
    return this._key;
  }
  /** 65-byte serialized form: [type | childL | childR]. */
  get value() {
    return nodeValue(this.type, this.childL, this.childR);
  }
  get string() {
    return `Middle L:${this.childL} R:${this.childR}`;
  }
};
// The empty node: its key is always the zero hash and its serialized value a
// fixed all-zero 65-byte array.
var NodeEmpty = class {
type;
_key;
constructor() {
this.type = NODE_TYPE_EMPTY;
this._key = ZERO_HASH;
}
async getKey() {
return ZERO_HASH;
}
get value() {
return EMPTY_NODE_VALUE;
}
get string() {
return EMPTY_NODE_STRING;
}
};
// src/lib/db/localStorage.ts
// Tree storage backed by window.localStorage. Nodes are stored as JSON under
// hex(prefix + key); the root is persisted under hex(prefix).
var LocalStorageDB = class {
constructor(_prefix) {
this._prefix = _prefix;
// restore a previously persisted root (stored as a JSON byte array)
const rootStr = localStorage.getItem(bytes2Hex(_prefix));
if (rootStr) {
const bytes = JSON.parse(rootStr);
this._currentRoot = new Hash(Uint8Array.from(bytes));
} else {
this._currentRoot = ZERO_HASH;
}
}
_currentRoot;
// Reads and revives a node; resolves to undefined when absent. Throws a plain
// string (this file's error convention) when the stored type tag is unknown.
async get(k) {
const kBytes = new Uint8Array([...this._prefix, ...k]);
const key = bytes2Hex(kBytes);
const val = localStorage.getItem(key);
if (val === null) {
return void 0;
}
const obj = JSON.parse(val);
switch (obj.type) {
case NODE_TYPE_EMPTY:
return new NodeEmpty();
case NODE_TYPE_MIDDLE:
const cL = new Hash(Uint8Array.from(obj.childL));
const cR = new Hash(Uint8Array.from(obj.childR));
return new NodeMiddle(cL, cR);
case NODE_TYPE_LEAF:
const k2 = new Hash(Uint8Array.from(obj.entry[0]));
const v = new Hash(Uint8Array.from(obj.entry[1]));
return new NodeLeaf(k2, v);
}
throw `error: value found for key ${bytes2Hex(kBytes)} is not of type Node`;
}
// Serializes only the node's type plus its hash payloads (as number arrays);
// empty nodes store just { type }.
async put(k, n) {
const kBytes = new Uint8Array([...this._prefix, ...k]);
const key = bytes2Hex(kBytes);
const toSerialize = {
type: n.type
};
if (n instanceof NodeMiddle) {
toSerialize.childL = Array.from(n.childL.bytes);
toSerialize.childR = Array.from(n.childR.bytes);
} else if (n instanceof NodeLeaf) {
toSerialize.entry = [Array.from(n.entry[0].bytes), Array.from(n.entry[1].bytes)];
}
const val = JSON.stringify(toSerialize);
localStorage.setItem(key, val);
}
async getRoot() {
return this._currentRoot;
}
// Persists the root bytes so a later constructor call can restore them.
async setRoot(r) {
this._currentRoot = r;
localStorage.setItem(bytes2Hex(this._prefix), JSON.stringify(Array.from(r.bytes)));
}
};
// src/lib/db/indexedDB.ts
var import_idb_keyval = require("idb-keyval");
// Tree storage backed by IndexedDB via the idb-keyval package. put() stores
// the live node objects (structured clone); get() revives them into proper
// Node instances from the cloned `.bytes` fields.
var IndexedDBStorage = class _IndexedDBStorage {
constructor(_prefix, databaseName) {
this._prefix = _prefix;
this._currentRoot = ZERO_HASH;
this._prefixHash = bytes2Hex(_prefix);
// database name defaults to "merkle-tree-db"; single store "merkle-tree"
this._store = (0, import_idb_keyval.createStore)(
`${databaseName ?? _IndexedDBStorage.storageName}-db`,
_IndexedDBStorage.storageName
);
}
static storageName = "merkle-tree";
_prefixHash;
_store;
_currentRoot;
// Reads a structured-clone of a node and rebuilds an instance; resolves to
// undefined when absent, throws an Error on an unknown type tag.
async get(k) {
const kBytes = new Uint8Array([...this._prefix, ...k]);
const key = bytes2Hex(kBytes);
const obj = await (0, import_idb_keyval.get)(key, this._store);
if (obj === null || obj === void 0) {
return void 0;
}
if (obj.type === NODE_TYPE_EMPTY) {
return new NodeEmpty();
}
if (obj.type === NODE_TYPE_MIDDLE) {
const cL = new Hash(Uint8Array.from(obj.childL.bytes));
const cR = new Hash(Uint8Array.from(obj.childR.bytes));
return new NodeMiddle(cL, cR);
}
if (obj.type === NODE_TYPE_LEAF) {
const k2 = new Hash(Uint8Array.from(obj.entry[0].bytes));
const v = new Hash(Uint8Array.from(obj.entry[1].bytes));
return new NodeLeaf(k2, v);
}
throw new Error(`error: value found for key ${key} is not of type Node`);
}
// Stores the node object as-is under hex(prefix + key).
async put(k, n) {
const kBytes = new Uint8Array([...this._prefix, ...k]);
const key = bytes2Hex(kBytes);
await (0, import_idb_keyval.set)(key, n, this._store);
}
// Returns the cached root when set; otherwise loads it from the store.
// The clone preserves only raw fields, hence the `new Hash(root.bytes)` revival.
async getRoot() {
if (!this._currentRoot.equals(ZERO_HASH)) {
return this._currentRoot;
}
const root = await (0, import_idb_keyval.get)(this._prefixHash, this._store);
if (!root) {
this._currentRoot = ZERO_HASH;
} else {
this._currentRoot = new Hash(root.bytes);
}
return this._currentRoot;
}
// Persists the root under hex(prefix) and refreshes the cache.
async setRoot(r) {
await (0, import_idb_keyval.set)(this._prefixHash, r, this._store);
this._currentRoot = r;
}
};
// src/lib/entry/elemBytes.ts
// A single 32-byte element of an Entry's Data.
var ElemBytes = class {
  // Backing storage, little endian.
  _bytes;
  constructor() {
    this._bytes = new Uint8Array(ELEM_BYTES_LEN);
  }
  /** Raw little-endian bytes (the internal array, not a copy). */
  get value() {
    return this._bytes;
  }
  set value(b) {
    this._bytes = b;
  }
  /** Interprets the bytes (swapped to big-endian) as a field element. */
  bigInt() {
    return newBigIntFromBytes(swapEndianness(this._bytes));
  }
  /** Short hex preview of the first four bytes. */
  string() {
    return `${bytes2Hex(this._bytes.slice(0, 4))}...`;
  }
};
// src/lib/entry/data.ts
// Data holds the DATA_LEN (8) elements of an Entry: elements 0-3 are the
// index half, elements 4-7 the value half.
var Data = class {
  _value;
  constructor() {
    this._value = new Array(DATA_LEN);
  }
  get value() {
    return this._value;
  }
  /** Replaces all elements; throws (a string) unless exactly DATA_LEN are given. */
  set value(_v) {
    if (_v.length !== DATA_LEN) {
      throw `expected bytes length to be ${DATA_LEN}, got ${_v.length}`;
    }
    this._value = _v;
  }
  /** Concatenates all element bytes into one DATA_LEN_BYTES buffer. */
  bytes() {
    const b = new Uint8Array(DATA_LEN * ELEM_BYTES_LEN);
    for (let idx = 0; idx < DATA_LEN; idx += 1) {
      this._value[idx].value.forEach((v, _idx) => {
        b[idx * ELEM_BYTES_LEN + _idx] = v;
      });
    }
    return b;
  }
  /**
   * Element-wise equality with another Data.
   * BUGFIX: previously only elements 0-3 (the index half) were compared, so
   * two Data objects with the same index but different value halves were
   * reported equal. All DATA_LEN elements are now compared.
   */
  equal(d2) {
    for (let idx = 0; idx < DATA_LEN; idx += 1) {
      if (!bytesEqual(this._value[idx].value, d2.value[idx].value)) {
        return false;
      }
    }
    return true;
  }
};
/**
 * Builds a Data from a flat DATA_LEN_BYTES (256-byte) buffer, splitting it
 * into DATA_LEN elements of ELEM_BYTES_LEN bytes each.
 * BUGFIX: the slice end was `(i + 1) * DATA_LEN_BYTES`, so every element
 * received far more than 32 bytes (element 0 got the whole buffer); it must
 * be `(i + 1) * ELEM_BYTES_LEN`. Also sized the scratch array DATA_LEN
 * (8 elements) instead of DATA_LEN_BYTES (256).
 */
var newDataFromBytes = (bytes) => {
  if (bytes.length !== DATA_LEN_BYTES) {
    throw `expected bytes length to be ${DATA_LEN_BYTES}, got ${bytes.length}`;
  }
  const d = new Data();
  const arr = new Array(DATA_LEN);
  for (let i = 0; i < DATA_LEN; i += 1) {
    const tmp = new ElemBytes();
    tmp.value = bytes.slice(i * ELEM_BYTES_LEN, (i + 1) * ELEM_BYTES_LEN);
    arr[i] = tmp;
  }
  d.value = arr;
  return d;
};
// src/lib/entry/entry.ts
// Entry: a Data plus lazily-computed hashes of its index and value halves.
var Entry = class _Entry {
  _data;
  // cached hash of the index half; ZERO_HASH until first computed
  _hIndex;
  // cached hash of the value half; ZERO_HASH until first computed
  _hValue;
  constructor(_data) {
    this._data = _data ? _data : new Data();
    this._hIndex = ZERO_HASH;
    this._hValue = ZERO_HASH;
  }
  get data() {
    return this._data;
  }
  /** First 4 elements: the index half. */
  get index() {
    return this._data.value.slice(0, 4);
  }
  /** Last 4 elements: the value half. */
  get value() {
    return this._data.value.slice(4, 8);
  }
  /**
   * Hash of the index half.
   * BUGFIX: the computed hash is now stored in the cache field — previously
   * it was recomputed on every call because the result was never assigned.
   * (hashElems returns a fresh Hash, never the ZERO_HASH singleton, so the
   * reference check stays valid.)
   */
  async hIndex() {
    if (this._hIndex === ZERO_HASH) {
      this._hIndex = await hashElems(elemBytesToBigInts(this.index));
    }
    return this._hIndex;
  }
  /** Hash of the value half; cached like hIndex (same fix). */
  async hValue() {
    if (this._hValue === ZERO_HASH) {
      this._hValue = await hashElems(elemBytesToBigInts(this.value));
    }
    return this._hValue;
  }
  /** Resolves both hashes at once. */
  hiHv() {
    return (async () => {
      const hi = await this.hIndex();
      const hv = await this.hValue();
      return { hi, hv };
    })();
  }
  bytes() {
    return this._data.value;
  }
  equal(e2) {
    return this._data.equal(e2.data);
  }
  /** Shallow clone: shares the underlying Data instance. */
  clone() {
    return new _Entry(this._data);
  }
};
/** Converts each ElemBytes in `es` to its BigInt field-element value. */
var elemBytesToBigInts = (es) => es.map((e) => e.bigInt());
/**
 * True when every element of the entry's data lies inside the finite field.
 * FIX: replaced the forEach-with-mutable-flag pattern with Array.every —
 * same result, and it short-circuits on the first out-of-field element.
 */
var checkEntryInField = (e) => {
  return elemBytesToBigInts(e.data.value).every((b) => checkBigIntInField(b));
};
// src/lib/errors/merkletree.ts
// Error sentinels are plain strings throughout this library; callers compare
// them with === (see e.g. addAndGetCircomProof), not with instanceof.
var ErrNodeKeyAlreadyExists = "key already exists";
var ErrKeyNotFound = "Key not found in the MerkleTree";
var ErrNodeBytesBadSize = "node data has incorrect size in the DB";
var ErrReachedMaxLevel = "reached maximum level of the merkle tree";
var ErrInvalidNodeFound = "found an invalid node in the DB";
var ErrInvalidProofBytes = "the serialized proof is invalid";
var ErrInvalidDBValue = "the value in the DB is invalid";
var ErrEntryIndexAlreadyExists = "the entry index already exists in the tree";
var ErrNotWritable = "Merkle Tree not writable";
// src/lib/errors/db.ts
var ErrNotFound = "key not found";
// src/lib/merkletree/circom.ts
// Verifier-side proof shaped for circom circuits; plain data holder.
var CircomVerifierProof = class {
  root;
  siblings;
  oldKey;
  oldValue;
  isOld0;
  key;
  value;
  // 0: inclusion, 1: non inclusion
  fnc;
  /** Every field defaults to an "empty" value so the proof can be filled in incrementally. */
  constructor(_root = ZERO_HASH, _siblings = [], _oldKey = ZERO_HASH, _oldValue = ZERO_HASH, _isOld0 = false, _key = ZERO_HASH, _value = ZERO_HASH, _fnc = 0) {
    Object.assign(this, {
      root: _root,
      siblings: _siblings,
      oldKey: _oldKey,
      oldValue: _oldValue,
      isOld0: _isOld0,
      key: _key,
      value: _value,
      fnc: _fnc
    });
  }
};
// Processor-side proof shaped for circom circuits; plain data holder.
var CircomProcessorProof = class {
  oldRoot;
  newRoot;
  siblings;
  oldKey;
  oldValue;
  newKey;
  newValue;
  isOld0;
  // 0: NOP, 1: Update, 2: Insert, 3: Delete
  fnc;
  /** Every field defaults to an "empty" value so the proof can be filled in incrementally. */
  constructor(_oldRoot = ZERO_HASH, _newRoot = ZERO_HASH, _siblings = [], _oldKey = ZERO_HASH, _oldValue = ZERO_HASH, _newKey = ZERO_HASH, _newValue = ZERO_HASH, _isOld0 = false, _fnc = 0) {
    Object.assign(this, {
      oldRoot: _oldRoot,
      newRoot: _newRoot,
      siblings: _siblings,
      oldKey: _oldKey,
      oldValue: _oldValue,
      newKey: _newKey,
      newValue: _newValue,
      isOld0: _isOld0,
      fnc: _fnc
    });
  }
};
// src/lib/errors/proof.ts
// Thrown (as a string) by rootFromProof when a non-existence proof's auxiliary
// node has the same index as the queried key; verifyProof maps it to `false`.
var ErrNodeAuxNonExistAgainstHIndex = "non-existence proof being checked against hIndex equal to nodeAux";
// src/lib/merkletree/proof.ts
// Merkle proof of (non-)inclusion. Stores only the non-empty siblings plus a
// bitmap (notEmpties) recording their positions; allSiblings() re-expands the
// full, zero-padded list.
var Proof = class _Proof {
existence;
depth;
// notEmpties is a bitmap of non-empty siblings found in siblings
notEmpties;
siblings;
nodeAux;
constructor(obj) {
this.existence = obj?.existence ?? false;
this.depth = 0;
this.nodeAux = obj?.nodeAux;
// reduceSiblings also sets this.depth as a side effect
const { siblings, notEmpties } = this.reduceSiblings(obj?.siblings);
this.siblings = siblings;
this.notEmpties = notEmpties;
}
// Binary layout: [flags, depth, notEmpties bitmap, packed siblings, optional
// nodeAux key+value]. Flag bit 0 marks non-existence, bit 1 a trailing nodeAux.
bytes() {
let bsLen = PROOF_FLAG_LEN + this.notEmpties.length + ELEM_BYTES_LEN * this.siblings.length;
if (typeof this.nodeAux !== "undefined") {
bsLen += 2 * ELEM_BYTES_LEN;
}
const arrBuff = new ArrayBuffer(bsLen);
const bs = new Uint8Array(arrBuff);
if (!this.existence) {
bs[0] |= 1;
}
bs[1] = this.depth;
bs.set(this.notEmpties, PROOF_FLAG_LEN);
const siblingBytes = siblings2Bytes(this.siblings);
bs.set(siblingBytes, this.notEmpties.length + PROOF_FLAG_LEN);
if (typeof this.nodeAux !== "undefined") {
bs[0] |= 2;
bs.set(this.nodeAux.key.value, bs.length - 2 * ELEM_BYTES_LEN);
bs.set(this.nodeAux.value.value, bs.length - 1 * ELEM_BYTES_LEN);
}
return bs;
}
// Serializes with the FULL (zero-padded) sibling list; hashes become decimal strings.
toJSON() {
return {
existence: this.existence,
siblings: this.allSiblings().map((s) => s.toJSON()),
node_aux: this.nodeAux ? {
key: this.nodeAux.key.toJSON(),
value: this.nodeAux.value.toJSON()
} : void 0
};
}
// Drops zero siblings, records their positions in the notEmpties bitmap, and
// sets this.depth to (index of last non-empty sibling) + 1. The
// JSON.stringify comparison relies on Hash.toJSON (decimal string), so zero
// hashes compare by content rather than by reference.
reduceSiblings(siblings) {
const reducedSiblings = [];
const notEmpties = new Uint8Array(NOT_EMPTIES_LEN);
if (!siblings) {
return { siblings: reducedSiblings, notEmpties };
}
for (let i = 0; i < siblings.length; i++) {
const sibling = siblings[i];
if (JSON.stringify(siblings[i]) !== JSON.stringify(ZERO_HASH)) {
setBitBigEndian(notEmpties, i);
reducedSiblings.push(sibling);
this.depth = i + 1;
}
}
return { notEmpties, siblings: reducedSiblings };
}
// Accepts both the serialized `node_aux` spelling and the in-memory `nodeAux`.
static fromJSON(obj) {
let nodeAux = void 0;
const nodeAuxJson = obj.node_aux ?? obj.nodeAux;
if (nodeAuxJson) {
nodeAux = {
key: Hash.fromString(nodeAuxJson.key),
value: Hash.fromString(nodeAuxJson.value)
};
}
const existence = obj.existence ?? false;
const siblings = obj.siblings.map((s) => Hash.fromString(s));
return new _Proof({ existence, nodeAux, siblings });
}
allSiblings() {
return _Proof.buildAllSiblings(this.depth, this.notEmpties, this.siblings);
}
// Inverse of reduceSiblings: re-inserts ZERO_HASH wherever the bitmap bit is clear.
static buildAllSiblings(depth, notEmpties, siblings) {
let sibIdx = 0;
const allSiblings = [];
for (let i = 0; i < depth; i += 1) {
if (testBitBigEndian(notEmpties, i)) {
allSiblings.push(siblings[sibIdx]);
sibIdx += 1;
} else {
allSiblings.push(ZERO_HASH);
}
}
return allSiblings;
}
};
// Returns the proof's full sibling list. The misspelled name ("siblignsFroom")
// is part of the published API and must not be renamed.
var siblignsFroomProof = (proof) => proof.allSiblings();
/**
 * Verifies a (non-)inclusion proof for (k, v) against rootKey by recomputing
 * the root. The string sentinel ErrNodeAuxNonExistAgainstHIndex is treated as
 * "proof invalid" (false); any other error propagates.
 */
var verifyProof = async (rootKey, proof, k, v) => {
  try {
    const computedRoot = await rootFromProof(proof, k, v);
    return bytesEqual(rootKey.value, computedRoot.value);
  } catch (err) {
    if (err === ErrNodeAuxNonExistAgainstHIndex) {
      return false;
    }
    throw err;
  }
};
// Recomputes the tree root implied by a proof for (k, v): start from the leaf
// (or the auxiliary/empty node for non-existence) and fold the siblings back
// up from the deepest level, choosing left/right by the key's path bits.
var rootFromProof = async (proof, k, v) => {
const kHash = Hash.fromBigInt(k);
const vHash = Hash.fromBigInt(v);
let midKey;
if (proof.existence) {
midKey = await leafKey(kHash, vHash);
} else {
if (typeof proof.nodeAux === "undefined") {
// non-existence through an empty slot
midKey = ZERO_HASH;
} else {
const nodeAux = proof.nodeAux;
// a non-existence proof whose aux node shares the key's index is invalid
if (bytesEqual(kHash.value, nodeAux.key.value)) {
throw ErrNodeAuxNonExistAgainstHIndex;
}
midKey = await leafKey(nodeAux.key, nodeAux.value);
}
}
const siblings = proof.allSiblings();
const path = getPath(siblings.length, kHash.value);
for (let i = siblings.length - 1; i >= 0; i -= 1) {
if (path[i]) {
midKey = await new NodeMiddle(siblings[i], midKey).getKey();
} else {
midKey = await new NodeMiddle(midKey, siblings[i]).getKey();
}
}
return midKey;
};
// src/lib/merkletree/merkletree.ts
var Merkletree = class {
// storage backend (InMemoryDB / LocalStorageDB / IndexedDBStorage)
_db;
// cached root hash; undefined until lazily loaded by root()
_root;
// when false, every mutating operation throws ErrNotWritable
_writable;
// maximum depth of the tree
_maxLevel;
constructor(_db, _writable, _maxLevels) {
this._db = _db;
this._writable = _writable;
this._maxLevel = _maxLevels;
}
async root() {
if (!this._root) {
this._root = await this._db.getRoot();
}
return this._root;
}
// Maximum tree depth, fixed at construction time.
get maxLevels() {
return this._maxLevel;
}
async add(k, v) {
if (!this._writable) {
throw ErrNotWritable;
}
this._root = await this.root();
const kHash = Hash.fromBigInt(k);
const vHash = Hash.fromBigInt(v);
const newNodeLeaf = new NodeLeaf(kHash, vHash);
const path = getPath(this.maxLevels, kHash.value);
const newRootKey = await this.addLeaf(newNodeLeaf, this._root, 0, path);
this._root = newRootKey;
await this._db.setRoot(this._root);
}
async updateNode(n) {
if (!this._writable) {
throw ErrNotWritable;
}
if (n.type === NODE_TYPE_EMPTY) {
return await n.getKey();
}
const k = await n.getKey();
await this._db.put(k.value, n);
return k;
}
async addNode(n) {
if (!this._writable) {
throw ErrNotWritable;
}
if (n.type === NODE_TYPE_EMPTY) {
return await n.getKey();
}
const k = await n.getKey();
await this._db.put(k.value, n);
return k;
}
async addEntry(e) {
if (!this._writable) {
throw ErrNotWritable;
}
if (!checkEntryInField(e)) {
throw "elements not inside the finite field over r";
}
this._root = await this._db.getRoot();
const hIndex = await e.hIndex();
const hValue = await e.hValue();
const newNodeLeaf = new NodeLeaf(hIndex, hValue);
const path = getPath(this.maxLevels, hIndex.value);
const newRootKey = await this.addLeaf(newNodeLeaf, this._root, 0, path);
this._root = newRootKey;
await this._db.setRoot(this._root);
}
// Pushes a colliding pair (existing oldLeaf, incoming newLeaf) down the tree
// until their paths diverge, then links both under a new middle node. Needs
// two free levels, hence the maxLevel - 2 bound.
async pushLeaf(newLeaf, oldLeaf, lvl, pathNewLeaf, pathOldLeaf) {
if (lvl > this._maxLevel - 2) {
throw new Error(ErrReachedMaxLevel);
}
let newNodeMiddle;
if (pathNewLeaf[lvl] === pathOldLeaf[lvl]) {
// paths still agree: recurse deeper, then hang the result under a middle
// node whose other child is empty (new Hash() is an all-zero hash)
const nextKey = await this.pushLeaf(newLeaf, oldLeaf, lvl + 1, pathNewLeaf, pathOldLeaf);
if (pathNewLeaf[lvl]) {
newNodeMiddle = new NodeMiddle(new Hash(), nextKey);
} else {
newNodeMiddle = new NodeMiddle(nextKey, new Hash());
}
return await this.addNode(newNodeMiddle);
}
// paths diverge at this level: place each leaf on its own side
const oldLeafKey = await oldLeaf.getKey();
const newLeafKey = await newLeaf.getKey();
if (pathNewLeaf[lvl]) {
newNodeMiddle = new NodeMiddle(oldLeafKey, newLeafKey);
} else {
newNodeMiddle = new NodeMiddle(newLeafKey, oldLeafKey);
}
// oldLeaf is already persisted; only the new leaf and the middle node are stored
await this.addNode(newLeaf);
return await this.addNode(newNodeMiddle);
}
async addLeaf(newLeaf, key, lvl, path) {
if (lvl > this._maxLevel - 1) {
throw new Error(ErrReachedMaxLevel);
}
const n = await this.getNode(key);
if (typeof n === "undefined") {
throw ErrNotFound;
}
switch (n.type) {
case NODE_TYPE_EMPTY:
return this.addNode(newLeaf);
case NODE_TYPE_LEAF: {
const nKey = n.entry[0];
const newLeafKey = newLeaf.entry[0];
if (bytesEqual(nKey.value, newLeafKey.value)) {
throw ErrEntryIndexAlreadyExists;
}
const pathOldLeaf = getPath(this.maxLevels, nKey.value);
return this.pushLeaf(newLeaf, n, lvl, path, pathOldLeaf);
}
case NODE_TYPE_MIDDLE: {
n;
let newNodeMiddle;
if (path[lvl]) {
const nextKey = await this.addLeaf(newLeaf, n.childR, lvl + 1, path);
newNodeMiddle = new NodeMiddle(n.childL, nextKey);
} else {
const nextKey = await this.addLeaf(newLeaf, n.childL, lvl + 1, path);
newNodeMiddle = new NodeMiddle(nextKey, n.childR);
}
return this.addNode(newNodeMiddle);
}
default: {
throw ErrInvalidNodeFound;
}
}
}
// Looks up key k. Resolves to { key, value, siblings }. When the walk ends at
// an empty node, key/value are 0 (non-inclusion, with the siblings gathered).
// NOTE(review): when the walk ends at a leaf with a DIFFERENT index, that
// leaf's key/value are returned as-is — callers must compare `key` with what
// they asked for (addAndGetCircomProof relies on this).
async get(k) {
const kHash = Hash.fromBigInt(k);
const path = getPath(this.maxLevels, kHash.value);
let nextKey = await this.root();
const siblings = [];
for (let i = 0; i < this.maxLevels; i++) {
const n = await this.getNode(nextKey);
if (typeof n === "undefined") {
throw ErrKeyNotFound;
}
switch (n.type) {
case NODE_TYPE_EMPTY:
return {
key: BigInt("0"),
value: BigInt("0"),
siblings
};
case NODE_TYPE_LEAF:
return {
key: n.entry[0].bigInt(),
value: n.entry[1].bigInt(),
siblings
};
case NODE_TYPE_MIDDLE:
// descend: the path bit selects the child, the other child is a sibling
if (path[i]) {
nextKey = n.childR;
siblings.push(n.childL);
} else {
nextKey = n.childL;
siblings.push(n.childR);
}
break;
default:
throw ErrInvalidNodeFound;
}
}
throw new Error(ErrReachedMaxLevel);
}
async update(k, v) {
if (!this._writable) {
throw ErrNotWritable;
}
if (!checkBigIntInField(k)) {
throw "key not inside the finite field";
}
if (!checkBigIntInField(v)) {
throw "key not inside the finite field";
}
const kHash = Hash.fromBigInt(k);
const vHash = Hash.fromBigInt(v);
const path = getPath(this.maxLevels, kHash.value);
const cp = new CircomProcessorProof();
cp.fnc = 1;
cp.oldRoot = await this.root();
cp.oldKey = kHash;
cp.newKey = kHash;
cp.newValue = vHash;
let nextKey = await this.root();
const siblings = [];
for (let i = 0; i < this.maxLevels; i += 1) {
const n = await this.getNode(nextKey);
if (typeof n === "undefined") {
throw ErrNotFound;
}
switch (n.type) {
case NODE_TYPE_EMPTY:
throw ErrKeyNotFound;
case NODE_TYPE_LEAF:
if (bytesEqual(kHash.value, n.entry[0].value)) {
cp.oldValue = n.entry[1];
cp.siblings = circomSiblingsFromSiblings([...siblings], this.maxLevels);
const newNodeLeaf = new NodeLeaf(kHash, vHash);
await this.updateNode(newNodeLeaf);
const newRootKey = await this.recalculatePathUntilRoot(path, newNodeLeaf, siblings);
this._root = newRootKey;
await this._db.setRoot(newRootKey);
cp.newRoot = newRootKey;
return cp;
}
break;
case NODE_TYPE_MIDDLE:
if (path[i]) {
nextKey = n.childR;
siblings.push(n.childL);
} else {
nextKey = n.childL;
siblings.push(n.childR);
}
break;
default:
throw ErrInvalidNodeFound;
}
}
throw ErrKeyNotFound;
}
async getNode(k) {
if (bytesEqual(k.value, ZERO_HASH.value)) {
return new NodeEmpty();
}
return await this._db.get(k.value);
}
async recalculatePathUntilRoot(path, node, siblings) {
for (let i = siblings.length - 1; i >= 0; i -= 1) {
const nodeKey2 = await node.getKey();
if (path[i]) {
node = new NodeMiddle(siblings[i], nodeKey2);
} else {
node = new NodeMiddle(nodeKey2, siblings[i]);
}
await this.addNode(node);
}
const nodeKey = await node.getKey();
return nodeKey;
}
// Delete removes the specified Key from the MerkleTree and updates the path
// from the deleted key to the Root with the new values. This method removes
// the key from the MerkleTree, but does not remove the old nodes from the
// key-value database; this means that if the tree is accessed by an old Root
// where the key was not deleted yet, the key will still exist. If is desired
// to remove the key-values from the database that are not under the current
// Root, an option could be to dump all the leaves (using mt.DumpLeafs) and
// import them in a new MerkleTree in a new database (using
// mt.ImportDumpedLeafs), but this will loose all the Root history of the
// MerkleTree
async delete(k) {
if (!this._writable) {
throw ErrNotWritable;
}
const kHash = Hash.fromBigInt(k);
const path = getPath(this.maxLevels, kHash.value);
let nextKey = this._root;
const siblings = [];
for (let i = 0; i < this._maxLevel; i += 1) {
const n = await this.getNode(nextKey);
if (typeof n === "undefined") {
throw ErrNotFound;
}
switch (n.type) {
case NODE_TYPE_EMPTY:
throw ErrKeyNotFound;
case NODE_TYPE_LEAF:
if (bytesEqual(kHash.bytes, n.entry[0].value)) {
await this.rmAndUpload(path, kHash, siblings);
return;
}
throw ErrKeyNotFound;
case NODE_TYPE_MIDDLE:
if (path[i]) {
nextKey = n.childR;
siblings.push(n.childL);
} else {
nextKey = n.childL;
siblings.push(n.childR);
}
break;
default:
throw ErrInvalidNodeFound;
}
}
throw ErrKeyNotFound;
}
// After deleting a leaf, collapses the path: the deepest sibling (toUpload)
// is floated up past zero siblings and the path to the root is recomputed.
async rmAndUpload(path, kHash, siblings) {
if (siblings.length === 0) {
// deleted the only leaf: the tree is empty again
this._root = ZERO_HASH;
await this._db.setRoot(this._root);
return;
}
const toUpload = siblings[siblings.length - 1];
if (siblings.length < 2) {
// single sibling: it becomes the root for now
// NOTE(review): no early return here — when the sibling turns out to be a
// middle node below, the root set here is overwritten; confirm intended.
this._root = siblings[0];
await this._db.setRoot(this._root);
}
const nearestSibling = await this._db.get(toUpload.bytes);
if (nearestSibling?.type === NODE_TYPE_MIDDLE) {
// the sibling is a stored middle node, so it cannot be moved up: keep it
// in place with an empty slot where the deleted leaf was, then rebuild
let newNode;
if (path[siblings.length - 1]) {
newNode = new NodeMiddle(toUpload, ZERO_HASH);
} else {
newNode = new NodeMiddle(ZERO_HASH, toUpload);
}
await this.addNode(newNode);
const newRootKey = await this.recalculatePathUntilRoot(
path,
newNode,
siblings.slice(0, siblings.length - 1)
);
this._root = newRootKey;
await this._db.setRoot(this._root);
return;
}
// otherwise float toUpload up past consecutive zero siblings until the
// first non-zero one, then rebuild the remaining path above it
for (let i = siblings.length - 2; i >= 0; i -= 1) {
if (!bytesEqual(siblings[i].value, ZERO_HASH.value)) {
let newNode;
if (path[i]) {
newNode = new NodeMiddle(siblings[i], toUpload);
} else {
newNode = new NodeMiddle(toUpload, siblings[i]);
}
await this.addNode(newNode);
const newRootKey = await this.recalculatePathUntilRoot(path, newNode, siblings.slice(0, i));
this._root = newRootKey;
await this._db.setRoot(this._root);
break;
}
if (i === 0) {
// every remaining sibling is zero: toUpload becomes the new root
this._root = toUpload;
await this._db.setRoot(this._root);
break;
}
}
}
async recWalk(key, f) {
const n = await this.getNode(key);
if (typeof n === "undefined") {
throw ErrNotFound;
}
switch (n.type) {
case NODE_TYPE_EMPTY:
await f(n);
break;
case NODE_TYPE_LEAF:
await f(n);
break;
case NODE_TYPE_MIDDLE:
await f(n);
await this.recWalk(n.childL, f);
await this.recWalk(n.childR, f);
break;
default:
throw ErrInvalidNodeFound;
}
}
async walk(rootKey, f) {
if (bytesEqual(rootKey.value, ZERO_HASH.value)) {
rootKey = await this.root();
}
await this.recWalk(rootKey, f);
}
async generateCircomVerifierProof(k, rootKey) {
const cp = await this.generateSCVerifierProof(k, rootKey);
cp.siblings = circomSiblingsFromSiblings(cp.siblings, this.maxLevels);
return cp;
}
async generateSCVerifierProof(k, rootKey) {
if (bytesEqual(rootKey.value, ZERO_HASH.value)) {
rootKey = await this.root();
}
const { proof, value } = await this.generateProof(k, rootKey);
const cp = new CircomVerifierProof();
cp.root = rootKey;
cp.siblings = proof.allSiblings();
if (typeof proof.nodeAux !== "undefined") {
cp.oldKey = proof.nodeAux.key;
cp.oldValue = proof.nodeAux.value;
} else {
cp.oldKey = ZERO_HASH;
cp.oldValue = ZERO_HASH;
}
cp.key = Hash.fromBigInt(k);
cp.value = Hash.fromBigInt(value);
if (proof.existence) {
cp.fnc = 0;
} else {
cp.fnc = 1;
}
return cp;
}
async generateProof(k, rootKey) {
let siblingKey;
const kHash = Hash.fromBigInt(k);
const path = getPath(this.maxLevels, kHash.value);
if (!rootKey) {
rootKey = await this.root();
}
let nextKey = rootKey;
let depth = 0;
let existence = false;
const siblings = [];
let nodeAux;
for (depth = 0; depth < this.maxLevels; depth += 1) {
const n = await this.getNode(nextKey);
if (typeof n === "undefined") {
throw ErrNotFound;
}
switch (n.type) {
case NODE_TYPE_EMPTY:
return {
proof: new Proof({
existence,
nodeAux,
siblings
}),
value: BigInt("0")
};
case NODE_TYPE_LEAF:
if (bytesEqual(kHash.value, n.entry[0].value)) {
existence = true;
return {
proof: new Proof({
existence,
nodeAux,
siblings
}),
value: n.entry[1].bigInt()
};
}
nodeAux = {
key: n.entry[0],
value: n.entry[1]
};
return {
proof: new Proof({
existence,
nodeAux,
siblings
}),
value: n.entry[1].bigInt()
};
case NODE_TYPE_MIDDLE:
if (path[depth]) {
nextKey = n.childR;
siblingKey = n.childL;
} else {
nextKey = n.childL;
siblingKey = n.childR;
}
break;
default:
throw ErrInvalidNodeFound;
}
siblings.push(siblingKey);
}
throw ErrKeyNotFound;
}
async addAndGetCircomProof(k, v) {
const cp = new CircomProcessorProof();
cp.fnc = 2;
cp.oldRoot = await this.root();
let key = BigInt("0");
let value = BigInt("0");
let siblings = [];
try {
const res = await this.get(k);
key = res.key;
value = res.value;
siblings = res.siblings;
} catch (err) {
if (err !== ErrKeyNotFound) {
throw err;
}
}
if (typeof key === "undefined" || typeof value === "undefined") {
throw "key/value undefined";
}
cp.oldKey = Hash.fromBigInt(key);
cp.oldValue = Hash.fromBigInt(value);
if (bytesEqual(cp.oldKey.value, ZERO_HASH.value)) {
cp.isOld0 = true;
}
cp.siblings = circomSiblingsFromSiblings(siblings, this.maxLevels);
await this.add(k, v);
cp.newKey = Hash.fromBigInt(k);
cp.newValue = Hash.fromBigInt(v);
cp.newRoot = await this.root();
return cp;
}
// NOTE: for now it only prints to console, will be updated in future
async graphViz(rootKey) {
let cnt = 0;
await this.walk(rootKey, async (n) => {
const k = await n.getKey();
let lr;
let emptyNodes;
switch (n.type) {
case NODE_TYPE_EMPTY:
break;
case NODE_TYPE_LEAF:
console.log(`"${k.string()}" [style=filled]`);
break;
case NODE_TYPE_MIDDLE:
lr = [n.childL.string(), n.childR.string()];
emptyNodes = "";
lr.forEach((s, i) => {
if (s === "0") {
lr[i] = `empty${cnt}`;
emptyNodes += `"${lr[i]}" [style=dashed,label=0];
`;
cnt += 1;
}
});
console.log(`"${k.string()}" -> {"${lr[1]}"}`);
console.log(emptyNodes);
break;
default:
break;
}
});
console.log(`}
`);
}
// Prints a console GraphViz dump of the tree, framed by header/footer
// lines labelled with `rootKey`'s decimal value.
async printGraphViz(rootKey) {
// A zero-hash rootKey is replaced by the current root (label only).
if (bytesEqual(rootKey.value, ZERO_HASH.value)) {
rootKey = await this.root();
}
console.log(
`--------
GraphViz of the MerkleTree with RootKey ${rootKey.bigInt().toString(10)}
`
);
// NOTE(review): graphViz is called with ZERO_HASH, so the walk always
// starts from the CURRENT root; the `rootKey` argument only affects the
// printed labels. This mirrors go-merkletree's PrintGraphViz — confirm
// it is intentional before "fixing" it to pass rootKey through.
await this.graphViz(ZERO_HASH);
console.log(
`End of GraphViz of the MerkleTree with RootKey ${rootKey.bigInt().toString(10)}
--------
`
);
}
};
// Annotate the CommonJS export names for ESM import in node:
// Dead code by design: the `0 &&` guard means it never executes at
// runtime. Node's cjs-module-lexer (and bundlers) parse it statically to
// surface these names as named ESM exports. Do not edit the identifiers —
// they must match the real exports above, including the upstream
// misspelling `siblignsFroomProof`, which is the library's actual
// public export name.
0 && (module.exports = {
CircomProcessorProof,
CircomVerifierProof,
DATA_LEN,
DATA_LEN_BYTES,
Data,
ELEM_BYTES_LEN,
EMPTY_NODE_STRING,
EMPTY_NODE_VALUE,
ElemBytes,
Entry,
ErrEntryIndexAlreadyExists,
ErrInvalidDBValue,
ErrInvalidNodeFound,
ErrInvalidProofBytes,
ErrKeyNotFound,
ErrNodeBytesBadSize,
ErrNodeKeyAlreadyExists,
ErrNotFound,
ErrNotWritable,
ErrReachedMaxLevel,
FIELD_SIZE,
HASH_BYTES_LENGTH,
Hash,
InMemoryDB,
IndexedDBStorage,
LocalStorageDB,
MAX_NUM_IN_FIELD,
Merkletree,
NODE_TYPE_EMPTY,
NODE_TYPE_LEAF,
NODE_TYPE_MIDDLE,
NODE_VALUE_BYTE_ARR_LENGTH,
NOT_EMPTIES_LEN,
NodeEmpty,
NodeLeaf,
NodeMiddle,
PROOF_FLAG_LEN,
Proof,
ZERO_HASH,
bigIntToUINT8Array,
bigint2Array,
bytes2BinaryString,
bytes2Hex,
bytesEqual,
checkBigIntInField,
checkEntryInField,
circomSiblingsFromSiblings,
elemBytesToBigInts,
getPath,
hashElems,
hashElemsKey,
newBigIntFromBytes,
newDataFromBytes,
newHashFromBigInt,
newHashFromHex,
newHashFromString,
rootFromProof,
setBitBigEndian,
siblignsFroomProof,
siblings2Bytes,
str2Bytes,
swapEndianness,
testBit,
testBitBigEndian,
verifyProof
});
//# sourceMappingURL=index.cjs.map