
@iden3/js-merkletree


JavaScript sparse Merkle tree library

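What follows is the package's bundled source map (version 3), whose `sourcesContent` field embeds the library's TypeScript sources (`Merkletree`, `Hash`, `Proof`, the `InMemoryDB`/`LocalStorageDB`/`IndexedDBStorage` backends, and related utilities). Before the raw map, here is a minimal, hedged usage sketch based only on the APIs visible in those embedded sources; it assumes `InMemoryDB`, `Merkletree`, `str2Bytes`, and `verifyProof` are re-exported from the package root, and the depth of 40 levels is an arbitrary example value, not a library default.

```ts
// Sketch only: symbols assumed to be re-exported from '@iden3/js-merkletree'.
import { InMemoryDB, Merkletree, str2Bytes, verifyProof } from '@iden3/js-merkletree';

async function demo(): Promise<void> {
  // In-memory storage keyed by a byte prefix; third argument is the maximum tree depth.
  const storage = new InMemoryDB(str2Bytes('example'));
  const mt = new Merkletree(storage, true, 40);

  // Insert a key/value pair; both are bigints inside the finite field.
  await mt.add(BigInt(1), BigInt(2));

  // Generate an inclusion proof for key 1 and verify it against the current root.
  const { proof, value } = await mt.generateProof(BigInt(1));
  const ok = await verifyProof(await mt.root(), proof, BigInt(1), value);
  console.log('proof valid:', ok); // expected: true
}

demo().catch(console.error);
```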
{ "version": 3, "sources": ["../../../src/index.ts", "../../../src/constants/hash.ts", "../../../src/constants/node.ts", "../../../src/constants/data.ts", "../../../src/constants/proof.ts", "../../../src/constants/field.ts", "../../../src/lib/utils/crypto.ts", "../../../src/lib/utils/bytes.ts", "../../../src/lib/utils/merkletree.ts", "../../../src/lib/utils/bigint.ts", "../../../src/lib/hash/hash.ts", "../../../src/lib/db/inMemory.ts", "../../../src/lib/utils/node.ts", "../../../src/lib/node/node.ts", "../../../src/lib/db/localStorage.ts", "../../../src/lib/db/indexedDB.ts", "../../../src/lib/entry/elemBytes.ts", "../../../src/lib/entry/data.ts", "../../../src/lib/entry/entry.ts", "../../../src/lib/errors/merkletree.ts", "../../../src/lib/errors/db.ts", "../../../src/lib/merkletree/circom.ts", "../../../src/lib/errors/proof.ts", "../../../src/lib/merkletree/proof.ts", "../../../src/lib/merkletree/merkletree.ts"], "sourcesContent": ["export * from './constants';\nexport * from './lib';\nexport * from './types';\n", "export const HASH_BYTES_LENGTH = 32;\n", "// middle node.ts with children\nimport { NodeType } from '../types';\n\nexport const NODE_TYPE_MIDDLE: NodeType = 0;\n// Leaf node.ts with a key and a value\nexport const NODE_TYPE_LEAF: NodeType = 1;\n// empty node.ts\nexport const NODE_TYPE_EMPTY: NodeType = 2;\n\nexport const NODE_VALUE_BYTE_ARR_LENGTH = 65;\n\nexport const EMPTY_NODE_VALUE = new Uint8Array(NODE_VALUE_BYTE_ARR_LENGTH);\n\nexport const EMPTY_NODE_STRING = 'empty';\n", "export const ELEM_BYTES_LEN = 32;\nexport const DATA_LEN = 8;\nexport const DATA_LEN_BYTES = DATA_LEN * ELEM_BYTES_LEN;\n", "import { ELEM_BYTES_LEN } from './data';\nexport const PROOF_FLAG_LEN = 2;\nexport const NOT_EMPTIES_LEN = ELEM_BYTES_LEN - PROOF_FLAG_LEN;\n", "const qString = '21888242871839275222246405745257275088548364400416034343698204186575808495617';\n\nexport const FIELD_SIZE = BigInt(qString);\nexport const MAX_NUM_IN_FIELD = FIELD_SIZE - BigInt('1');\n", "import { FIELD_SIZE } from '../../constants/field';\n\nexport const checkBigIntInField = (bigNum: bigint): boolean => {\n return bigNum < FIELD_SIZE;\n};\n", "import { HASH_BYTES_LENGTH } from '../../constants';\nimport { Bytes } from '../../types/bytes';\nimport { checkBigIntInField } from './crypto';\n\nexport const bytesEqual: (b1: Bytes, b2: Bytes) => boolean = (b1, b2) => {\n return b1.every((ele, idx) => ele === b2[idx]);\n};\n\n// TODO: might be make this generic over typed arrays?\nexport const swapEndianness = (bytes: Bytes): Bytes => {\n return bytes.slice().reverse();\n};\n\nexport const bytes2BinaryString = (bytes: Bytes): string => {\n return '0b' + bytes.reduce((acc, i) => acc + i.toString(2).padStart(8, '0'), '');\n};\n\nexport const testBit = (bitMap: Bytes, n: number): boolean => {\n return (bitMap[parseInt((n / 8).toString())] & (1 << n % 8)) !== 0;\n};\n\nexport const testBitBigEndian = (bitMap: Bytes, n: number): boolean => {\n return (bitMap[bitMap.length - parseInt(`${n / 8}`) - 1] & (1 << n % 8)) !== 0;\n};\n\n// SetBitBigEndian sets the bit n in the bitmap to 1, in Big Endian.\nexport const setBitBigEndian = (bitMap: Bytes, n: number): void => {\n bitMap[bitMap.length - parseInt(`${n / 8}`) - 1] |= 1 << n % 8;\n};\n\nconst hexTable = '0123456789abcdef';\nexport const bytes2Hex = (u: Bytes): string => {\n const arr = new Array(u.length * 2);\n let j = 0;\n u.forEach((v) => {\n arr[j] = hexTable[parseInt((v >> 4).toString(10))];\n arr[j + 1] = hexTable[parseInt((v & 15).toString(10))];\n j += 2;\n });\n\n return 
arr.join('');\n};\n\n// NOTE: `bytes` should be big endian\n// bytes received from Hash.value getter are safe to use since their endianness is swapped, for the same reason the private Hash.bytes { stored in little endian } should never be used\nexport const newBigIntFromBytes = (bytes: Bytes): bigint => {\n if (bytes.length !== HASH_BYTES_LENGTH) {\n throw `Expected 32 bytes, found ${bytes.length} bytes`;\n }\n\n const bigNum = BigInt(bytes2BinaryString(bytes));\n if (!checkBigIntInField(bigNum)) {\n throw 'NewBigIntFromHashBytes: Value not inside the Finite Field';\n }\n\n return bigNum;\n};\n\nexport const str2Bytes = (str: string): Bytes =>\n new Uint8Array(str.length * 2).map((_, i) => str.charCodeAt(i));\n", "// const siblingBytes = bs.slice(this.notEmpties.length + PROOF_FLAG_LEN);\nimport { HASH_BYTES_LENGTH } from '../../constants';\nimport { Bytes } from '../../types';\nimport { Path, Siblings } from '../../types/merkletree';\nimport { testBit } from './bytes';\n\nexport const getPath = (numLevels: number, k: Bytes): Path => {\n const path = new Array<boolean>(numLevels);\n\n for (let idx = 0; idx < numLevels; idx += 1) {\n path[idx] = testBit(k, idx);\n }\n return path;\n};\n\nexport const siblings2Bytes = (siblings: Siblings): Bytes => {\n const siblingBytesBuff = new ArrayBuffer(HASH_BYTES_LENGTH * siblings.length);\n const siblingBytes = new Uint8Array(siblingBytesBuff);\n siblings.forEach((v, i) => {\n siblingBytes.set(v.value, i * HASH_BYTES_LENGTH);\n });\n\n return siblingBytes;\n};\n", "import { HASH_BYTES_LENGTH } from '../../constants';\nimport { Bytes } from '../../types';\n\nexport const bigint2Array = (bigNum: bigint, radix?: number): Array<number> => {\n return bigNum\n .toString(radix ? radix : 10)\n .split('')\n .map((n) => {\n return parseInt(n);\n });\n};\n\nexport const bigIntToUINT8Array = (bigNum: bigint): Bytes => {\n const n256 = BigInt(256);\n const bytes = new Uint8Array(HASH_BYTES_LENGTH);\n let i = 0;\n while (bigNum > BigInt(0)) {\n bytes[HASH_BYTES_LENGTH - 1 - i] = Number(bigNum % n256);\n bigNum = bigNum / n256;\n i += 1;\n }\n return bytes;\n};\n", "import { HASH_BYTES_LENGTH } from '../../constants';\nimport {\n bytesEqual,\n swapEndianness,\n bytes2Hex,\n bytes2BinaryString,\n checkBigIntInField,\n bigIntToUINT8Array\n} from '../utils';\nimport { Bytes, IHash, Siblings } from '../../types';\nimport { Hex, poseidon } from '@iden3/js-crypto';\n\nexport class Hash implements IHash {\n // little endian\n bytes: Bytes;\n\n constructor(_bytes?: Bytes) {\n if (_bytes?.length) {\n if (_bytes.length !== HASH_BYTES_LENGTH) {\n throw new Error(`Expected ${HASH_BYTES_LENGTH} bytes, found ${_bytes.length} bytes`);\n }\n this.bytes = _bytes;\n } else {\n this.bytes = new Uint8Array(HASH_BYTES_LENGTH);\n }\n }\n\n // returns a new copy, in little endian\n get value(): Bytes {\n return this.bytes;\n }\n\n // bytes should be in big-endian\n set value(bytes: Bytes) {\n if (bytes.length !== HASH_BYTES_LENGTH) {\n throw `Expected 32 bytes, found ${bytes.length} bytes`;\n }\n this.bytes = swapEndianness(bytes);\n }\n\n string(): string {\n return this.bigInt().toString(10);\n }\n\n hex(): string {\n return bytes2Hex(this.bytes);\n }\n\n equals(hash: Hash): boolean {\n return bytesEqual(this.value, hash.value);\n }\n\n bigInt(): bigint {\n const bytes = swapEndianness(this.value);\n return BigInt(bytes2BinaryString(bytes));\n }\n\n static fromString(s: string): Hash {\n try {\n return Hash.fromBigInt(BigInt(s));\n } catch (e) {\n const deserializedHash = 
JSON.parse(s);\n const bytes = Uint8Array.from(Object.values(deserializedHash.bytes));\n return new Hash(bytes);\n }\n }\n static fromBigInt(i: bigint): Hash {\n if (!checkBigIntInField(i)) {\n throw new Error('NewBigIntFromHashBytes: Value not inside the Finite Field');\n }\n\n const bytes = bigIntToUINT8Array(i);\n\n return new Hash(swapEndianness(bytes));\n }\n\n static fromHex(h: string | undefined): Hash {\n if (!h) {\n return ZERO_HASH;\n }\n return new Hash(Hex.decodeString(h));\n }\n\n toJSON() {\n return this.string();\n }\n}\n\nexport const ZERO_HASH = new Hash();\n\n/**\n * @deprecated The method should not be used and will be removed in the next major version,\n * please use Hash.fromBigInt instead\n */\nexport const newHashFromBigInt = (bigNum: bigint): Hash => {\n return Hash.fromBigInt(bigNum);\n};\n\n/**\n * @deprecated The method should not be used and will be removed in the next major version,\n * please use Hash.fromBigInt instead\n */\nexport const newHashFromHex = (h: string): Hash => {\n return Hash.fromHex(h);\n};\n\n/**\n * @deprecated The method should not be used and will be removed in the next major version,\n * please use Hash.fromBigString instead\n */\nexport const newHashFromString = (decimalString: string): Hash => {\n return Hash.fromString(decimalString);\n};\n\nexport const hashElems = (e: Array<bigint>): Hash => {\n const hashBigInt = poseidon.hash(e);\n return Hash.fromBigInt(hashBigInt);\n};\n\nexport const hashElemsKey = (k: bigint, e: Array<bigint>): Hash => {\n const hashBigInt = poseidon.hash([...e, k]);\n return Hash.fromBigInt(hashBigInt);\n};\n\nexport const circomSiblingsFromSiblings = (siblings: Siblings, levels: number): Siblings => {\n for (let i = siblings.length; i < levels; i += 1) {\n siblings.push(ZERO_HASH);\n }\n return siblings;\n};\n", "// in Memory Database implementation\n\nimport { Bytes, Node } from '../../types';\nimport { ITreeStorage } from '../../types/storage';\nimport { Hash, ZERO_HASH } from '../hash/hash';\n\nexport class InMemoryDB implements ITreeStorage {\n prefix: Bytes;\n private _kvMap: {\n [k in string]: Node;\n };\n private _currentRoot: Hash;\n\n constructor(_prefix: Bytes) {\n this.prefix = _prefix;\n this._kvMap = {};\n this._currentRoot = ZERO_HASH;\n }\n\n async get(k: Bytes): Promise<Node | undefined> {\n const kBytes = new Uint8Array([...this.prefix, ...k]);\n const val = this._kvMap[kBytes.toString()] ? 
this._kvMap[kBytes.toString()] : undefined;\n return val;\n }\n\n async put(k: Bytes, n: Node): Promise<void> {\n const kBytes = new Uint8Array([...this.prefix, ...k]);\n this._kvMap[kBytes.toString()] = n;\n }\n\n async getRoot(): Promise<Hash> {\n return this._currentRoot;\n }\n\n async setRoot(r: Hash): Promise<void> {\n this._currentRoot = r;\n }\n}\n", "// LeafKey computes the key of a leaf node given the hIndex and hValue of the\n// entry of the leaf.\nimport { Hash, hashElemsKey } from '../hash/hash';\n\nimport { NODE_VALUE_BYTE_ARR_LENGTH } from '../../constants';\nimport { bigIntToUINT8Array } from './bigint';\nimport { Bytes, NodeType } from '../../types';\n\nexport const leafKey = async (k: Hash, v: Hash): Promise<Hash> => {\n return hashElemsKey(BigInt(1), [k.bigInt(), v.bigInt()]);\n};\n\nexport const nodeValue = (type: NodeType, a: Hash, b: Hash): Bytes => {\n const bytes = new Uint8Array(NODE_VALUE_BYTE_ARR_LENGTH);\n const kBytes = bigIntToUINT8Array(a.bigInt());\n const vBytes = bigIntToUINT8Array(b.bigInt());\n bytes[0] = type;\n\n for (let idx = 1; idx < 33; idx += 1) {\n bytes[idx] = kBytes[idx - 1];\n }\n\n for (let idx = 33; idx <= NODE_VALUE_BYTE_ARR_LENGTH; idx += 1) {\n bytes[idx] = vBytes[idx - 33];\n }\n\n return bytes;\n};\n", "import { Bytes, Node, NodeType } from '../../types';\nimport { Hash, ZERO_HASH, hashElems } from '../hash/hash';\n\nimport {\n EMPTY_NODE_STRING,\n EMPTY_NODE_VALUE,\n NODE_TYPE_EMPTY,\n NODE_TYPE_LEAF,\n NODE_TYPE_MIDDLE\n} from '../../constants';\nimport { leafKey, nodeValue } from '../utils/node';\n\nexport class NodeLeaf implements Node {\n type: NodeType;\n entry: [Hash, Hash];\n // cache used to avoid recalculating key\n private _key: Hash;\n\n constructor(k: Hash, v: Hash) {\n this.type = NODE_TYPE_LEAF;\n this.entry = [k, v];\n this._key = ZERO_HASH;\n }\n\n async getKey(): Promise<Hash> {\n if (this._key === ZERO_HASH) {\n return await leafKey(this.entry[0], this.entry[1]);\n }\n return this._key;\n }\n\n get value(): Bytes {\n return nodeValue(this.type, this.entry[0], this.entry[1]);\n }\n\n get string(): string {\n return `Leaf I:${this.entry[0]} D:${this.entry[1]}`;\n }\n}\n\nexport class NodeMiddle implements Node {\n type: NodeType;\n childL: Hash;\n childR: Hash;\n private _key: Hash;\n\n constructor(cL: Hash, cR: Hash) {\n this.type = NODE_TYPE_MIDDLE;\n this.childL = cL;\n this.childR = cR;\n this._key = ZERO_HASH;\n }\n\n async getKey(): Promise<Hash> {\n if (this._key === ZERO_HASH) {\n return hashElems([this.childL.bigInt(), this.childR.bigInt()]);\n }\n return this._key;\n }\n\n get value(): Bytes {\n return nodeValue(this.type, this.childL, this.childR);\n }\n\n get string(): string {\n return `Middle L:${this.childL} R:${this.childR}`;\n }\n}\n\nexport class NodeEmpty implements Node {\n type: NodeType;\n private _key: Hash;\n\n constructor() {\n this.type = NODE_TYPE_EMPTY;\n this._key = ZERO_HASH;\n }\n\n async getKey(): Promise<Hash> {\n return ZERO_HASH;\n }\n\n get value(): Bytes {\n return EMPTY_NODE_VALUE;\n }\n\n get string(): string {\n return EMPTY_NODE_STRING;\n }\n}\n", "/* eslint-disable no-case-declarations */\n\nimport { Bytes, Node } from '../../types';\nimport { ITreeStorage } from '../../types/storage';\nimport { Hash, ZERO_HASH } from '../hash/hash';\nimport { NODE_TYPE_EMPTY, NODE_TYPE_LEAF, NODE_TYPE_MIDDLE } from '../../constants';\nimport { NodeEmpty, NodeLeaf, NodeMiddle } from '../node/node';\nimport { bytes2Hex } from '../utils';\n\nexport class LocalStorageDB implements ITreeStorage {\n 
private _currentRoot: Hash;\n\n constructor(private readonly _prefix: Bytes) {\n const rootStr = localStorage.getItem(bytes2Hex(_prefix));\n if (rootStr) {\n const bytes: number[] = JSON.parse(rootStr);\n\n this._currentRoot = new Hash(Uint8Array.from(bytes));\n } else {\n this._currentRoot = ZERO_HASH;\n }\n }\n\n async get(k: Bytes): Promise<Node | undefined> {\n const kBytes = new Uint8Array([...this._prefix, ...k]);\n const key = bytes2Hex(kBytes);\n const val = localStorage.getItem(key);\n\n if (val === null) {\n return undefined;\n }\n\n const obj = JSON.parse(val);\n switch (obj.type) {\n case NODE_TYPE_EMPTY:\n return new NodeEmpty();\n case NODE_TYPE_MIDDLE:\n const cL = new Hash(Uint8Array.from(obj.childL));\n const cR = new Hash(Uint8Array.from(obj.childR));\n\n return new NodeMiddle(cL, cR);\n case NODE_TYPE_LEAF:\n const k = new Hash(Uint8Array.from(obj.entry[0]));\n const v = new Hash(Uint8Array.from(obj.entry[1]));\n\n return new NodeLeaf(k, v);\n }\n\n throw `error: value found for key ${bytes2Hex(kBytes)} is not of type Node`;\n }\n\n async put(k: Bytes, n: Node): Promise<void> {\n const kBytes = new Uint8Array([...this._prefix, ...k]);\n const key = bytes2Hex(kBytes);\n const toSerialize: Record<string, unknown> = {\n type: n.type\n };\n if (n instanceof NodeMiddle) {\n toSerialize.childL = Array.from(n.childL.bytes);\n toSerialize.childR = Array.from(n.childR.bytes);\n } else if (n instanceof NodeLeaf) {\n toSerialize.entry = [Array.from(n.entry[0].bytes), Array.from(n.entry[1].bytes)];\n }\n const val = JSON.stringify(toSerialize);\n localStorage.setItem(key, val);\n }\n\n async getRoot(): Promise<Hash> {\n return this._currentRoot;\n }\n\n async setRoot(r: Hash): Promise<void> {\n this._currentRoot = r;\n localStorage.setItem(bytes2Hex(this._prefix), JSON.stringify(Array.from(r.bytes)));\n }\n}\n", "import { Bytes, Node } from '../../types';\nimport { ITreeStorage } from '../../types/storage';\nimport { Hash, ZERO_HASH } from '../hash/hash';\nimport { bytes2Hex } from '../utils';\nimport { get, set, UseStore, createStore } from 'idb-keyval';\nimport { NODE_TYPE_EMPTY, NODE_TYPE_LEAF, NODE_TYPE_MIDDLE } from '../../constants';\nimport { NodeEmpty, NodeLeaf, NodeMiddle } from '../node/node';\n\nexport class IndexedDBStorage implements ITreeStorage {\n public static readonly storageName = 'merkle-tree';\n\n private readonly _prefixHash: string;\n private readonly _store: UseStore;\n\n private _currentRoot: Hash;\n\n constructor(private readonly _prefix: Bytes, databaseName?: string) {\n this._currentRoot = ZERO_HASH;\n this._prefixHash = bytes2Hex(_prefix);\n this._store = createStore(\n `${databaseName ?? 
IndexedDBStorage.storageName}-db`,\n IndexedDBStorage.storageName\n );\n }\n\n async get(k: Bytes): Promise<Node | undefined> {\n const kBytes = new Uint8Array([...this._prefix, ...k]);\n const key = bytes2Hex(kBytes);\n const obj = await get(key, this._store);\n if (obj === null || obj === undefined) {\n return undefined;\n }\n if (obj.type === NODE_TYPE_EMPTY) {\n return new NodeEmpty();\n }\n if (obj.type === NODE_TYPE_MIDDLE) {\n const cL = new Hash(Uint8Array.from(obj.childL.bytes));\n const cR = new Hash(Uint8Array.from(obj.childR.bytes));\n return new NodeMiddle(cL, cR);\n }\n if (obj.type === NODE_TYPE_LEAF) {\n const k = new Hash(Uint8Array.from(obj.entry[0].bytes));\n const v = new Hash(Uint8Array.from(obj.entry[1].bytes));\n\n return new NodeLeaf(k, v);\n }\n throw new Error(`error: value found for key ${key} is not of type Node`);\n }\n\n async put(k: Bytes, n: Node): Promise<void> {\n const kBytes = new Uint8Array([...this._prefix, ...k]);\n const key = bytes2Hex(kBytes);\n await set(key, n, this._store);\n }\n\n async getRoot(): Promise<Hash> {\n if (!this._currentRoot.equals(ZERO_HASH)) {\n return this._currentRoot;\n }\n const root = await get(this._prefixHash, this._store);\n\n if (!root) {\n this._currentRoot = ZERO_HASH;\n } else {\n this._currentRoot = new Hash(root.bytes);\n }\n return this._currentRoot;\n }\n\n async setRoot(r: Hash): Promise<void> {\n await set(this._prefixHash, r, this._store);\n this._currentRoot = r;\n }\n}\n", "import { ELEM_BYTES_LEN } from '../../constants';\nimport { Bytes } from '../../types';\nimport { bytes2Hex, newBigIntFromBytes, swapEndianness } from '../utils';\n\nexport class ElemBytes {\n // Little Endian\n private _bytes: Bytes;\n\n constructor() {\n this._bytes = new Uint8Array(ELEM_BYTES_LEN);\n }\n\n get value(): Bytes {\n return this._bytes;\n }\n\n set value(b: Bytes) {\n this._bytes = b;\n }\n\n bigInt(): bigint {\n return newBigIntFromBytes(swapEndianness(this._bytes));\n }\n\n string(): string {\n const hexStr = bytes2Hex(this._bytes.slice(0, 4));\n return `${hexStr}...`;\n }\n}\n", "import { ElemBytes } from './elemBytes';\nimport { DATA_LEN, DATA_LEN_BYTES, ELEM_BYTES_LEN } from '../../constants';\nimport { bytesEqual } from '../utils';\nimport { Bytes } from '../../types';\n\nexport class Data {\n private _value: Array<ElemBytes>;\n\n constructor() {\n this._value = new Array<ElemBytes>(DATA_LEN);\n }\n\n get value(): Array<ElemBytes> {\n return this._value;\n }\n\n set value(_v: ElemBytes[]) {\n if (_v.length !== DATA_LEN) {\n throw `expected bytes length to be ${DATA_LEN}, got ${_v.length}`;\n }\n this._value = _v;\n }\n\n bytes(): Bytes {\n const b = new Uint8Array(DATA_LEN * ELEM_BYTES_LEN);\n\n for (let idx = 0; idx < DATA_LEN; idx += 1) {\n this._value[idx].value.forEach((v, _idx) => {\n b[idx * ELEM_BYTES_LEN + _idx] = v;\n });\n }\n return b;\n }\n\n equal(d2: Data): boolean {\n return (\n bytesEqual(this._value[0].value, d2.value[0].value) &&\n bytesEqual(this._value[1].value, d2.value[1].value) &&\n bytesEqual(this._value[2].value, d2.value[2].value) &&\n bytesEqual(this._value[3].value, d2.value[3].value)\n );\n }\n}\n\nexport const newDataFromBytes = (bytes: Bytes): Data => {\n if (bytes.length !== DATA_LEN_BYTES) {\n throw `expected bytes length to be ${DATA_LEN_BYTES}, got ${bytes.length}`;\n }\n const d = new Data();\n const arr = new Array<ElemBytes>(DATA_LEN_BYTES);\n\n for (let i = 0; i < DATA_LEN; i += 1) {\n const tmp = new ElemBytes();\n tmp.value = bytes.slice(i * ELEM_BYTES_LEN, (i + 1) * 
DATA_LEN_BYTES);\n arr[i] = tmp;\n }\n\n d.value = arr;\n return d;\n};\n", "import { Data } from '../entry/data';\nimport { Hash, ZERO_HASH, hashElems } from '../hash/hash';\nimport { checkBigIntInField } from '../utils';\n\nimport { ElemBytes } from './elemBytes';\n\nexport class Entry {\n private _data: Data;\n private _hIndex: Hash;\n private _hValue: Hash;\n\n constructor(_data?: Data) {\n this._data = _data ? _data : new Data();\n this._hIndex = ZERO_HASH;\n this._hValue = ZERO_HASH;\n }\n\n get data(): Data {\n return this._data;\n }\n\n get index(): Array<ElemBytes> {\n return this._data.value.slice(0, 4);\n }\n\n get value(): Array<ElemBytes> {\n return this._data.value.slice(4, 8);\n }\n\n async hIndex(): Promise<Hash> {\n if (this._hIndex === ZERO_HASH) {\n return hashElems(elemBytesToBigInts(this.index));\n }\n return this._hIndex;\n }\n\n async hValue(): Promise<Hash> {\n if (this._hValue === ZERO_HASH) {\n return hashElems(elemBytesToBigInts(this.value));\n }\n return this._hValue;\n }\n\n hiHv(): Promise<{ hi: Hash; hv: Hash }> {\n return (async () => {\n const hi = await this.hIndex();\n const hv = await this.hValue();\n return { hi, hv };\n })();\n }\n\n bytes(): Array<ElemBytes> {\n return this._data.value;\n }\n\n equal(e2: Entry): boolean {\n return this._data.equal(e2.data);\n }\n\n clone(): Entry {\n return new Entry(this._data);\n }\n}\n\nexport const elemBytesToBigInts = (es: Array<ElemBytes>): Array<bigint> => {\n const bigInts = es.map((e) => {\n return e.bigInt();\n });\n\n return bigInts;\n};\n\nexport const checkEntryInField = (e: Entry): boolean => {\n const bigInts = elemBytesToBigInts(e.data.value);\n let flag = true;\n\n bigInts.forEach((b) => {\n if (!checkBigIntInField(b)) {\n flag = false;\n }\n });\n\n return flag;\n};\n", "export const ErrNodeKeyAlreadyExists = 'key already exists';\n// ErrKeyNotFound is used when a key is not found in the MerkleTree.\nexport const ErrKeyNotFound = 'Key not found in the MerkleTree';\n// ErrNodeBytesBadSize is used when the data of a node has an incorrect\n// size and can't be parsed.\nexport const ErrNodeBytesBadSize = 'node data has incorrect size in the DB';\n// ErrReachedMaxLevel is used when a traversal of the MT reaches the\n// maximum level.\nexport const ErrReachedMaxLevel = 'reached maximum level of the merkle tree';\n// ErrInvalidNodeFound is used when an invalid node is found and can't\n// be parsed.\nexport const ErrInvalidNodeFound = 'found an invalid node in the DB';\n// ErrInvalidProofBytes is used when a serialized proof is invalid.\nexport const ErrInvalidProofBytes = 'the serialized proof is invalid';\n// ErrInvalidDBValue is used when a value in the key value DB is\n// invalid (for example, it doesn't contain a byte header and a []byte\n// body of at least len=1.\nexport const ErrInvalidDBValue = 'the value in the DB is invalid';\n// ErrEntryIndexAlreadyExists is used when the entry index already\n// exists in the tree.\nexport const ErrEntryIndexAlreadyExists = 'the entry index already exists in the tree';\n// ErrNotWritable is used when the MerkleTree is not writable and a\n// write function is called\nexport const ErrNotWritable = 'Merkle Tree not writable';\n", "export const ErrNotFound = 'key not found';\n", "import { ICircomProcessorProof, ICircomVerifierProof, Siblings } from '../../types/merkletree';\nimport { Hash, ZERO_HASH } from '../hash/hash';\n\nexport class CircomVerifierProof implements ICircomVerifierProof {\n root: Hash;\n siblings: Siblings;\n oldKey: Hash;\n oldValue: Hash;\n 
isOld0: boolean;\n key: Hash;\n value: Hash;\n // 0: inclusion, 1: non inclusion\n fnc: number;\n\n constructor(\n _root: Hash = ZERO_HASH,\n _siblings: Siblings = [],\n _oldKey: Hash = ZERO_HASH,\n _oldValue: Hash = ZERO_HASH,\n _isOld0 = false,\n _key: Hash = ZERO_HASH,\n _value: Hash = ZERO_HASH,\n _fnc = 0\n ) {\n this.root = _root;\n this.siblings = _siblings;\n this.oldKey = _oldKey;\n this.oldValue = _oldValue;\n this.isOld0 = _isOld0;\n this.key = _key;\n this.value = _value;\n this.fnc = _fnc;\n }\n}\n\nexport class CircomProcessorProof implements ICircomProcessorProof {\n oldRoot: Hash;\n newRoot: Hash;\n siblings: Siblings;\n oldKey: Hash;\n oldValue: Hash;\n newKey: Hash;\n newValue: Hash;\n isOld0: boolean;\n // 0: NOP, 1: Update, 2: Insert, 3: Delete\n fnc: number;\n\n constructor(\n _oldRoot: Hash = ZERO_HASH,\n _newRoot: Hash = ZERO_HASH,\n _siblings: Siblings = [],\n _oldKey: Hash = ZERO_HASH,\n _oldValue: Hash = ZERO_HASH,\n _newKey: Hash = ZERO_HASH,\n _newValue: Hash = ZERO_HASH,\n _isOld0 = false,\n _fnc = 0\n ) {\n this.oldRoot = _oldRoot;\n this.newRoot = _newRoot;\n this.siblings = _siblings;\n this.oldKey = _oldKey;\n this.oldValue = _oldValue;\n this.newKey = _newKey;\n this.newValue = _newValue;\n this.isOld0 = _isOld0;\n this.fnc = _fnc;\n }\n}\n", "export const ErrNodeAuxNonExistAgainstHIndex =\n 'non-existence proof being checked against hIndex equal to nodeAux';\n", "import { NodeAux, Siblings } from '../../types/merkletree';\nimport { ELEM_BYTES_LEN, NOT_EMPTIES_LEN, PROOF_FLAG_LEN } from '../../constants';\nimport { bytesEqual, getPath, setBitBigEndian, siblings2Bytes, testBitBigEndian } from '../utils';\nimport { Hash, ZERO_HASH } from '../hash/hash';\nimport { NodeMiddle } from '../node/node';\nimport { leafKey } from '../utils/node';\nimport { ErrNodeAuxNonExistAgainstHIndex } from '../errors/proof';\nimport { Bytes } from '../../types';\n\nexport interface ProofJSON {\n existence: boolean;\n siblings: string[];\n node_aux?: NodeAuxJSON; // this is a right representation of auxiliary node field according to the specification, nodeAux will be deprecated.\n /**\n * @deprecated old version is deprecated, do not use it.\n */\n nodeAux?: NodeAuxJSON; // old version of representation of auxiliary node.\n}\n\nexport interface NodeAuxJSON {\n key: string;\n value: string;\n}\n\nexport class Proof {\n existence: boolean;\n private depth: number;\n // notEmpties is a bitmap of non-empty siblings found in siblings\n private notEmpties: Bytes;\n private siblings: Siblings;\n nodeAux: NodeAux | undefined;\n\n constructor(obj?: { siblings: Siblings; nodeAux: NodeAux | undefined; existence: boolean }) {\n this.existence = obj?.existence ?? 
false;\n this.depth = 0;\n this.nodeAux = obj?.nodeAux;\n\n const { siblings, notEmpties } = this.reduceSiblings(obj?.siblings);\n this.siblings = siblings;\n this.notEmpties = notEmpties;\n }\n\n bytes(): Bytes {\n let bsLen = PROOF_FLAG_LEN + this.notEmpties.length + ELEM_BYTES_LEN * this.siblings.length;\n\n if (typeof this.nodeAux !== 'undefined') {\n bsLen += 2 * ELEM_BYTES_LEN;\n }\n\n const arrBuff = new ArrayBuffer(bsLen);\n const bs = new Uint8Array(arrBuff);\n\n if (!this.existence) {\n bs[0] |= 1;\n }\n bs[1] = this.depth;\n bs.set(this.notEmpties, PROOF_FLAG_LEN);\n const siblingBytes = siblings2Bytes(this.siblings);\n bs.set(siblingBytes, this.notEmpties.length + PROOF_FLAG_LEN);\n\n if (typeof this.nodeAux !== 'undefined') {\n bs[0] |= 2;\n bs.set(this.nodeAux.key.value, bs.length - 2 * ELEM_BYTES_LEN);\n bs.set(this.nodeAux.value.value, bs.length - 1 * ELEM_BYTES_LEN);\n }\n return bs;\n }\n\n toJSON() {\n return {\n existence: this.existence,\n siblings: this.allSiblings().map((s) => s.toJSON()),\n node_aux: this.nodeAux\n ? {\n key: this.nodeAux.key.toJSON(),\n value: this.nodeAux.value.toJSON()\n }\n : undefined\n };\n }\n\n private reduceSiblings(siblings?: Siblings): { notEmpties: Uint8Array; siblings: Siblings } {\n const reducedSiblings: Siblings = [];\n const notEmpties = new Uint8Array(NOT_EMPTIES_LEN);\n\n if (!siblings) {\n return { siblings: reducedSiblings, notEmpties };\n }\n for (let i = 0; i < siblings.length; i++) {\n const sibling = siblings[i];\n if (JSON.stringify(siblings[i]) !== JSON.stringify(ZERO_HASH)) {\n setBitBigEndian(notEmpties, i);\n reducedSiblings.push(sibling);\n this.depth = i + 1;\n }\n }\n return { notEmpties, siblings: reducedSiblings };\n }\n\n public static fromJSON(obj: ProofJSON): Proof {\n let nodeAux: NodeAux | undefined = undefined;\n const nodeAuxJson: NodeAuxJSON | undefined = obj.node_aux ?? obj.nodeAux; // we keep backward compatibility and support both representations\n if (nodeAuxJson) {\n nodeAux = {\n key: Hash.fromString(nodeAuxJson.key),\n value: Hash.fromString(nodeAuxJson.value)\n };\n }\n const existence = obj.existence ?? 
false;\n\n const siblings: Siblings = obj.siblings.map((s) => Hash.fromString(s));\n\n return new Proof({ existence, nodeAux, siblings });\n }\n\n allSiblings(): Siblings {\n return Proof.buildAllSiblings(this.depth, this.notEmpties, this.siblings);\n }\n\n public static buildAllSiblings(\n depth: number,\n notEmpties: Uint8Array,\n siblings: Hash[]\n ): Siblings {\n let sibIdx = 0;\n const allSiblings: Siblings = [];\n\n for (let i = 0; i < depth; i += 1) {\n if (testBitBigEndian(notEmpties, i)) {\n allSiblings.push(siblings[sibIdx]);\n sibIdx += 1;\n } else {\n allSiblings.push(ZERO_HASH);\n }\n }\n return allSiblings;\n }\n}\n\n/**\n * @deprecated The method should not be used and will be removed in the next major version,\n * please use proof.allSiblings instead\n */\n// eslint-disable-next-line @cspell/spellchecker\nexport const siblignsFroomProof = (proof: Proof): Siblings => {\n return proof.allSiblings();\n};\n\nexport const verifyProof = async (\n rootKey: Hash,\n proof: Proof,\n k: bigint,\n v: bigint\n): Promise<boolean> => {\n try {\n const rFromProof = await rootFromProof(proof, k, v);\n return bytesEqual(rootKey.value, rFromProof.value);\n } catch (err) {\n if (err === ErrNodeAuxNonExistAgainstHIndex) {\n return false;\n }\n throw err;\n }\n};\n\nexport const rootFromProof = async (proof: Proof, k: bigint, v: bigint): Promise<Hash> => {\n const kHash = Hash.fromBigInt(k);\n const vHash = Hash.fromBigInt(v);\n let midKey: Hash;\n\n if (proof.existence) {\n midKey = await leafKey(kHash, vHash);\n } else {\n if (typeof proof.nodeAux === 'undefined') {\n midKey = ZERO_HASH;\n } else {\n const nodeAux = proof.nodeAux as unknown as NodeAux;\n if (bytesEqual(kHash.value, nodeAux.key.value)) {\n throw ErrNodeAuxNonExistAgainstHIndex;\n }\n midKey = await leafKey(nodeAux.key, nodeAux.value);\n }\n }\n\n const siblings = proof.allSiblings();\n\n const path = getPath(siblings.length, kHash.value);\n\n for (let i = siblings.length - 1; i >= 0; i -= 1) {\n if (path[i]) {\n midKey = await new NodeMiddle(siblings[i], midKey).getKey();\n } else {\n midKey = await new NodeMiddle(midKey, siblings[i]).getKey();\n }\n }\n\n return midKey;\n};\n", "import { ITreeStorage } from '../../types/storage';\nimport { Hash, ZERO_HASH, circomSiblingsFromSiblings } from '../hash/hash';\n\nimport { Node } from '../../types';\nimport { NODE_TYPE_EMPTY, NODE_TYPE_LEAF, NODE_TYPE_MIDDLE } from '../../constants';\nimport { NodeEmpty, NodeLeaf, NodeMiddle } from '../node/node';\nimport { bytesEqual, getPath } from '../utils';\nimport { NodeAux, Siblings } from '../../types/merkletree';\nimport { checkBigIntInField } from '../utils/crypto';\nimport { CircomProcessorProof, CircomVerifierProof } from './circom';\nimport {\n ErrEntryIndexAlreadyExists,\n ErrInvalidNodeFound,\n ErrKeyNotFound,\n ErrNotFound,\n ErrNotWritable,\n ErrReachedMaxLevel\n} from '../errors';\nimport { Proof } from './proof';\nimport { Entry, checkEntryInField } from '../entry';\n\nexport class Merkletree {\n private _db: ITreeStorage;\n private _root!: Hash;\n private _writable: boolean;\n private _maxLevel: number;\n\n constructor(_db: ITreeStorage, _writable: boolean, _maxLevels: number) {\n this._db = _db;\n this._writable = _writable;\n this._maxLevel = _maxLevels;\n }\n\n async root(): Promise<Hash> {\n if (!this._root) {\n this._root = await this._db.getRoot();\n }\n return this._root;\n }\n\n get maxLevels(): number {\n return this._maxLevel;\n }\n\n async add(k: bigint, v: bigint): Promise<void> {\n if (!this._writable) {\n throw 
ErrNotWritable;\n }\n\n this._root = await this.root();\n const kHash = Hash.fromBigInt(k);\n const vHash = Hash.fromBigInt(v);\n\n const newNodeLeaf = new NodeLeaf(kHash, vHash);\n const path = getPath(this.maxLevels, kHash.value);\n\n const newRootKey = await this.addLeaf(newNodeLeaf, this._root, 0, path);\n this._root = newRootKey;\n await this._db.setRoot(this._root);\n }\n\n async updateNode(n: Node): Promise<Hash> {\n if (!this._writable) {\n throw ErrNotWritable;\n }\n\n if (n.type === NODE_TYPE_EMPTY) {\n return await n.getKey();\n }\n\n const k = await n.getKey();\n\n await this._db.put(k.value, n);\n return k;\n }\n\n async addNode(n: Node): Promise<Hash> {\n if (!this._writable) {\n throw ErrNotWritable;\n }\n if (n.type === NODE_TYPE_EMPTY) {\n return await n.getKey();\n }\n\n const k = await n.getKey();\n // if (typeof this.#db.get(k.value) !== 'undefined') {\n // throw ErrNodeKeyAlreadyExists;\n // }\n\n await this._db.put(k.value, n);\n return k;\n }\n\n async addEntry(e: Entry): Promise<void> {\n if (!this._writable) {\n throw ErrNotWritable;\n }\n\n if (!checkEntryInField(e)) {\n throw 'elements not inside the finite field over r';\n }\n this._root = await this._db.getRoot();\n const hIndex = await e.hIndex();\n const hValue = await e.hValue();\n\n const newNodeLeaf = new NodeLeaf(hIndex, hValue);\n const path = getPath(this.maxLevels, hIndex.value);\n\n const newRootKey = await this.addLeaf(newNodeLeaf, this._root, 0, path);\n this._root = newRootKey;\n await this._db.setRoot(this._root);\n }\n\n async pushLeaf(\n newLeaf: Node,\n oldLeaf: Node,\n lvl: number,\n pathNewLeaf: Array<boolean>,\n pathOldLeaf: Array<boolean>\n ): Promise<Hash> {\n if (lvl > this._maxLevel - 2) {\n throw new Error(ErrReachedMaxLevel);\n }\n\n let newNodeMiddle: NodeMiddle;\n\n if (pathNewLeaf[lvl] === pathOldLeaf[lvl]) {\n const nextKey = await this.pushLeaf(newLeaf, oldLeaf, lvl + 1, pathNewLeaf, pathOldLeaf);\n if (pathNewLeaf[lvl]) {\n newNodeMiddle = new NodeMiddle(new Hash(), nextKey);\n } else {\n newNodeMiddle = new NodeMiddle(nextKey, new Hash());\n }\n\n return await this.addNode(newNodeMiddle);\n }\n\n const oldLeafKey = await oldLeaf.getKey();\n const newLeafKey = await newLeaf.getKey();\n\n if (pathNewLeaf[lvl]) {\n newNodeMiddle = new NodeMiddle(oldLeafKey, newLeafKey);\n } else {\n newNodeMiddle = new NodeMiddle(newLeafKey, oldLeafKey);\n }\n\n await this.addNode(newLeaf);\n return await this.addNode(newNodeMiddle);\n }\n\n async addLeaf(newLeaf: NodeLeaf, key: Hash, lvl: number, path: Array<boolean>): Promise<Hash> {\n if (lvl > this._maxLevel - 1) {\n throw new Error(ErrReachedMaxLevel);\n }\n\n const n = await this.getNode(key);\n if (typeof n === 'undefined') {\n throw ErrNotFound;\n }\n\n switch (n.type) {\n case NODE_TYPE_EMPTY:\n return this.addNode(newLeaf);\n case NODE_TYPE_LEAF: {\n const nKey = (n as NodeLeaf).entry[0];\n const newLeafKey = newLeaf.entry[0];\n\n if (bytesEqual(nKey.value, newLeafKey.value)) {\n throw ErrEntryIndexAlreadyExists;\n }\n\n const pathOldLeaf = getPath(this.maxLevels, nKey.value);\n return this.pushLeaf(newLeaf, n, lvl, path, pathOldLeaf);\n }\n case NODE_TYPE_MIDDLE: {\n n as NodeMiddle;\n let newNodeMiddle: NodeMiddle;\n\n if (path[lvl]) {\n const nextKey = await this.addLeaf(newLeaf, (n as NodeMiddle).childR, lvl + 1, path);\n newNodeMiddle = new NodeMiddle((n as NodeMiddle).childL, nextKey);\n } else {\n const nextKey = await this.addLeaf(newLeaf, (n as NodeMiddle).childL, lvl + 1, path);\n newNodeMiddle = new NodeMiddle(nextKey, (n as 
NodeMiddle).childR);\n }\n\n return this.addNode(newNodeMiddle);\n }\n default: {\n throw ErrInvalidNodeFound;\n }\n }\n }\n\n async get(k: bigint): Promise<{ key: bigint; value: bigint; siblings: Siblings }> {\n const kHash = Hash.fromBigInt(k);\n const path = getPath(this.maxLevels, kHash.value);\n\n let nextKey = await this.root();\n const siblings: Siblings = [];\n\n for (let i = 0; i < this.maxLevels; i++) {\n const n = await this.getNode(nextKey);\n if (typeof n === 'undefined') {\n throw ErrKeyNotFound;\n }\n\n switch (n.type) {\n case NODE_TYPE_EMPTY:\n return {\n key: BigInt('0'),\n value: BigInt('0'),\n siblings\n };\n case NODE_TYPE_LEAF:\n // if (bytesEqual(kHash.value, (n as NodeLeaf).entry[0].value)) {\n // return {\n // key: (n as NodeLeaf).entry[0].BigInt(),\n // value: (n as NodeLeaf).entry[1].BigInt(),\n // siblings,\n // };\n // }\n return {\n key: (n as NodeLeaf).entry[0].bigInt(),\n value: (n as NodeLeaf).entry[1].bigInt(),\n siblings\n };\n case NODE_TYPE_MIDDLE:\n if (path[i]) {\n nextKey = (n as NodeMiddle).childR;\n siblings.push((n as NodeMiddle).childL);\n } else {\n nextKey = (n as NodeMiddle).childL;\n siblings.push((n as NodeMiddle).childR);\n }\n break;\n default:\n throw ErrInvalidNodeFound;\n }\n }\n\n throw new Error(ErrReachedMaxLevel);\n }\n\n async update(k: bigint, v: bigint): Promise<CircomProcessorProof> {\n if (!this._writable) {\n throw ErrNotWritable;\n }\n\n if (!checkBigIntInField(k)) {\n throw 'key not inside the finite field';\n }\n if (!checkBigIntInField(v)) {\n throw 'key not inside the finite field';\n }\n\n const kHash = Hash.fromBigInt(k);\n const vHash = Hash.fromBigInt(v);\n\n const path = getPath(this.maxLevels, kHash.value);\n\n const cp = new CircomProcessorProof();\n\n cp.fnc = 1;\n cp.oldRoot = await this.root();\n cp.oldKey = kHash;\n cp.newKey = kHash;\n cp.newValue = vHash;\n\n let nextKey = await this.root();\n const siblings: Siblings = [];\n\n for (let i = 0; i < this.maxLevels; i += 1) {\n const n = await this.getNode(nextKey);\n if (typeof n === 'undefined') {\n throw ErrNotFound;\n }\n\n switch (n.type) {\n case NODE_TYPE_EMPTY:\n throw ErrKeyNotFound;\n case NODE_TYPE_LEAF:\n if (bytesEqual(kHash.value, (n as NodeLeaf).entry[0].value)) {\n cp.oldValue = (n as NodeLeaf).entry[1];\n cp.siblings = circomSiblingsFromSiblings([...siblings], this.maxLevels);\n const newNodeLeaf = new NodeLeaf(kHash, vHash);\n await this.updateNode(newNodeLeaf);\n\n const newRootKey = await this.recalculatePathUntilRoot(path, newNodeLeaf, siblings);\n\n this._root = newRootKey;\n await this._db.setRoot(newRootKey);\n cp.newRoot = newRootKey;\n return cp;\n }\n break;\n case NODE_TYPE_MIDDLE:\n if (path[i]) {\n nextKey = (n as NodeMiddle).childR;\n siblings.push((n as NodeMiddle).childL);\n } else {\n nextKey = (n as NodeMiddle).childL;\n siblings.push((n as NodeMiddle).childR);\n }\n break;\n default:\n throw ErrInvalidNodeFound;\n }\n }\n\n throw ErrKeyNotFound;\n }\n\n async getNode(k: Hash): Promise<Node | undefined> {\n if (bytesEqual(k.value, ZERO_HASH.value)) {\n return new NodeEmpty();\n }\n return await this._db.get(k.value);\n }\n\n async recalculatePathUntilRoot(\n path: Array<boolean>,\n node: Node,\n siblings: Siblings\n ): Promise<Hash> {\n for (let i = siblings.length - 1; i >= 0; i -= 1) {\n const nodeKey = await node.getKey();\n if (path[i]) {\n node = new NodeMiddle(siblings[i], nodeKey);\n } else {\n node = new NodeMiddle(nodeKey, siblings[i]);\n }\n await this.addNode(node);\n }\n\n const nodeKey = await node.getKey();\n 
return nodeKey;\n }\n\n // Delete removes the specified Key from the MerkleTree and updates the path\n // from the deleted key to the Root with the new values. This method removes\n // the key from the MerkleTree, but does not remove the old nodes from the\n // key-value database; this means that if the tree is accessed by an old Root\n // where the key was not deleted yet, the key will still exist. If is desired\n // to remove the key-values from the database that are not under the current\n // Root, an option could be to dump all the leaves (using mt.DumpLeafs) and\n // import them in a new MerkleTree in a new database (using\n // mt.ImportDumpedLeafs), but this will loose all the Root history of the\n // MerkleTree\n async delete(k: bigint): Promise<void> {\n if (!this._writable) {\n throw ErrNotWritable;\n }\n\n const kHash = Hash.fromBigInt(k);\n const path = getPath(this.maxLevels, kHash.value);\n\n let nextKey = this._root;\n const siblings: Siblings = [];\n\n for (let i = 0; i < this._maxLevel; i += 1) {\n const n = await this.getNode(nextKey);\n if (typeof n === 'undefined') {\n throw ErrNotFound;\n }\n switch (n.type) {\n case NODE_TYPE_EMPTY:\n throw ErrKeyNotFound;\n case NODE_TYPE_LEAF:\n if (bytesEqual(kHash.bytes, (n as NodeLeaf).entry[0].value)) {\n await this.rmAndUpload(path, kHash, siblings);\n return;\n }\n throw ErrKeyNotFound;\n case NODE_TYPE_MIDDLE:\n if (path[i]) {\n nextKey = (n as NodeMiddle).childR;\n siblings.push((n as NodeMiddle).childL);\n } else {\n nextKey = (n as NodeMiddle).childL;\n siblings.push((n as NodeMiddle).childR);\n }\n break;\n default:\n throw ErrInvalidNodeFound;\n }\n }\n\n throw ErrKeyNotFound;\n }\n\n async rmAndUpload(path: Array<boolean>, kHash: Hash, siblings: Siblings): Promise<void> {\n if (siblings.length === 0) {\n this._root = ZERO_HASH;\n await this._db.setRoot(this._root);\n return;\n }\n\n const toUpload = siblings[siblings.length - 1];\n if (siblings.length < 2) {\n this._root = siblings[0];\n await this._db.setRoot(this._root);\n }\n\n const nearestSibling = await this._db.get(toUpload.bytes);\n if (nearestSibling?.type === NODE_TYPE_MIDDLE) {\n let newNode: Node;\n if (path[siblings.length - 1]) {\n newNode = new NodeMiddle(toUpload, ZERO_HASH);\n } else {\n newNode = new NodeMiddle(ZERO_HASH, toUpload);\n }\n await this.addNode(newNode);\n const newRootKey = await this.recalculatePathUntilRoot(\n path,\n newNode,\n siblings.slice(0, siblings.length - 1)\n );\n this._root = newRootKey;\n await this._db.setRoot(this._root);\n return;\n }\n\n for (let i = siblings.length - 2; i >= 0; i -= 1) {\n if (!bytesEqual(siblings[i].value, ZERO_HASH.value)) {\n let newNode: Node;\n if (path[i]) {\n newNode = new NodeMiddle(siblings[i], toUpload);\n } else {\n newNode = new NodeMiddle(toUpload, siblings[i]);\n }\n await this.addNode(newNode);\n\n const newRootKey = await this.recalculatePathUntilRoot(path, newNode, siblings.slice(0, i));\n\n this._root = newRootKey;\n await this._db.setRoot(this._root);\n break;\n }\n\n if (i === 0) {\n this._root = toUpload;\n await this._db.setRoot(this._root);\n break;\n }\n }\n }\n\n async recWalk(key: Hash, f: (n: Node) => Promise<void>): Promise<void> {\n const n = await this.getNode(key);\n if (typeof n === 'undefined') {\n throw ErrNotFound;\n }\n\n switch (n.type) {\n case NODE_TYPE_EMPTY:\n await f(n);\n break;\n case NODE_TYPE_LEAF:\n await f(n);\n break;\n case NODE_TYPE_MIDDLE:\n await f(n);\n await this.recWalk((n as NodeMiddle).childL, f);\n await this.recWalk((n as NodeMiddle).childR, 
f);\n break;\n default:\n throw ErrInvalidNodeFound;\n }\n }\n\n async walk(rootKey: Hash, f: (n: Node) => Promise<void>): Promise<void> {\n if (bytesEqual(rootKey.value, ZERO_HASH.value)) {\n rootKey = await this.root();\n }\n await this.recWalk(rootKey, f);\n }\n\n async generateCircomVerifierProof(k: bigint, rootKey: Hash): Promise<CircomVerifierProof> {\n const cp = await this.generateSCVerifierProof(k, rootKey);\n cp.siblings = circomSiblingsFromSiblings(cp.siblings, this.maxLevels);\n return cp;\n }\n\n async generateSCVerifierProof(k: bigint, rootKey: Hash): Promise<CircomVerifierProof> {\n if (bytesEqual(rootKey.value, ZERO_HASH.value)) {\n rootKey = await this.root();\n }\n\n const { proof, value } = await this.generateProof(k, rootKey);\n const cp = new CircomVerifierProof();\n cp.root = rootKey;\n cp.siblings = proof.allSiblings();\n if (typeof proof.nodeAux !== 'undefined') {\n cp.oldKey = proof.nodeAux.key;\n cp.oldValue = proof.nodeAux.value;\n } else {\n cp.oldKey = ZERO_HASH;\n cp.oldValue = ZERO_HASH;\n }\n cp.key = Hash.fromBigInt(k);\n cp.value = Hash.fromBigInt(value);\n\n if (proof.existence) {\n cp.fnc = 0;\n } else {\n cp.fnc = 1;\n }\n\n return cp;\n }\n\n async generateProof(k: bigint, rootKey?: Hash): Promise<{ proof: Proof; value: bigint }> {\n let siblingKey: Hash;\n\n const kHash = Hash.fromBigInt(k);\n const path = getPath(this.maxLevels, kHash.value);\n if (!rootKey) {\n rootKey = await this.root();\n }\n let nextKey = rootKey;\n\n let depth = 0;\n let existence = false;\n const siblings: Siblings = [];\n let nodeAux: NodeAux | undefined;\n\n for (depth = 0; depth < this.maxLevels; depth += 1) {\n const n = await this.getNode(nextKey);\n if (typeof n === 'undefined') {\n throw ErrNotFound;\n }\n switch (n.type) {\n case NODE_TYPE_EMPTY:\n return {\n proof: new Proof({\n existence,\n nodeAux,\n siblings\n }),\n value: BigInt('0')\n };\n case NODE_TYPE_LEAF:\n if (bytesEqual(kHash.value, (n as NodeLeaf).entry[0].value)) {\n existence = true;\n\n return {\n proof: new Proof({\n existence,\n nodeAux,\n siblings\n }),\n value: (n as NodeLeaf).entry[1].bigInt()\n };\n }\n nodeAux = {\n key: (n as NodeLeaf).entry[0],\n value: (n as NodeLeaf).entry[1]\n };\n return {\n proof: new Proof({\n existence,\n nodeAux,\n siblings\n }),\n value: (n as NodeLeaf).entry[1].bigInt()\n };\n case NODE_TYPE_MIDDLE:\n if (path[depth]) {\n nextKey = (n as NodeMiddle).childR;\n siblingKey = (n as NodeMiddle).childL;\n } else {\n nextKey = (n as NodeMiddle).childL;\n siblingKey = (n as NodeMiddle).childR;\n }\n break;\n default:\n throw ErrInvalidNodeFound;\n }\n siblings.push(siblingKey);\n }\n throw ErrKeyNotFound;\n }\n\n async addAndGetCircomProof(k: bigint, v: bigint): Promise<CircomProcessorProof> {\n const cp = new CircomProcessorProof();\n cp.fnc = 2;\n cp.oldRoot = await this.root();\n let key = BigInt('0');\n let value = BigInt('0');\n let siblings: Siblings = [];\n try {\n const res = await this.get(k);\n key = res.key;\n value = res.value;\n siblings = res.siblings;\n } catch (err) {\n if (err !== ErrKeyNotFound) {\n throw err;\n }\n }\n\n if (typeof key === 'undefined' || typeof value === 'undefined') {\n throw 'key/value undefined';\n }\n\n cp.oldKey = Hash.fromBigInt(key);\n cp.oldValue = Hash.fromBigInt(value);\n\n if (bytesEqual(cp.oldKey.value, ZERO_HASH.value)) {\n cp.isOld0 = true;\n }\n\n cp.siblings = circomSiblingsFromSiblings(siblings, this.maxLevels);\n await this.add(k, v);\n\n cp.newKey = Hash.fromBigInt(k);\n cp.newValue = Hash.fromBigInt(v);\n cp.newRoot = 
await this.root();\n\n return cp;\n }\n\n // NOTE: for now it only prints to console, will be updated in future\n async graphViz(rootKey: Hash): Promise<void> {\n let cnt = 0;\n\n await this.walk(rootKey,