// UNPKG — @jackallabs/dogwood-tree
// Jackal Labs JS Merkletree implementation
// Version: (not captured in this page snapshot — confirm against the npm registry)
// 1,210 lines (1,209 loc) 32 kB
// --- Bundler-emitted class-field helpers -----------------------------------
// Define `key` on `obj` with value `value`. When the key already exists we go
// through Object.defineProperty to keep it enumerable/configurable/writable;
// otherwise a plain assignment suffices.
const __defProp = Object.defineProperty;
const __defNormalProp = (obj, key, value) => {
  if (key in obj) {
    return __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value });
  }
  obj[key] = value;
  return value;
};
// Public class-field initializer: non-symbol keys are stringified first.
const __publicField = (obj, key, value) => {
  const normalizedKey = typeof key === "symbol" ? key : key + "";
  return __defNormalProp(obj, normalizedKey, value);
};

// --- Merkletree sizing / encoding utilities --------------------------------

// Smallest power of two >= `leaves`: the number of leaf slots in a full tree.
function calculateBranches(leaves) {
  return 2 ** Math.ceil(Math.log2(leaves));
}

// Pack a string's UTF-16 code units into bytes (each unit truncated mod 256).
// Callers only feed ASCII hex/decimal text, so no data is lost in practice.
function stringToUint8(str) {
  const bytes = new Uint8Array(str.length);
  let i = str.length;
  while (i--) {
    bytes[i] = str.charCodeAt(i);
  }
  return bytes;
}

/*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */
// --- noble-hashes argument assertions and byte utilities -------------------

// True for Uint8Array instances, including cross-realm ones (matched by
// constructor name on any ArrayBuffer view).
function isBytes(a) {
  if (a instanceof Uint8Array) return true;
  return ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array";
}

// Assert `n` is a non-negative safe integer.
function anumber(n) {
  if (Number.isSafeInteger(n) && n >= 0) return;
  throw new Error("positive integer expected, got " + n);
}

// Assert `b` is a Uint8Array, optionally of one of the given lengths.
function abytes(b, ...lengths) {
  if (!isBytes(b)) throw new Error("Uint8Array expected");
  if (lengths.length === 0) return;
  if (lengths.includes(b.length)) return;
  throw new Error("Uint8Array expected of length " + lengths + ", got length=" + b.length);
}

// Assert a hash instance is still usable (not destroyed / not finalized).
function aexists(instance, checkFinished = true) {
  if (instance.destroyed) throw new Error("Hash instance has been destroyed");
  if (checkFinished && instance.finished) throw new Error("Hash#digest() has already been called");
}

// Assert `out` is a byte buffer large enough to hold the instance's digest.
function aoutput(out, instance) {
  abytes(out);
  const min = instance.outputLen;
  if (out.length < min) {
    throw new Error("digestInto() expects output buffer of length at least " + min);
  }
}

// Byte / 32-bit word views over the same underlying buffer (no copy).
function u8(arr) {
  return new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength);
}
function u32(arr) {
  return new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4));
}

// Zero-fill every given array (used to scrub key material on destroy()).
function clean(...arrays) {
  for (const arr of arrays) {
    arr.fill(0);
  }
}

// 32-bit rotate-right.
function rotr(word, shift) {
  return (word >>> shift) | (word << (32 - shift));
}

// Endianness probe: true on little-endian platforms
// (0x11223344 stored with 0x44 in byte 0).
const isLE = /* @__PURE__ */ (() => new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44)();
// --- Endianness helpers -----------------------------------------------------
// Reverse the byte order of a 32-bit word (0x11223344 -> 0x44332211).
function byteSwap(word) {
  return ((word & 0xff) << 24) | ((word & 0xff00) << 8) | ((word >>> 8) & 0xff00) | ((word >>> 24) & 0xff);
}
// On big-endian platforms single words / whole u32 arrays must be swapped to
// match the little-endian wire format; on little-endian both are identity.
const swap8IfBE = isLE ? (n) => n : (n) => byteSwap(n);
function byteSwap32(arr) {
  for (let idx = 0; idx < arr.length; idx++) {
    arr[idx] = byteSwap(arr[idx]);
  }
  return arr;
}
const swap32IfBE = isLE ? (u) => u : byteSwap32;

// --- Input normalization ----------------------------------------------------
// UTF-8 encode a string into a fresh Uint8Array.
function utf8ToBytes(str) {
  if (typeof str !== "string") throw new Error("string expected");
  return new Uint8Array(new TextEncoder().encode(str));
}
// Accept string or bytes; always return validated bytes.
function toBytes(data) {
  if (typeof data === "string") data = utf8ToBytes(data);
  abytes(data);
  return data;
}

// Abstract base marker for all hash implementations below.
class Hash {
}

// --- Hasher factories -------------------------------------------------------
// Wrap a constructor into a one-shot callable `hash(msg)` that also exposes
// outputLen / blockLen and a `create()` for streaming use.
function createHasher(hashCons) {
  const probe = hashCons();
  const oneShot = (msg) => hashCons().update(toBytes(msg)).digest();
  return Object.assign(oneShot, {
    outputLen: probe.outputLen,
    blockLen: probe.blockLen,
    create: () => hashCons(),
  });
}
// Same, for XOF-capable hashes whose constructor takes an options object.
function createXOFer(hashCons) {
  const probe = hashCons({});
  const oneShot = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest();
  return Object.assign(oneShot, {
    outputLen: probe.outputLen,
    blockLen: probe.blockLen,
    create: (opts) => hashCons(opts),
  });
}

// SHA-256 initial hash values (also the BLAKE3 IV).
const SHA256_IV = /* @__PURE__ */ Uint32Array.from([
  0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
  0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
]);

// --- 64-bit values as (high, low) 32-bit pairs ------------------------------
const U32_MASK64 = /* @__PURE__ */ BigInt(0xffffffff);
const _32n = /* @__PURE__ */ BigInt(32);

// Split a BigInt into 32-bit halves. `le = true` swaps the pair (h holds the
// low word) and skips the signed |0 coercion — callers rely on both quirks.
function fromBig(n, le = false) {
  const lo = Number(n & U32_MASK64);
  const hi = Number((n >> _32n) & U32_MASK64);
  if (le) {
    return { h: lo, l: hi };
  }
  return { h: hi | 0, l: lo | 0 };
}

// Split a list of BigInts into parallel high/low Uint32Arrays.
function split(lst, le = false) {
  const len = lst.length;
  const Ah = new Uint32Array(len);
  const Al = new Uint32Array(len);
  for (let idx = 0; idx < len; idx++) {
    const { h, l } = fromBig(lst[idx], le);
    Ah[idx] = h;
    Al[idx] = l;
  }
  return [Ah, Al];
}

// 64-bit rotate-left on (h, l) pairs: S* variants for shifts < 32,
// B* variants for shifts > 32 (halves swap roles).
const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s));
const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s));
const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s));
const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s));
// --- BLAKE quarter-round halves (32-bit) ------------------------------------
// First half of the G mixing function: rotations 16 and 12.
function G1s(a, b, c, d, x) {
  a = a + b + x | 0;
  d = rotr(d ^ a, 16);
  c = c + d | 0;
  b = rotr(b ^ c, 12);
  return { a, b, c, d };
}
// Second half of the G mixing function: rotations 8 and 7.
function G2s(a, b, c, d, x) {
  a = a + b + x | 0;
  d = rotr(d ^ a, 8);
  c = c + d | 0;
  b = rotr(b ^ c, 7);
  return { a, b, c, d };
}

// Shared streaming base for BLAKE-family hashes: block buffering, the fast
// aligned path, and clone/digest plumbing. Subclasses implement compress()
// and get()/set() over their internal state words.
class BLAKE2 extends Hash {
  constructor(blockLen, outputLen) {
    super();
    this.finished = false;
    this.destroyed = false;
    this.length = 0;
    this.pos = 0;
    anumber(blockLen);
    anumber(outputLen);
    this.blockLen = blockLen;
    this.outputLen = outputLen;
    this.buffer = new Uint8Array(blockLen);
    this.buffer32 = u32(this.buffer);
  }
  // Absorb `data`; full 4-byte-aligned blocks are compressed directly from the
  // input buffer without copying. The final partial block stays in `buffer`.
  update(data) {
    aexists(this);
    data = toBytes(data);
    abytes(data);
    const { blockLen, buffer, buffer32 } = this;
    const len = data.length;
    const offset = data.byteOffset;
    const buf = data.buffer;
    for (let pos = 0; pos < len; ) {
      if (this.pos === blockLen) {
        swap32IfBE(buffer32);
        this.compress(buffer32, 0, false);
        swap32IfBE(buffer32);
        this.pos = 0;
      }
      const take = Math.min(blockLen - this.pos, len - pos);
      const dataOffset = offset + pos;
      // Fast path: whole aligned blocks, and at least one block left after.
      if (take === blockLen && !(dataOffset % 4) && pos + take < len) {
        const data32 = new Uint32Array(buf, dataOffset, Math.floor((len - pos) / 4));
        swap32IfBE(data32);
        for (let pos32 = 0; pos + blockLen < len; pos32 += buffer32.length, pos += blockLen) {
          this.length += blockLen;
          this.compress(data32, pos32, false);
        }
        swap32IfBE(data32);
        continue;
      }
      buffer.set(data.subarray(pos, pos + take), this.pos);
      this.pos += take;
      this.length += take;
      pos += take;
    }
    return this;
  }
  // Finalize into `out` (zero-pads the last block, compresses with isLast).
  digestInto(out) {
    aexists(this);
    aoutput(out, this);
    const { pos, buffer32 } = this;
    this.finished = true;
    clean(this.buffer.subarray(pos));
    swap32IfBE(buffer32);
    this.compress(buffer32, 0, true);
    swap32IfBE(buffer32);
    const out32 = u32(out);
    this.get().forEach((v, i) => out32[i] = swap8IfBE(v));
  }
  // Finalize and return a fresh Uint8Array of outputLen bytes.
  digest() {
    const { buffer, outputLen } = this;
    this.digestInto(buffer);
    const res = buffer.slice(0, outputLen);
    this.destroy();
    return res;
  }
  _cloneInto(to) {
    const { buffer, length, finished, destroyed, outputLen, pos } = this;
    to || (to = new this.constructor({ dkLen: outputLen }));
    to.set(...this.get());
    to.buffer.set(buffer);
    to.destroyed = destroyed;
    to.finished = finished;
    to.length = length;
    to.pos = pos;
    to.outputLen = outputLen;
    return to;
  }
  clone() {
    return this._cloneInto();
  }
}

// Full BLAKE compression: `rounds` rounds of 8 G applications over the
// 16-word state v0..v15, with the message schedule `s` indexed continuously
// across rounds (j is NOT reset per round — s must hold rounds*16 entries).
function compress(s, offset, msg, rounds, v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) {
  let j = 0;
  for (let i = 0; i < rounds; i++) {
    // Column step
    ({ a: v0, b: v4, c: v8, d: v12 } = G1s(v0, v4, v8, v12, msg[offset + s[j++]]));
    ({ a: v0, b: v4, c: v8, d: v12 } = G2s(v0, v4, v8, v12, msg[offset + s[j++]]));
    ({ a: v1, b: v5, c: v9, d: v13 } = G1s(v1, v5, v9, v13, msg[offset + s[j++]]));
    ({ a: v1, b: v5, c: v9, d: v13 } = G2s(v1, v5, v9, v13, msg[offset + s[j++]]));
    ({ a: v2, b: v6, c: v10, d: v14 } = G1s(v2, v6, v10, v14, msg[offset + s[j++]]));
    ({ a: v2, b: v6, c: v10, d: v14 } = G2s(v2, v6, v10, v14, msg[offset + s[j++]]));
    ({ a: v3, b: v7, c: v11, d: v15 } = G1s(v3, v7, v11, v15, msg[offset + s[j++]]));
    ({ a: v3, b: v7, c: v11, d: v15 } = G2s(v3, v7, v11, v15, msg[offset + s[j++]]));
    // Diagonal step
    ({ a: v0, b: v5, c: v10, d: v15 } = G1s(v0, v5, v10, v15, msg[offset + s[j++]]));
    ({ a: v0, b: v5, c: v10, d: v15 } = G2s(v0, v5, v10, v15, msg[offset + s[j++]]));
    ({ a: v1, b: v6, c: v11, d: v12 } = G1s(v1, v6, v11, v12, msg[offset + s[j++]]));
    ({ a: v1, b: v6, c: v11, d: v12 } = G2s(v1, v6, v11, v12, msg[offset + s[j++]]));
    ({ a: v2, b: v7, c: v8, d: v13 } = G1s(v2, v7, v8, v13, msg[offset + s[j++]]));
    ({ a: v2, b: v7, c: v8, d: v13 } = G2s(v2, v7, v8, v13, msg[offset + s[j++]]));
    ({ a: v3, b: v4, c: v9, d: v14 } = G1s(v3, v4, v9, v14, msg[offset + s[j++]]));
    ({ a: v3, b: v4, c: v9, d: v14 } = G2s(v3, v4, v9, v14, msg[offset + s[j++]]));
  }
  return { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 };
}

// BLAKE3 domain-separation flags.
const B3_Flags = {
  CHUNK_START: 1,
  CHUNK_END: 2,
  PARENT: 4,
  ROOT: 8,
  KEYED_HASH: 16,
  DERIVE_KEY_CONTEXT: 32,
  DERIVE_KEY_MATERIAL: 64
};
// BLAKE3 IV is the SHA-256 IV.
const B3_IV = SHA256_IV.slice();
// Precomputed 7-round message schedule (7 * 16 indices), generated by
// repeatedly applying the BLAKE3 permutation to the identity schedule.
const B3_SIGMA = /* @__PURE__ */ (() => {
  const Id = Array.from({ length: 16 }, (_, i) => i);
  const permute = (arr) => [2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8].map((i) => arr[i]);
  const res = [];
  for (let i = 0, v = Id; i < 7; i++, v = permute(v)) res.push(...v);
  return Uint8Array.from(res);
})();

// BLAKE3: chunked Merkle-tree hash over 1 KiB chunks of 64-byte blocks, with
// optional keyed and derive-key modes, plus XOF output.
class BLAKE3 extends BLAKE2 {
  constructor(opts = {}, flags = 0) {
    super(64, opts.dkLen === void 0 ? 32 : opts.dkLen);
    this.chunkPos = 0;       // block index within the current chunk (0..15)
    this.chunksDone = 0;     // chunk counter (fed into the compression as t)
    this.flags = 0 | 0;
    this.stack = [];         // chaining values of completed subtrees
    this.posOut = 0;
    this.bufferOut32 = new Uint32Array(16);
    this.chunkOut = 0;       // output-block counter for XOF
    this.enableXOF = true;
    const { key, context } = opts;
    const hasContext = context !== void 0;
    if (key !== void 0) {
      if (hasContext) throw new Error('Only "key" or "context" can be specified at same time');
      const k = toBytes(key).slice();
      abytes(k, 32);
      this.IV = u32(k);
      swap32IfBE(this.IV);
      this.flags = flags | B3_Flags.KEYED_HASH;
    } else if (hasContext) {
      const ctx = toBytes(context);
      const contextKey = new BLAKE3({ dkLen: 32 }, B3_Flags.DERIVE_KEY_CONTEXT).update(ctx).digest();
      this.IV = u32(contextKey);
      swap32IfBE(this.IV);
      this.flags = flags | B3_Flags.DERIVE_KEY_MATERIAL;
    } else {
      this.IV = B3_IV.slice();
      this.flags = flags;
    }
    this.state = this.IV.slice();
    this.bufferOut = u8(this.bufferOut32);
  }
  // Unused (BLAKE2 clone plumbing is overridden below).
  get() {
    return [];
  }
  set() {
  }
  // One compression over `buf`, folding the result into this.state.
  b2Compress(counter, flags, buf, bufPos = 0) {
    const { state: s, pos } = this;
    const { h, l } = fromBig(BigInt(counter), true); // h = low word, l = high word
    const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = compress(B3_SIGMA, bufPos, buf, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], h, l, pos, flags);
    s[0] = v0 ^ v8;
    s[1] = v1 ^ v9;
    s[2] = v2 ^ v10;
    s[3] = v3 ^ v11;
    s[4] = v4 ^ v12;
    s[5] = v5 ^ v13;
    s[6] = v6 ^ v14;
    s[7] = v7 ^ v15;
  }
  // Compress one 64-byte block; on chunk completion, merge completed subtrees
  // off the stack (standard BLAKE3 binary-counter merging).
  compress(buf, bufPos = 0, isLast = false) {
    let flags = this.flags;
    if (!this.chunkPos) flags |= B3_Flags.CHUNK_START;
    if (this.chunkPos === 15 || isLast) flags |= B3_Flags.CHUNK_END;
    if (!isLast) this.pos = this.blockLen;
    this.b2Compress(this.chunksDone, flags, buf, bufPos);
    this.chunkPos += 1;
    if (this.chunkPos === 16 || isLast) {
      let chunk = this.state;
      this.state = this.IV.slice();
      for (let last, chunks = this.chunksDone + 1; isLast || !(chunks & 1); chunks >>= 1) {
        if (!(last = this.stack.pop())) break;
        this.buffer32.set(last, 0);
        this.buffer32.set(chunk, 8);
        this.pos = this.blockLen;
        this.b2Compress(0, this.flags | B3_Flags.PARENT, this.buffer32, 0);
        chunk = this.state;
        this.state = this.IV.slice();
      }
      this.chunksDone++;
      this.chunkPos = 0;
      this.stack.push(chunk);
    }
    this.pos = 0;
  }
  _cloneInto(to) {
    to = super._cloneInto(to);
    const { IV, flags, state, chunkPos, posOut, chunkOut, stack, chunksDone } = this;
    to.state.set(state.slice());
    to.stack = stack.map((i) => Uint32Array.from(i));
    to.IV.set(IV);
    to.flags = flags;
    to.chunkPos = chunkPos;
    to.chunksDone = chunksDone;
    to.posOut = posOut;
    to.chunkOut = chunkOut;
    to.enableXOF = this.enableXOF;
    to.bufferOut32.set(this.bufferOut32);
    return to;
  }
  destroy() {
    this.destroyed = true;
    clean(this.state, this.buffer32, this.IV, this.bufferOut32);
    clean(...this.stack);
  }
  // Same as b2Compress, but doesn't modify state and returns 16 u32 array (instead of 8)
  b2CompressOut() {
    const { state: s, pos, flags, buffer32, bufferOut32: out32 } = this;
    const { h, l } = fromBig(BigInt(this.chunkOut++)); // h = high word, l = low word
    swap32IfBE(buffer32);
    const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = compress(B3_SIGMA, 0, buffer32, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], l, h, pos, flags);
    out32[0] = v0 ^ v8;
    out32[1] = v1 ^ v9;
    out32[2] = v2 ^ v10;
    out32[3] = v3 ^ v11;
    out32[4] = v4 ^ v12;
    out32[5] = v5 ^ v13;
    out32[6] = v6 ^ v14;
    out32[7] = v7 ^ v15;
    out32[8] = s[0] ^ v8;
    out32[9] = s[1] ^ v9;
    out32[10] = s[2] ^ v10;
    out32[11] = s[3] ^ v11;
    out32[12] = s[4] ^ v12;
    out32[13] = s[5] ^ v13;
    out32[14] = s[6] ^ v14;
    out32[15] = s[7] ^ v15;
    swap32IfBE(buffer32);
    swap32IfBE(out32);
    this.posOut = 0;
  }
  // Finalize the tree and prepare the root compression for output squeezing.
  finish() {
    if (this.finished) return;
    this.finished = true;
    clean(this.buffer.subarray(this.pos));
    let flags = this.flags | B3_Flags.ROOT;
    if (this.stack.length) {
      flags |= B3_Flags.PARENT;
      swap32IfBE(this.buffer32);
      this.compress(this.buffer32, 0, true);
      swap32IfBE(this.buffer32);
      this.chunksDone = 0;
      this.pos = this.blockLen;
    } else {
      flags |= (!this.chunkPos ? B3_Flags.CHUNK_START : 0) | B3_Flags.CHUNK_END;
    }
    this.flags = flags;
    this.b2CompressOut();
  }
  // Squeeze output bytes into `out` (XOF: arbitrary length).
  writeInto(out) {
    aexists(this, false);
    abytes(out);
    this.finish();
    const { blockLen, bufferOut } = this;
    for (let pos = 0, len = out.length; pos < len; ) {
      if (this.posOut >= blockLen) this.b2CompressOut();
      const take = Math.min(blockLen - this.posOut, len - pos);
      out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos);
      this.posOut += take;
      pos += take;
    }
    return out;
  }
  xofInto(out) {
    if (!this.enableXOF) throw new Error("XOF is not possible after digest call");
    return this.writeInto(out);
  }
  xof(bytes) {
    anumber(bytes);
    return this.xofInto(new Uint8Array(bytes));
  }
  digestInto(out) {
    aoutput(out, this);
    if (this.finished) throw new Error("digest() was already called");
    this.enableXOF = false;
    this.writeInto(out);
    this.destroy();
    return out;
  }
  digest() {
    return this.digestInto(new Uint8Array(this.outputLen));
  }
}
const blake3 = /* @__PURE__ */ createXOFer((opts) => new BLAKE3(opts));

// --- SHA-3 / Keccak ---------------------------------------------------------
const _0n = BigInt(0);
const _1n = BigInt(1);
const _2n = BigInt(2);
const _7n = BigInt(7);
const _256n = BigInt(256);
const _0x71n = BigInt(113);
// Generate the pi-step index table, rho rotation offsets, and iota round
// constants (via the degree-7 LFSR from the Keccak reference).
const SHA3_PI = [];
const SHA3_ROTL = [];
const _SHA3_IOTA = [];
for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) {
  [x, y] = [y, (2 * x + 3 * y) % 5];
  SHA3_PI.push(2 * (5 * y + x));
  SHA3_ROTL.push((round + 1) * (round + 2) / 2 % 64);
  let t = _0n;
  for (let j = 0; j < 7; j++) {
    R = (R << _1n ^ (R >> _7n) * _0x71n) % _256n;
    if (R & _2n) t ^= _1n << (_1n << /* @__PURE__ */ BigInt(j)) - _1n;
  }
  _SHA3_IOTA.push(t);
}
const IOTAS = split(_SHA3_IOTA, true);
const SHA3_IOTA_H = IOTAS[0];
const SHA3_IOTA_L = IOTAS[1];
// 64-bit rotl dispatch over (high, low) pairs.
const rotlH = (h, l, s) => s > 32 ? rotlBH(h, l, s) : rotlSH(h, l, s);
const rotlL = (h, l, s) => s > 32 ? rotlBL(h, l, s) : rotlSL(h, l, s);

// Keccak-f[1600] permutation over the state as 50 u32 words (25 lanes of
// high/low pairs).
function keccakP(s, rounds = 24) {
  const B = new Uint32Array(5 * 2);
  for (let round = 24 - rounds; round < 24; round++) {
    // Theta
    for (let x = 0; x < 10; x++) B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40];
    for (let x = 0; x < 10; x += 2) {
      const idx1 = (x + 8) % 10;
      const idx0 = (x + 2) % 10;
      const B0 = B[idx0];
      const B1 = B[idx0 + 1];
      const Th = rotlH(B0, B1, 1) ^ B[idx1];
      const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1];
      for (let y = 0; y < 50; y += 10) {
        s[x + y] ^= Th;
        s[x + y + 1] ^= Tl;
      }
    }
    // Rho and Pi
    let curH = s[2];
    let curL = s[3];
    for (let t = 0; t < 24; t++) {
      const shift = SHA3_ROTL[t];
      const Th = rotlH(curH, curL, shift);
      const Tl = rotlL(curH, curL, shift);
      const PI = SHA3_PI[t];
      curH = s[PI];
      curL = s[PI + 1];
      s[PI] = Th;
      s[PI + 1] = Tl;
    }
    // Chi
    for (let y = 0; y < 50; y += 10) {
      for (let x = 0; x < 10; x++) B[x] = s[y + x];
      for (let x = 0; x < 10; x++) s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10];
    }
    // Iota
    s[0] ^= SHA3_IOTA_H[round];
    s[1] ^= SHA3_IOTA_L[round];
  }
  clean(B);
}

// Keccak sponge (absorb / squeeze) over the 200-byte state.
class Keccak extends Hash {
  // NOTE: we accept arguments in bytes instead of bits here.
  constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) {
    super();
    this.pos = 0;
    this.posOut = 0;
    this.finished = false;
    this.destroyed = false;
    this.enableXOF = false;
    this.blockLen = blockLen;
    this.suffix = suffix;
    this.outputLen = outputLen;
    this.enableXOF = enableXOF;
    this.rounds = rounds;
    anumber(outputLen);
    if (!(0 < blockLen && blockLen < 200)) throw new Error("only keccak-f1600 function is supported");
    this.state = new Uint8Array(200);
    this.state32 = u32(this.state);
  }
  clone() {
    return this._cloneInto();
  }
  // Run the permutation and reset absorb/squeeze cursors.
  keccak() {
    swap32IfBE(this.state32);
    keccakP(this.state32, this.rounds);
    swap32IfBE(this.state32);
    this.posOut = 0;
    this.pos = 0;
  }
  // Absorb: XOR input into the rate portion of the state.
  update(data) {
    aexists(this);
    data = toBytes(data);
    abytes(data);
    const { blockLen, state } = this;
    const len = data.length;
    for (let pos = 0; pos < len; ) {
      const take = Math.min(blockLen - this.pos, len - pos);
      for (let i = 0; i < take; i++) state[this.pos++] ^= data[pos++];
      if (this.pos === blockLen) this.keccak();
    }
    return this;
  }
  // Apply the domain-separation suffix and final padding bit.
  finish() {
    if (this.finished) return;
    this.finished = true;
    const { state, suffix, pos, blockLen } = this;
    state[pos] ^= suffix;
    if ((suffix & 128) !== 0 && pos === blockLen - 1) this.keccak();
    state[blockLen - 1] ^= 128;
    this.keccak();
  }
  // Squeeze output bytes into `out`.
  writeInto(out) {
    aexists(this, false);
    abytes(out);
    this.finish();
    const bufferOut = this.state;
    const { blockLen } = this;
    for (let pos = 0, len = out.length; pos < len; ) {
      if (this.posOut >= blockLen) this.keccak();
      const take = Math.min(blockLen - this.posOut, len - pos);
      out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos);
      this.posOut += take;
      pos += take;
    }
    return out;
  }
  xofInto(out) {
    if (!this.enableXOF) throw new Error("XOF is not possible for this instance");
    return this.writeInto(out);
  }
  xof(bytes) {
    anumber(bytes);
    return this.xofInto(new Uint8Array(bytes));
  }
  digestInto(out) {
    aoutput(out, this);
    if (this.finished) throw new Error("digest() was already called");
    this.writeInto(out);
    this.destroy();
    return out;
  }
  digest() {
    return this.digestInto(new Uint8Array(this.outputLen));
  }
  destroy() {
    this.destroyed = true;
    clean(this.state);
  }
  _cloneInto(to) {
    const { blockLen, suffix, outputLen, rounds, enableXOF } = this;
    to || (to = new Keccak(blockLen, suffix, outputLen, enableXOF, rounds));
    to.state32.set(this.state32);
    to.pos = this.pos;
    to.posOut = this.posOut;
    to.finished = this.finished;
    to.rounds = rounds;
    to.suffix = suffix;
    to.outputLen = outputLen;
    to.enableXOF = enableXOF;
    to.destroyed = this.destroyed;
    return to;
  }
}
const gen = (suffix, blockLen, outputLen) => createHasher(() => new Keccak(blockLen, suffix, outputLen));
// SHA3-512: suffix 0x06, rate 72 bytes, 64-byte digest.
const sha3_512 = /* @__PURE__ */ (() => gen(6, 72, 512 / 8))();

// --- Merkletree branch hashing ----------------------------------------------
// Both branch hashers take an ArrayBuffer and return the digest's ArrayBuffer.
const branchHashOptions = {
  blake3: (src) => {
    return blake3(new Uint8Array(src)).buffer;
  },
  sha3_512: (src) => {
    return sha3_512(new Uint8Array(src)).buffer;
  }
};

// Lowercase hex encoding of an ArrayBuffer via a precomputed byte table.
function bufferToHex(buf) {
  return new Uint8Array(buf).reduce((acc, curr) => {
    return acc + hexPrecompute[curr];
  }, "");
}
// All 256 two-character lowercase hex strings ("00".."ff"), generated instead
// of hand-written (identical values, far less room for transcription error).
const hexPrecompute = /* @__PURE__ */ Array.from({ length: 256 }, (_, byte) => byte.toString(16).padStart(2, "0"));

// Compare two ArrayBuffers for byte equality, using the widest typed view the
// length allows (u32 when length % 4 === 0, u16 when % 2 === 0, else u8).
function arrayBuffersMatch(a, b) {
  if (a.byteLength !== b.byteLength) {
    return false;
  }
  if (a.byteLength % 4 === 0) {
    return typedArraysMatch(new Uint32Array(a), new Uint32Array(b));
  }
  if (a.byteLength % 2 === 0) {
    return typedArraysMatch(new Uint16Array(a), new Uint16Array(b));
  }
  return typedArraysMatch(new Uint8Array(a), new Uint8Array(b));
}
// Element-wise equality of two same-length typed arrays.
function typedArraysMatch(a, b) {
  if (a.byteLength !== b.byteLength) {
    return false;
  }
  return a.every((val, i) => val === b[i]);
}

// Full Merkle tree: keeps the source leaves and all internal nodes (when
// `preserve` is set), so pollards and membership proofs can be generated.
class Merkletree {
  /**
   * @constructor Merkletree
   * @protected
   */
  constructor(r, p, sap, n, h, u, s) {
    __publicField(this, "root");
    __publicField(this, "source");
    __publicField(this, "nodes");
    __publicField(this, "hash");
    __publicField(this, "salted");
    __publicField(this, "sorted");
    this.root = r;
    this.source = p ? sap : [];
    this.nodes = p ? n : [];
    this.hash = h;
    this.salted = u;
    this.sorted = s;
  }
  /**
   * Build a Merkletree from an explicit sapling (leaf buffers) or from a seed
   * buffer chunked into `chunkSize` pieces (each chunk is SHA-256'd with its
   * index prefix to form a leaf).
   * @constructor Merkletree
   * @param {IMerkletreeSource} input - Merkletree creation parameters.
   * @returns {Promise<IMerkletree>}
   */
  static async grow(input) {
    let { seed, sapling, chunkSize, hashType = "sha3_512", useSalt = false, sort = false, preserve = true } = input;
    if (!sapling) {
      if (!(seed && chunkSize)) {
        throw new Error("No Data Provided!");
      }
      sapling = [];
      for (let i = 0, ii = 0; i < seed.byteLength; i += chunkSize, ii++) {
        const bufChunk = seed.slice(i, i + chunkSize);
        const str = ii.toString() + bufferToHex(bufChunk);
        const hashName = await crypto.subtle.digest("SHA-256", stringToUint8(str));
        sapling.push(hashName);
      }
    }
    if (!Object.keys(branchHashOptions).includes(hashType)) {
      throw new Error(`Unsupported hashType of "${hashType}"!`);
    }
    const hashFunc = branchHashOptions[hashType];
    const branchesLen = calculateBranches(sapling.length);
    // 2 * branchesLen slots: index 0 unused, 1 = root, leaves start at branchesLen.
    const nodeLen = branchesLen + sapling.length + (branchesLen - sapling.length);
    const nodes = Array(nodeLen).fill(new ArrayBuffer(64));
    for (let i = 0; i < sapling.length; i++) {
      nodes[branchesLen + i] = hashFunc(sapling[i]);
    }
    // Fill parents bottom-up: node i = hash(node 2i || node 2i+1).
    for (let i = branchesLen - 1; i > 0; i--) {
      const left = nodes[i * 2];
      const right = nodes[i * 2 + 1];
      const concat = await new Blob([left, right]).arrayBuffer();
      nodes[i] = hashFunc(concat);
    }
    return new Merkletree(nodes[1], preserve, sapling, nodes, hashType, useSalt, sort);
  }
  getRoot() {
    return this.root;
  }
  getRootAsHex() {
    return bufferToHex(this.root);
  }
  getSalt() {
    return this.salted;
  }
  // Top `height + 1` levels of the tree (nodes 1 .. 2^(height+1) - 1).
  generatePollard(height) {
    if (this.nodes.length === 0) {
      throw new Error("Data was not preserved!");
    }
    return new Pollard(this.nodes.slice(1, Math.pow(2, height + 1)), this.hash, height);
  }
  // Audit path from the leaf holding `data` up to (exclusive) the pollard of
  // the given height.
  generateProof(data, height) {
    if (this.nodes.length === 0) {
      throw new Error("Data was not preserved!");
    }
    const index = indexOf(data, this.source);
    if (index === -1) {
      throw new Error("Data is not present!");
    }
    const proofLen = Math.ceil(Math.log2(this.source.length)) - height;
    const hashes = Array(proofLen).fill(new ArrayBuffer(64));
    const it = index + this.nodes.length / 2;
    const limit = Math.pow(2, height + 1) - 1;
    // BUGFIX: was `i /= 2`, which yields fractional indices; for odd ancestor
    // paths the fractional value stays above `limit` one step too long (e.g.
    // 9 -> 4.5 -> 2.25 -> 1.125 with limit 1), appending a bogus extra sibling
    // and growing `hashes` past proofLen. Integer shift matches the ToInt32
    // truncation `i ^ 1` was already applying to the sibling lookup.
    for (let i = it, ii = 0; i > limit; i >>= 1, ii++) {
      hashes[ii] = this.nodes[i ^ 1];
    }
    return new Proof(hashes, index, this.hash, this.salted, this.generatePollard(height));
  }
}

// Root-only Merkle tree: hashes pairwise level by level and keeps nothing but
// the root (no proofs / pollards).
class MerkletreeCompact {
  /**
   * @constructor MerkletreeCompact
   * @protected
   */
  constructor(r) {
    __publicField(this, "root");
    this.root = r;
  }
  /**
   * Build from a sapling, a seed ArrayBuffer, or a raw Blob (chunked; each
   * chunk SHA-256'd with its index prefix to form a leaf).
   * @constructor MerkletreeCompact
   * @param {IMerkletreeCompactSource} input - Merkletree Compact creation parameters.
   * @returns {Promise<IMerkletreeCompact>}
   */
  static async grow(input) {
    let { raw, seed, sapling, chunkSize, hashType = "sha3_512" } = input;
    if (sapling) {
      // Leaf buffers supplied directly.
    } else if (seed && chunkSize) {
      sapling = [];
      for (let i = 0, ii = 0; i < seed.byteLength; i += chunkSize, ii++) {
        const bufChunk = seed.slice(i, i + chunkSize);
        const str = ii.toString() + bufferToHex(bufChunk);
        const hashName = await crypto.subtle.digest("SHA-256", stringToUint8(str));
        sapling.push(hashName);
      }
    } else if (raw && chunkSize) {
      sapling = [];
      for (let i = 0, ii = 0; i < raw.size; i += chunkSize, ii++) {
        const blobChunk = raw.slice(i, i + chunkSize);
        const str = ii.toString() + bufferToHex(await blobChunk.arrayBuffer());
        const hashName = await crypto.subtle.digest("SHA-256", stringToUint8(str));
        sapling.push(hashName);
      }
    } else {
      throw new Error("No Data Provided!");
    }
    if (!Object.keys(branchHashOptions).includes(hashType)) {
      throw new Error(`Unsupported hashType of "${hashType}"!`);
    }
    const hashFunc = branchHashOptions[hashType];
    const branchesLen = calculateBranches(sapling.length);
    let queue = Array(branchesLen).fill(new ArrayBuffer(64));
    for (let i = 0; i < sapling.length; i++) {
      queue[i] = hashFunc(sapling[i]);
    }
    // Reduce pairwise until only the root remains.
    let run = queue.length > 1;
    while (run) {
      const cycle = [];
      for (let i = 0; i < queue.length / 2; i++) {
        cycle[i] = merkle(queue[i * 2], queue[i * 2 + 1], hashFunc);
      }
      queue = cycle;
      run = queue.length > 1;
    }
    return new MerkletreeCompact(queue[0]);
  }
  getRoot() {
    return this.root;
  }
  getRootAsHex() {
    return bufferToHex(this.root);
  }
}

// Hash the concatenation of two node buffers (left || right).
function merkle(left, right, hashFunc) {
  const length = left.byteLength + right.byteLength;
  const final = new Uint8Array(length);
  const uLeft = new Uint8Array(left);
  const uRight = new Uint8Array(right);
  final.set(uLeft, 0);
  final.set(uRight, left.byteLength);
  return hashFunc(final);
}

// Linear search for `needle` among ArrayBuffers by byte equality.
function indexOf(needle, haystack) {
  for (let i = 0; i < haystack.length; i++) {
    if (arrayBuffersMatch(needle, haystack[i])) {
      return i;
    }
  }
  return -1;
}

// NOTE(review): unimplemented stub — always logs its input and returns false.
class MultiProof {
  constructor() {
  }
  async verify(data) {
    console.log(data);
    return false;
  }
}

// The top slice of a Merkletree (root plus `height` levels below it), stored
// in level order with the root at index 0.
class Pollard {
  /**
   * @constructor Pollard
   * @param {Array<ArrayBuffer>} hashes - Merkletree hashes.
   * @param {TBranchHashOptionKeys} hashType - Merkletree hash type used.
   * @param {number} height - Height in Merkletree.
   */
  constructor(hashes, hashType, height) {
    __publicField(this, "hashes");
    __publicField(this, "hashType");
    __publicField(this, "height");
    this.hashes = hashes;
    this.hashType = hashType;
    this.height = height;
  }
  getHashes() {
    return this.hashes;
  }
  getHeight() {
    return this.height;
  }
  getLength() {
    return this.hashes.length;
  }
  // Check internal consistency: every parent equals the hash of its children
  // (children of hashes[i] live at hashes[2i+1] and hashes[2i+2]).
  async verify() {
    if (this.hashes.length === 1) {
      return true;
    }
    const hashFunc = branchHashOptions[this.hashType];
    for (let i = this.hashes.length / 2 - 1; i >= 0; i--) {
      const left = this.hashes[i * 2 + 1];
      const right = this.hashes[i * 2 + 2];
      const concat = await new Blob([left, right]).arrayBuffer();
      if (!arrayBuffersMatch(this.hashes[i], hashFunc(concat))) {
        return false;
      }
    }
    return true;
  }
}

// A membership proof: the audit path (sibling hashes) from a leaf up to a
// pollard, plus the leaf's index for left/right ordering.
class Proof {
  /**
   * @constructor Proof
   * @param {Array<ArrayBuffer>} hashes - Merkletree hashes.
   * @param {number} index - Merkletree index.
   * @param {TBranchHashOptionKeys} hashType - Merkletree hash type used.
   * @param {boolean} salted - If merkletree was salted.
   * @param {IPollard} pollard - Pollard instance to use as source.
   */
  constructor(hashes, index, hashType, salted, pollard) {
    __publicField(this, "hashes");
    __publicField(this, "index");
    __publicField(this, "hashType");
    __publicField(this, "salted");
    __publicField(this, "pollard");
    this.hashes = hashes;
    this.index = index;
    this.hashType = hashType;
    this.salted = salted;
    this.pollard = pollard;
  }
  // Fold `data`'s hash up the audit path and accept if the result appears in
  // the pollard's bottom row.
  async verify(data) {
    const hashFunc = branchHashOptions[this.hashType];
    let proofHash = new ArrayBuffer(0);
    // NOTE(review): the salted branch is a no-op — proofHash stays empty when
    // salted is true; salting appears unimplemented here.
    if (!this.salted) {
      proofHash = hashFunc(data);
    }
    // `ind` tracks the node's parity at each level of the climb.
    const ind = this.index + (1 << this.hashes.length);
    for (let i = 0, ii = ind; i < this.hashes.length; i++, ii >>= 1) {
      if (ii % 2 === 0) {
        const concat = await new Blob([proofHash, this.hashes[i]]).arrayBuffer();
        proofHash = hashFunc(concat);
      } else {
        const concat = await new Blob([this.hashes[i], proofHash]).arrayBuffer();
        proofHash = hashFunc(concat);
      }
    }
    const pHashes = this.pollard.getHashes();
    // The pollard's last level holds pHashes.length / 2 + 1 entries.
    for (let i = 0; i < pHashes.length / 2 + 1; i++) {
      if (arrayBuffersMatch(pHashes[pHashes.length - 1 - i], proofHash)) {
        return true;
      }
    }
    return false;
  }
}
export { Merkletree, MerkletreeCompact, MultiProof, Pollard, Proof };