snarkjs

zkSNARKs implementation in JavaScript

/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/

import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as misc from "./misc.js";
import * as binFileUtils from "@iden3/binfileutils";

export default async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr, numIterationsExp, logger) {
    const beaconHash = misc.hex2ByteArray(beaconHashStr);
    if (   (beaconHash.byteLength == 0)
        || (beaconHash.byteLength*2 != beaconHashStr.length))
    {
        if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
        return false;
    }
    if (beaconHash.length >= 256) {
        if (logger) logger.error("Maximum length of beacon hash is 255 bytes");
        return false;
    }

    numIterationsExp = parseInt(numIterationsExp);
    if ((numIterationsExp < 10) || (numIterationsExp > 63)) {
        if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
        return false;
    }

    await Blake2b.ready();

    const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
    if (power != ceremonyPower) {
        if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
        return false;
    }
    if (sections[12]) {
        if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
    }
    const contributions = await utils.readContributions(fdOld, curve, sections);
    const curContribution = {
        name: name,
        type: 1, // Beacon
        numIterationsExp: numIterationsExp,
        beaconHash: beaconHash
    };

    let lastChallengeHash;

    if (contributions.length > 0) {
        lastChallengeHash = contributions[contributions.length-1].nextChallenge;
    } else {
        lastChallengeHash = utils.calculateFirstChallengeHash(curve, power, logger);
    }

    curContribution.key = await utils.keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);

    const responseHasher = new Blake2b(64);
    responseHasher.update(lastChallengeHash);

    const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
    await utils.writePTauHeader(fdNew, curve, power);

    const startSections = [];

    let firstPoints;
    firstPoints = await processSection(2, "G1", (2 ** power) * 2 - 1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger);
    curContribution.tauG1 = firstPoints[1];
    firstPoints = await processSection(3, "G2", (2 ** power), curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger);
    curContribution.tauG2 = firstPoints[1];
    firstPoints = await processSection(4, "G1", (2 ** power), curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger);
    curContribution.alphaG1 = firstPoints[0];
    firstPoints = await processSection(5, "G1", (2 ** power), curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger);
    curContribution.betaG1 = firstPoints[0];
    firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger);
    curContribution.betaG2 = firstPoints[0];

    curContribution.partialHash = responseHasher.getPartialHash();

    const buffKey = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);

    utils.toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);

    responseHasher.update(new Uint8Array(buffKey));
    const hashResponse = responseHasher.digest();

    if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));

    const nextChallengeHasher = new Blake2b(64);
    nextChallengeHasher.update(hashResponse);

    await hashSection(fdNew, "G1", 2, (2 ** power) * 2 - 1, "tauG1", logger);
    await hashSection(fdNew, "G2", 3, (2 ** power)        , "tauG2", logger);
    await hashSection(fdNew, "G1", 4, (2 ** power)        , "alphaTauG1", logger);
    await hashSection(fdNew, "G1", 5, (2 ** power)        , "betaTauG1", logger);
    await hashSection(fdNew, "G2", 6, 1                   , "betaG2", logger);

    curContribution.nextChallenge = nextChallengeHasher.digest();

    if (logger) logger.info(misc.formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));

    contributions.push(curContribution);

    await utils.writeContributions(fdNew, curve, contributions);

    await fdOld.close();
    await fdNew.close();

    return hashResponse;

    async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
        const res = [];
        fdOld.pos = sections[sectionId][0].p;

        await binFileUtils.startWriteSection(fdNew, sectionId);

        startSections[sectionId] = fdNew.pos;

        const G = curve[groupName];
        const sG = G.F.n8*2;
        const chunkSize = Math.floor((1<<20) / sG);
        let t = first;
        for (let i=0; i<NPoints; i += chunkSize) {
            if (logger) logger.debug(`applying key ${sectionName}: ${i}/${NPoints}`);
            const n = Math.min(NPoints-i, chunkSize);
            const buffIn = await fdOld.read(n * sG);
            const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);

            /* Code to test the case where we don't have the 2^m-2 component
            if (sectionName == "tauG1") {
                const bz = new Uint8Array(64);
                buffOutLEM.set(bz, 64*((2 ** power) - 1));
            }
            */

            const promiseWrite = fdNew.write(buffOutLEM);
            const buffOutC = await G.batchLEMtoC(buffOutLEM);

            responseHasher.update(buffOutC);
            await promiseWrite;

            if (i == 0)   // Return the 2 first points.
                for (let j=0; j<Math.min(2, NPoints); j++)
                    res.push(G.fromRprLEM(buffOutLEM, j*sG));
            t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
        }

        await binFileUtils.endWriteSection(fdNew);

        return res;
    }

    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {

        const G = curve[groupName];
        const sG = G.F.n8*2;
        const nPointsChunk = Math.floor((1<<24)/sG);

        const oldPos = fdTo.pos;
        fdTo.pos = startSections[sectionId];

        for (let i=0; i<nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints-i, nPointsChunk);

            const buffLEM = await fdTo.read(n * sG);

            const buffU = await G.batchLEMtoU(buffLEM);

            nextChallengeHasher.update(buffU);
        }

        fdTo.pos = oldPos;
    }
}