UNPKG

snarkjs

Version:

zkSNARKs implementation in JavaScript

79 lines (63 loc) 2.94 kB
/* Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/

import * as binFileUtils from "@iden3/binfileutils";

/**
 * Creates a new section with id `idSection` in `fdNew`, filled with the
 * points of the same section of `fdOld` after "key application": the i-th
 * point is multiplied by the scalar first * inc^i, for nPoints points.
 *
 * @param {object} fdOld       - Source binary file descriptor.
 * @param {object} sections    - Section table of `fdOld` (from binFileUtils).
 * @param {object} fdNew       - Destination binary file descriptor.
 * @param {number} idSection   - Section id to read and (re)write.
 * @param {object} curve       - Curve object; provides `curve[groupName]` and `curve.Fr`.
 * @param {string} groupName   - Group to operate on ("G1" or "G2").
 * @param {object} first       - First scalar of the geometric progression (Fr element).
 * @param {object} inc         - Ratio of the progression (Fr element).
 * @param {string} sectionName - Human-readable name, used only for logging.
 * @param {object} [logger]    - Optional logger with a `debug` method.
 */
export async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
    const MAX_CHUNK_SIZE = 1 << 16;         // points processed per batch
    const G = curve[groupName];
    const sG = G.F.n8 * 2;                  // bytes per uncompressed point in this group
    const nPoints = sections[idSection][0].size / sG;

    await binFileUtils.startReadUniqueSection(fdOld, sections, idSection);
    await binFileUtils.startWriteSection(fdNew, idSection);

    let t = first;                          // scalar applied to the first point of the current chunk
    for (let i = 0; i < nPoints; i += MAX_CHUNK_SIZE) {
        if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
        const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
        const buffIn = await fdOld.read(n * sG);
        const buffOut = await G.batchApplyKey(buffIn, t, inc);
        await fdNew.write(buffOut);
        // Advance the running scalar past the n points just written: t *= inc^n.
        t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
    }

    await binFileUtils.endWriteSection(fdNew);
    await binFileUtils.endReadSection(fdOld);
}

/**
 * Streams nPoints points from a challenge file (`fdOld`, uncompressed/U
 * format), applies the key first * inc^i to the i-th point, and writes the
 * result to the response file `fdNew` in either compressed ("COMPRESSED")
 * or uncompressed format, feeding every output byte to `responseHasher`.
 *
 * @param {object} fdOld          - Challenge file descriptor (points in U format).
 * @param {object} fdNew          - Response file descriptor to write to.
 * @param {object} responseHasher - Optional hash accumulator with an `update` method.
 * @param {object} curve          - Curve object; provides `curve[groupName]` and `curve.Fr`.
 * @param {string} groupName      - Group to operate on ("G1" or "G2").
 * @param {number} nPoints        - Number of points to process.
 * @param {object} first          - First scalar of the geometric progression (Fr element).
 * @param {object} inc            - Ratio of the progression (Fr element).
 * @param {string} formatOut      - "COMPRESSED" for C output, anything else for U output.
 * @param {string} sectionName    - Human-readable name, used only for logging.
 * @param {object} [logger]       - Optional logger with a `debug` method.
 */
export async function applyKeyToChallengeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
    const G = curve[groupName];
    const sG = G.F.n8 * 2;                          // bytes per uncompressed point
    // NOTE: chunkSize * sG ~ 1 MiB of input per iteration (the original
    // "128Mb chunks" comment was wrong: 1<<20 bytes is 1 MiB).
    const chunkSize = Math.floor((1 << 20) / sG);

    let t = first;                                  // scalar applied to the first point of the current chunk
    for (let i = 0; i < nPoints; i += chunkSize) {
        if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
        const n = Math.min(nPoints - i, chunkSize);

        const buffInU = await fdOld.read(n * sG);
        const buffInLEM = await G.batchUtoLEM(buffInU);         // U -> little-endian Montgomery
        const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);

        // Serialize in the requested output format (strict equality: the
        // original used `==`, but formatOut is always a string).
        const buffOut = (formatOut === "COMPRESSED")
            ? await G.batchLEMtoC(buffOutLEM)
            : await G.batchLEMtoU(buffOutLEM);

        if (responseHasher) responseHasher.update(buffOut);
        await fdNew.write(buffOut);

        // Advance the running scalar past the n points just written: t *= inc^n.
        t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
    }
}