// @tkey-mpc/core: TKey Core library
import _defineProperty from '@babel/runtime/helpers/defineProperty';
import { TkeyError, secp256k1, generatePrivateExcludingIndexes, Polynomial, Point, PublicPolynomial, decrypt, toPrivKeyECC, ShareStore, getPubKeyPoint, PublicShare, Share, stripHexPrefix, toPrivKeyEC, KEY_NOT_FOUND, generateSalt, prettyPrintError, randomSelection, SHARE_DELETED, hexPoint, RSSClient, ecPoint, encrypt, getPubKeyECC, ONE_KEY_DELETE_NONCE } from '@tkey-mpc/common-types';
import { keccak256 } from '@toruslabs/torus.js';
import stringify from 'json-stable-stringify';
import _objectSpread from '@babel/runtime/helpers/objectSpread2';
import BN from 'bn.js';
import { generatePrivate } from '@toruslabs/eccrypto';
/**
* CoreError, an extension of Error built on CustomError
* details: github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work
*
* Usage:
* 1. throw CoreError.metadataUndefined() // regularly used errors
* 2. throw CoreError.fromCode(1304); // throw via code
* 3. throw new CoreError(1000, "share indexes should be unique"); // for scarce errors
*
* Guide:
* 1000 - core
* 2000 - security questions
* 3000 - webstorage
* 4000 - common types (code reserved for future implementation)
* 5000 - private key
* 6000 - seed phrase
* 7000 - share serialization
* 8000 - share transfer
*/
class CoreError extends TkeyError {
constructor(code, message) {
// takes care of stack and proto
super(code, message);
// Set name explicitly as minification can mangle class names
Object.defineProperty(this, "name", {
value: "CoreError"
});
}
static fromCode(code, extraMessage = "") {
return new CoreError(code, `${CoreError.messages[code]} ${extraMessage}`);
}
static default(extraMessage = "") {
return new CoreError(1000, `${CoreError.messages[1000]} ${extraMessage}`);
}
// Custom methods
// Metadata
static metadataUndefined(extraMessage = "") {
return CoreError.fromCode(1101, extraMessage);
}
static delete1OutOf1OnlyManualSync(extraMessage = "") {
return CoreError.fromCode(1601, extraMessage);
}
static metadataGetFailed(extraMessage = "") {
return CoreError.fromCode(1102, extraMessage);
}
static metadataPostFailed(extraMessage = "") {
return CoreError.fromCode(1103, extraMessage);
}
static accountSaltUndefined(extraMessage = "") {
return CoreError.fromCode(1106, extraMessage);
}
// TkeyData
static tkeyStoreInvalid(extraMessage = "") {
return CoreError.fromCode(1201, extraMessage);
}
static tkeyEncryptionFailed(extraMessage = "") {
return CoreError.fromCode(1202, extraMessage);
}
static tkeyDecryptionFailed(extraMessage = "") {
return CoreError.fromCode(1203, extraMessage);
}
// Shares
static privateKeyUnavailable(extraMessage = "") {
return CoreError.fromCode(1301, extraMessage);
}
static unableToReconstruct(extraMessage = "") {
return CoreError.fromCode(1302, extraMessage);
}
static incorrectReconstruction(extraMessage = "") {
return CoreError.fromCode(1303, extraMessage);
}
static encryptedShareStoreUnavailable(extraMessage = "") {
return CoreError.fromCode(1306, extraMessage);
}
// Metadata locks
static acquireLockFailed(extraMessage = "") {
return CoreError.fromCode(1401, extraMessage);
}
static releaseLockFailed(extraMessage = "") {
return CoreError.fromCode(1402, extraMessage);
}
// Authmetadata
static privKeyUnavailable(extraMessage = "") {
return CoreError.fromCode(1501, extraMessage);
}
static metadataPubKeyUnavailable(extraMessage = "") {
return CoreError.fromCode(1502, extraMessage);
}
static authMetadataGetUnavailable(extraMessage = "") {
return CoreError.fromCode(1503, extraMessage);
}
static authMetadataSetUnavailable(extraMessage = "") {
return CoreError.fromCode(1504, extraMessage);
}
}
_defineProperty(CoreError, "messages", {
1000: "Custom",
// Misc
1001: "Unable to delete service provider share",
1002: "Wrong share index",
1003: "Unable to updateSDK",
// metadata
1101: "metadata not found, SDK likely not initialized",
1102: "getMetadata errored",
1103: "setMetadata errored",
1104: "previouslyFetchedCloudMetadata provided in initialization is outdated",
1105: "previouslyFetchedCloudMetadata.nonce should never be higher than the latestShareDetails, please contact support",
1106: "Account Salt is absent, required for nonce generation.Make sure key is reconstructed",
// tKeystore
1201: "Invalid tkeyStore",
1202: "Encryption failed",
1203: "Decryption failed",
// shares
1301: "Private key not available. Please reconstruct key first",
1302: "Unable to reconstruct",
1303: "reconstructed key is not pub key",
1304: "Share found in unexpected polynomial",
1305: "Input is not supported",
1306: "no encrypted share store for share exists",
1307: "Share doesn't exist",
1308: "Share was deleted",
// lock
1401: "Unable to acquire lock",
1402: "Unable to release lock",
// auth metadata
1501: "privkey unavailable",
1502: "metadata pubkey unavailable",
1503: "getAuthMetadata errored",
1504: "setAuthMetadata errored",
1601: "delete1OutOf1 requires manualSync=true"
});
const generateEmptyBNArray = length => Array.from({
length
}, () => new BN(0));
const denominator = (i, innerPoints) => {
let result = new BN(1);
const xi = innerPoints[i].x;
for (let j = innerPoints.length - 1; j >= 0; j -= 1) {
if (i !== j) {
let tmp = new BN(xi);
tmp = tmp.sub(innerPoints[j].x);
tmp = tmp.umod(secp256k1.curve.n);
result = result.mul(tmp);
result = result.umod(secp256k1.curve.n);
}
}
return result;
};
const interpolationPoly = (i, innerPoints) => {
let coefficients = generateEmptyBNArray(innerPoints.length);
const d = denominator(i, innerPoints);
if (d.cmp(new BN(0)) === 0) {
throw CoreError.default("Denominator for interpolationPoly is 0");
}
coefficients[0] = d.invm(secp256k1.curve.n);
for (let k = 0; k < innerPoints.length; k += 1) {
const newCoefficients = generateEmptyBNArray(innerPoints.length);
if (k !== i) {
let j;
if (k < i) {
j = k + 1;
} else {
j = k;
}
j -= 1;
for (; j >= 0; j -= 1) {
newCoefficients[j + 1] = newCoefficients[j + 1].add(coefficients[j]);
newCoefficients[j + 1] = newCoefficients[j + 1].umod(secp256k1.curve.n);
let tmp = new BN(innerPoints[k].x);
tmp = tmp.mul(coefficients[j]);
tmp = tmp.umod(secp256k1.curve.n);
newCoefficients[j] = newCoefficients[j].sub(tmp);
newCoefficients[j] = newCoefficients[j].umod(secp256k1.curve.n);
}
coefficients = newCoefficients;
}
}
return coefficients;
};
const pointSort = innerPoints => {
const pointArrClone = [...innerPoints];
pointArrClone.sort((a, b) => a.x.cmp(b.x));
return pointArrClone;
};
const lagrange = unsortedPoints => {
const sortedPoints = pointSort(unsortedPoints);
const polynomial = generateEmptyBNArray(sortedPoints.length);
for (let i = 0; i < sortedPoints.length; i += 1) {
const coefficients = interpolationPoly(i, sortedPoints);
for (let k = 0; k < sortedPoints.length; k += 1) {
let tmp = new BN(sortedPoints[i].y);
tmp = tmp.mul(coefficients[k]);
polynomial[k] = polynomial[k].add(tmp);
polynomial[k] = polynomial[k].umod(secp256k1.curve.n);
}
}
return new Polynomial(polynomial);
};
function lagrangeInterpolatePolynomial(points) {
return lagrange(points);
}
function lagrangeInterpolation(shares, nodeIndex) {
if (shares.length !== nodeIndex.length) {
throw CoreError.default("shares not equal to nodeIndex length in lagrangeInterpolation");
}
let secret = new BN(0);
for (let i = 0; i < shares.length; i += 1) {
let upper = new BN(1);
let lower = new BN(1);
for (let j = 0; j < shares.length; j += 1) {
if (i !== j) {
upper = upper.mul(nodeIndex[j].neg());
upper = upper.umod(secp256k1.curve.n);
let temp = nodeIndex[i].sub(nodeIndex[j]);
temp = temp.umod(secp256k1.curve.n);
lower = lower.mul(temp).umod(secp256k1.curve.n);
}
}
let delta = upper.mul(lower.invm(secp256k1.curve.n)).umod(secp256k1.curve.n);
delta = delta.mul(shares[i]).umod(secp256k1.curve.n);
secret = secret.add(delta);
}
return secret.umod(secp256k1.curve.n);
}
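// Example (illustrative, not part of the library): for f(x) = 5 + 3x mod n,
// the shares at x = 1 and x = 2 are 8 and 11; interpolating them at x = 0
// recovers the secret constant term:
//   lagrangeInterpolation([new BN(8), new BN(11)], [new BN(1), new BN(2)]); // = new BN(5)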
// generateRandomPolynomial - deterministicShares are assumed random
function generateRandomPolynomial(degree, secret, deterministicShares) {
let actualS = secret;
if (!secret) {
actualS = generatePrivateExcludingIndexes([new BN(0)]);
}
if (!deterministicShares) {
const poly = [actualS];
for (let i = 0; i < degree; i += 1) {
const share = generatePrivateExcludingIndexes(poly);
poly.push(share);
}
return new Polynomial(poly);
}
if (!Array.isArray(deterministicShares)) {
throw CoreError.default("deterministic shares in generateRandomPolynomial should be an array");
}
if (deterministicShares.length > degree) {
throw CoreError.default("deterministicShares in generateRandomPolynomial should be less or equal than degree to ensure an element of randomness");
}
const points = {};
deterministicShares.forEach(share => {
points[share.shareIndex.toString("hex")] = new Point(share.shareIndex, share.share);
});
for (let i = 0; i < degree - deterministicShares.length; i += 1) {
let shareIndex = generatePrivateExcludingIndexes([new BN(0)]);
while (points[shareIndex.toString("hex")] !== undefined) {
shareIndex = generatePrivateExcludingIndexes([new BN(0)]);
}
points[shareIndex.toString("hex")] = new Point(shareIndex, new BN(generatePrivate()));
}
points["0"] = new Point(new BN(0), actualS);
return lagrangeInterpolatePolynomial(Object.values(points));
}
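// Example (illustrative; assumes Polynomial.polyEval from @tkey-mpc/common-types):
// a degree-1 polynomial yields a 2-of-n sharing of the secret:
//   const secret = new BN(generatePrivate());
//   const poly = generateRandomPolynomial(1, secret);
//   const shares = [poly.polyEval(new BN(1)), poly.polyEval(new BN(2))];
//   // lagrangeInterpolation(shares, [new BN(1), new BN(2)]).eq(secret) === true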
// e.g. for y = 2 + 3x, the share at index 1 is 5, so g^5 is its commitment; it can be derived from the polynomial commitments g^2 and g^3 as g^2 * (g^3)^1 = g^5
function polyCommitmentEval(polyCommitments, index) {
// convert to base points; re-deriving curve points from coordinates is currently the only way to access them. TODO: refactor
const basePtPolyCommitments = [];
for (let i = 0; i < polyCommitments.length; i += 1) {
const key = secp256k1.keyFromPublic({
x: polyCommitments[i].x.toString("hex"),
y: polyCommitments[i].y.toString("hex")
}, "");
basePtPolyCommitments.push(key.getPublic());
}
let shareCommitment = basePtPolyCommitments[0];
for (let i = 1; i < basePtPolyCommitments.length; i += 1) {
const factor = index.pow(new BN(i)).umod(secp256k1.n);
const e = basePtPolyCommitments[i].mul(factor);
shareCommitment = shareCommitment.add(e);
}
return new Point(shareCommitment.getX(), shareCommitment.getY());
}
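// Example (illustrative; assumes Polynomial.polyEval from @tkey-mpc/common-types):
// anyone holding the public commitments can derive the expected public share for
// an index and check a received share against it:
//   const poly = generateRandomPolynomial(1);
//   const commits = poly.getPublicPolynomial().polynomialCommitments;
//   const expectedPub = polyCommitmentEval(commits, new BN(1));
//   // expectedPub.x equals getPubKeyPoint(poly.polyEval(new BN(1))).x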
function dotProduct(arr1, arr2, modulus = new BN(0)) {
if (arr1.length !== arr2.length) {
throw new Error("arrays of different lengths");
}
let sum = new BN(0);
for (let i = 0; i < arr1.length; i++) {
sum = sum.add(arr1[i].mul(arr2[i]));
if (modulus.cmp(new BN(0)) !== 0) {
sum = sum.umod(modulus);
}
}
return sum;
}
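// Example (illustrative): dotProduct computes sum(arr1[i] * arr2[i]), reducing
// after each term when a non-zero modulus is supplied:
//   dotProduct([new BN(2), new BN(3)], [new BN(4), new BN(5)]); // = new BN(23)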
const kCombinations = (s, k) => {
let set = s;
if (typeof set === "number") {
set = Array.from({
length: set
}, (_, i) => i);
}
if (k > set.length || k <= 0) {
return [];
}
if (k === set.length) {
return [set];
}
if (k === 1) {
return set.reduce((acc, cur) => [...acc, [cur]], []);
}
const combs = [];
let tailCombs = [];
for (let i = 0; i <= set.length - k + 1; i += 1) {
tailCombs = kCombinations(set.slice(i + 1), k - 1);
for (let j = 0; j < tailCombs.length; j += 1) {
combs.push([set[i], ...tailCombs[j]]);
}
}
return combs;
};
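// Example (illustrative): passing a number n enumerates k-subsets of [0..n-1]:
//   kCombinations(3, 2); // [[0, 1], [0, 2], [1, 2]]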
function getLagrangeCoeffs(_allIndexes, _myIndex, _target = 0) {
const allIndexes = _allIndexes.map(i => new BN(i));
const myIndex = new BN(_myIndex);
const target = new BN(_target);
let upper = new BN(1);
let lower = new BN(1);
for (let j = 0; j < allIndexes.length; j += 1) {
if (myIndex.cmp(allIndexes[j]) !== 0) {
let tempUpper = target.sub(allIndexes[j]);
tempUpper = tempUpper.umod(secp256k1.curve.n);
upper = upper.mul(tempUpper);
upper = upper.umod(secp256k1.curve.n);
let tempLower = myIndex.sub(allIndexes[j]);
tempLower = tempLower.umod(secp256k1.curve.n);
lower = lower.mul(tempLower).umod(secp256k1.curve.n);
}
}
return upper.mul(lower.invm(secp256k1.curve.n)).umod(secp256k1.curve.n);
}
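// Example (illustrative): coefficients for evaluating at the default target 0
// given party indexes [1, 2]:
//   getLagrangeCoeffs([1, 2], 1); // = new BN(2)
//   getLagrangeCoeffs([1, 2], 2); // = n - 1, i.e. -1 mod n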
class Metadata {
constructor(input) {
_defineProperty(this, "pubKey", void 0);
_defineProperty(this, "publicPolynomials", void 0);
_defineProperty(this, "publicShares", void 0);
// Tuple of PolyID and array of ShareIndexes
_defineProperty(this, "polyIDList", void 0);
_defineProperty(this, "generalStore", void 0);
_defineProperty(this, "tkeyStore", void 0);
_defineProperty(this, "scopedStore", void 0);
_defineProperty(this, "nonce", void 0);
_defineProperty(this, "tssNonces", void 0);
_defineProperty(this, "tssPolyCommits", void 0);
_defineProperty(this, "factorPubs", void 0);
_defineProperty(this, "factorEncs", void 0);
this.tssPolyCommits = {};
this.tssNonces = {};
this.factorPubs = {};
this.factorEncs = {};
this.publicPolynomials = {};
this.publicShares = {};
this.generalStore = {};
this.tkeyStore = {};
this.scopedStore = {};
this.pubKey = input;
this.polyIDList = [];
this.nonce = 0;
}
static fromJSON(value) {
const {
pubKey,
polyIDList,
generalStore,
tkeyStore,
scopedStore,
nonce,
tssNonces,
tssPolyCommits,
factorPubs,
factorEncs
} = value;
const point = Point.fromCompressedPub(pubKey);
const metadata = new Metadata(point);
const unserializedPolyIDList = [];
if (generalStore) metadata.generalStore = generalStore;
if (tkeyStore) metadata.tkeyStore = tkeyStore;
if (scopedStore) metadata.scopedStore = scopedStore;
if (nonce) metadata.nonce = nonce;
if (tssPolyCommits) {
metadata.tssPolyCommits = {};
for (const key in tssPolyCommits) {
metadata.tssPolyCommits[key] = tssPolyCommits[key].map(obj => new Point(obj.x, obj.y));
}
}
if (tssNonces) {
metadata.tssNonces = {};
for (const key in tssNonces) {
metadata.tssNonces[key] = tssNonces[key];
}
}
if (factorPubs) {
metadata.factorPubs = {};
for (const key in factorPubs) {
metadata.factorPubs[key] = factorPubs[key].map(obj => new Point(obj.x, obj.y));
}
}
if (factorEncs) metadata.factorEncs = factorEncs;
for (let i = 0; i < polyIDList.length; i += 1) {
const serializedPolyID = polyIDList[i];
const arrPolyID = serializedPolyID.split("|");
const zeroIndex = arrPolyID.findIndex(v => v === "0x0");
const firstHalf = arrPolyID.slice(0, zeroIndex);
const secondHalf = arrPolyID.slice(zeroIndex + 1, arrPolyID.length);
// for publicPolynomials
const pubPolyID = firstHalf.join("|");
const pointCommitments = [];
firstHalf.forEach(compressedCommitment => {
pointCommitments.push(Point.fromCompressedPub(compressedCommitment));
});
const publicPolynomial = new PublicPolynomial(pointCommitments);
metadata.publicPolynomials[pubPolyID] = publicPolynomial;
// for polyIDList
unserializedPolyIDList.push([pubPolyID, secondHalf]);
}
metadata.polyIDList = unserializedPolyIDList;
return metadata;
}
getShareIndexesForPolynomial(polyID) {
const matchingPolyIDs = this.polyIDList.filter(tuple => tuple[0] === polyID);
if (matchingPolyIDs.length < 1) {
throw CoreError.default("there is no matching polyID");
} else if (matchingPolyIDs.length > 1) {
throw CoreError.default("there is more than one matching polyID");
}
return matchingPolyIDs[0][1];
}
getLatestPublicPolynomial() {
return this.publicPolynomials[this.polyIDList[this.polyIDList.length - 1][0]];
}
addPublicShare(polynomialID, publicShare) {
if (!(polynomialID in this.publicShares)) {
this.publicShares[polynomialID] = {};
}
this.publicShares[polynomialID][publicShare.shareIndex.toString("hex")] = publicShare;
}
// getPublicShare(polynomialID: PolynomialID, shareIndex: BN): PublicShare {
// }
setGeneralStoreDomain(key, obj) {
this.generalStore[key] = obj;
}
getGeneralStoreDomain(key) {
return this.generalStore[key];
}
deleteGeneralStoreDomain(key) {
delete this.generalStore[key];
}
setTkeyStoreDomain(key, arr) {
this.tkeyStore[key] = arr;
}
getTkeyStoreDomain(key) {
return this.tkeyStore[key];
}
addTSSData(tssData) {
const {
tssTag,
tssNonce,
tssPolyCommits,
factorPubs,
factorEncs
} = tssData;
if (tssNonce !== undefined) this.tssNonces[tssTag] = tssNonce;
if (tssPolyCommits) this.tssPolyCommits[tssTag] = tssPolyCommits;
if (factorPubs) this.factorPubs[tssTag] = factorPubs;
if (factorEncs) this.factorEncs[tssTag] = factorEncs;
}
// appends shares and public polynomial to metadata.
// should represent a generation of share or edit of threshold
addFromPolynomialAndShares(polynomial, shares) {
const publicPolynomial = polynomial.getPublicPolynomial();
const polyID = publicPolynomial.getPolynomialID();
this.publicPolynomials[polyID] = publicPolynomial;
const shareIndexArr = [];
if (Array.isArray(shares)) {
for (let i = 0; i < shares.length; i += 1) {
this.addPublicShare(publicPolynomial.getPolynomialID(), shares[i].getPublicShare());
shareIndexArr.push(shares[i].shareIndex.toString("hex"));
}
} else {
for (const k in shares) {
if (Object.prototype.hasOwnProperty.call(shares, k)) {
this.addPublicShare(publicPolynomial.getPolynomialID(), shares[k].getPublicShare());
shareIndexArr.push(shares[k].shareIndex.toString("hex"));
}
}
}
this.polyIDList.push([polyID, shareIndexArr]);
}
setScopedStore(domain, data) {
this.scopedStore[domain] = data;
}
async getEncryptedShare(shareStore) {
const pubShare = shareStore.share.getPublicShare();
const encryptedShareStore = this.scopedStore.encryptedShares;
if (!encryptedShareStore) {
throw CoreError.encryptedShareStoreUnavailable(`${shareStore}`);
}
const encryptedShare = encryptedShareStore[pubShare.shareCommitment.x.toString("hex")];
if (!encryptedShare) {
throw CoreError.encryptedShareStoreUnavailable(`${shareStore}`);
}
const rawDecrypted = await decrypt(toPrivKeyECC(shareStore.share.share), encryptedShare);
return ShareStore.fromJSON(JSON.parse(rawDecrypted.toString()));
}
getShareDescription() {
return this.getGeneralStoreDomain("shareDescriptions");
}
addShareDescription(shareIndex, description) {
const currentSD = this.getGeneralStoreDomain("shareDescriptions") || {};
if (currentSD[shareIndex]) {
currentSD[shareIndex].push(description);
} else {
currentSD[shareIndex] = [description];
}
this.setGeneralStoreDomain("shareDescriptions", currentSD);
}
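// Example (illustrative; "metadata" is a hypothetical instance): descriptions
// are free-form strings, commonly stringified JSON:
//   metadata.addShareDescription("f5", JSON.stringify({ module: "webStorage", dateAdded: Date.now() }));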
deleteShareDescription(shareIndex, description) {
const currentSD = this.getGeneralStoreDomain("shareDescriptions");
const index = currentSD[shareIndex].indexOf(description);
if (index > -1) {
currentSD[shareIndex].splice(index, 1);
} else {
throw CoreError.default(`No share description found for the given shareIndex: ${shareIndex} and description: ${description}`);
}
}
updateShareDescription(shareIndex, oldDescription, newDescription) {
const currentSD = this.getGeneralStoreDomain("shareDescriptions");
const index = currentSD[shareIndex].indexOf(oldDescription);
if (index > -1) {
currentSD[shareIndex][index] = newDescription;
} else {
throw CoreError.default(`No share description found for the given shareIndex: ${shareIndex} and description: ${oldDescription}`);
}
}
shareToShareStore(share) {
const pubkey = getPubKeyPoint(share);
for (let i = this.polyIDList.length - 1; i >= 0; i -= 1) {
const el = this.polyIDList[i][0];
for (let t = 0; t < this.polyIDList[i][1].length; t += 1) {
const shareIndex = this.polyIDList[i][1][t];
// find pubshare in the cache if it's there
let pubShare;
if (this.publicShares[el]) {
if (this.publicShares[el][shareIndex]) {
pubShare = this.publicShares[el][shareIndex];
}
}
// if not, reconstruct it
if (!pubShare) {
pubShare = new PublicShare(shareIndex, polyCommitmentEval(this.publicPolynomials[el].polynomialCommitments, new BN(shareIndex, "hex")));
}
if (pubShare.shareCommitment.x.eq(pubkey.x) && pubShare.shareCommitment.y.eq(pubkey.y)) {
const tempShare = new Share(pubShare.shareIndex, share);
return new ShareStore(tempShare, el);
}
}
}
throw CoreError.fromCode(1307);
}
clone() {
return Metadata.fromJSON(JSON.parse(stringify(this)));
}
toJSON() {
// squash data to serialized polyID according to spec
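// e.g. a polyID "<commit0>|<commit1>" with share indexes ["1", "f5"] serializes
// to "<commit0>|<commit1>|0x0|1|f5": the commitments, the "0x0" separator, then
// the sorted share indexes (the format fromJSON splits on above)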
const serializedPolyIDList = [];
for (let i = 0; i < this.polyIDList.length; i += 1) {
const polyID = this.polyIDList[i][0];
const shareIndexes = this.polyIDList[i][1];
const sortedShareIndexes = shareIndexes.sort((a, b) => new BN(a, "hex").cmp(new BN(b, "hex")));
const serializedPolyID = polyID.split(`|`).concat("0x0").concat(...sortedShareIndexes).join("|");
serializedPolyIDList.push(serializedPolyID);
}
return _objectSpread(_objectSpread(_objectSpread(_objectSpread({
pubKey: this.pubKey.toSEC1(secp256k1, true).toString("hex"),
polyIDList: serializedPolyIDList,
scopedStore: this.scopedStore,
generalStore: this.generalStore,
tkeyStore: this.tkeyStore,
nonce: this.nonce
}, this.tssNonces && {
tssNonces: this.tssNonces
}), this.tssPolyCommits && {
tssPolyCommits: this.tssPolyCommits
}), this.factorPubs && {
factorPubs: this.factorPubs
}), this.factorEncs && {
factorEncs: this.factorEncs
});
}
}
class AuthMetadata {
constructor(metadata, privKey) {
_defineProperty(this, "metadata", void 0);
_defineProperty(this, "privKey", void 0);
this.metadata = metadata;
this.privKey = privKey;
}
static fromJSON(value) {
const {
data,
sig
} = value;
const m = Metadata.fromJSON(data);
if (!m.pubKey) throw CoreError.metadataPubKeyUnavailable();
const pubK = secp256k1.keyFromPublic({
x: m.pubKey.x.toString("hex", 64),
y: m.pubKey.y.toString("hex", 64)
}, "hex");
if (!pubK.verify(stripHexPrefix(keccak256(Buffer.from(stringify(data), "utf8"))), sig)) {
throw CoreError.default("Signature not valid for returning metadata");
}
return new AuthMetadata(m);
}
toJSON() {
const data = this.metadata;
if (!this.privKey) throw CoreError.privKeyUnavailable();
const k = toPrivKeyEC(this.privKey);
const sig = k.sign(stripHexPrefix(keccak256(Buffer.from(stringify(data), "utf8"))));
return {
data,
sig: sig.toDER("hex")
};
}
}
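// Example (illustrative, not part of the library): AuthMetadata signs the
// stable-stringified metadata with privKey on serialization and verifies the
// signature against metadata.pubKey on deserialization:
//   const priv = new BN(generatePrivate());
//   const meta = new Metadata(getPubKeyPoint(priv));
//   const authed = new AuthMetadata(meta, priv);
//   AuthMetadata.fromJSON(JSON.parse(stringify(authed))); // throws if the signature is invalid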
function pointToHex(p) {
return {
x: p.x.toString(16, 64),
y: p.y.toString(16, 64)
};
}
// TODO: handle errors for get and set with retries
const TSS_MODULE = "tssModule";
const ACCOUNTSALT = "accountSalt";
class ThresholdKey {
constructor(args) {
_defineProperty(this, "modules", void 0);
_defineProperty(this, "enableLogging", void 0);
_defineProperty(this, "serviceProvider", void 0);
_defineProperty(this, "storageLayer", void 0);
_defineProperty(this, "shares", void 0);
_defineProperty(this, "privKey", void 0);
_defineProperty(this, "lastFetchedCloudMetadata", void 0);
_defineProperty(this, "metadata", void 0);
_defineProperty(this, "manualSync", void 0);
_defineProperty(this, "tssTag", void 0);
_defineProperty(this, "_localMetadataTransitions", void 0);
_defineProperty(this, "_refreshMiddleware", void 0);
_defineProperty(this, "_reconstructKeyMiddleware", void 0);
_defineProperty(this, "_shareSerializationMiddleware", void 0);
_defineProperty(this, "_accountSalt", void 0);
_defineProperty(this, "storeDeviceShare", void 0);
_defineProperty(this, "haveWriteMetadataLock", void 0);
_defineProperty(this, "serverTimeOffset", 0);
_defineProperty(this, "serverTssPubIndexMap", {});
const {
enableLogging = false,
modules = {},
serviceProvider,
storageLayer,
manualSync = false,
tssTag,
serverTimeOffset
} = args || {};
this.enableLogging = enableLogging;
this.serviceProvider = serviceProvider;
this.storageLayer = storageLayer;
this.modules = modules;
this.shares = {};
this.privKey = undefined;
this.manualSync = manualSync;
this._refreshMiddleware = {};
this._reconstructKeyMiddleware = {};
this._shareSerializationMiddleware = undefined;
this.storeDeviceShare = undefined;
this._localMetadataTransitions = [[], []];
this.setModuleReferences(); // Providing ITKeyApi access to modules
this.haveWriteMetadataLock = "";
this.tssTag = tssTag || "default";
this.serverTimeOffset = serverTimeOffset;
}
static async fromJSON(value, args) {
const {
enableLogging,
privKey,
metadata,
shares,
_localMetadataTransitions,
manualSync,
lastFetchedCloudMetadata,
tssTag,
serverTimeOffset
} = value;
const {
storageLayer,
serviceProvider,
modules
} = args;
const tb = new ThresholdKey({
tssTag,
enableLogging,
storageLayer,
serviceProvider,
modules,
manualSync,
serverTimeOffset
});
if (privKey) tb.privKey = new BN(privKey, "hex");
for (const key in shares) {
if (Object.prototype.hasOwnProperty.call(shares, key)) {
const shareStoreMapElement = shares[key];
for (const shareElementKey in shareStoreMapElement) {
if (Object.prototype.hasOwnProperty.call(shareStoreMapElement, shareElementKey)) {
const shareStore = shareStoreMapElement[shareElementKey];
shareStoreMapElement[shareElementKey] = ShareStore.fromJSON(shareStore);
}
}
}
}
tb.shares = shares;
// deserialize each local metadata transition by matching Object.keys() against AuthMetadata, ShareStore, and IMessageMetadata
const AuthMetadataKeys = Object.keys(JSON.parse(stringify(new AuthMetadata(new Metadata(new Point("0", "0")), new BN("0", "hex")))));
const ShareStoreKeys = Object.keys(JSON.parse(stringify(new ShareStore(new Share("0", "0"), ""))));
const sampleMessageMetadata = {
message: "Sample message",
dateAdded: Date.now()
};
const MessageMetadataKeys = Object.keys(sampleMessageMetadata);
const localTransitionShares = [];
const localTransitionData = [];
_localMetadataTransitions[0].forEach((x, index) => {
if (x) {
localTransitionShares.push(new BN(x, "hex"));
} else {
localTransitionShares.push(undefined);
}
const keys = Object.keys(_localMetadataTransitions[1][index]);
if (keys.length === AuthMetadataKeys.length && keys.every(val => AuthMetadataKeys.includes(val))) {
const tempAuth = AuthMetadata.fromJSON(_localMetadataTransitions[1][index]);
tempAuth.privKey = privKey;
localTransitionData.push(tempAuth);
} else if (keys.length === ShareStoreKeys.length && keys.every(val => ShareStoreKeys.includes(val))) {
localTransitionData.push(ShareStore.fromJSON(_localMetadataTransitions[1][index]));
} else if (keys.length === MessageMetadataKeys.length && keys.every(val => MessageMetadataKeys.includes(val))) {
localTransitionData.push(_localMetadataTransitions[1][index]);
} else {
throw CoreError.default("fromJSON failed. Could not deserialise _localMetadataTransitions");
}
});
if (metadata || lastFetchedCloudMetadata) {
let tempMetadata;
let tempCloud;
let shareToUseForSerialization;
// if service provider key is missing, we should initialize with one of the existing shares
// TODO: fix for deleted share
if (tb.serviceProvider.postboxKey.toString("hex") === "0") {
const latestPolyIDOnCloud = Metadata.fromJSON(lastFetchedCloudMetadata).getLatestPublicPolynomial().getPolynomialID();
const shareIndexesExistInSDK = Object.keys(shares[latestPolyIDOnCloud]);
if (shareIndexesExistInSDK.length >= 1) {
// pick uniformly at random; multiplying by (length - 1) would never select the last index
const randomIndex = shareIndexesExistInSDK[Math.floor(Math.random() * shareIndexesExistInSDK.length)];
shareToUseForSerialization = shares[latestPolyIDOnCloud][randomIndex];
}
}
if (metadata) tempMetadata = Metadata.fromJSON(metadata);
if (lastFetchedCloudMetadata) tempCloud = Metadata.fromJSON(lastFetchedCloudMetadata);
await tb.initialize({
neverInitializeNewKey: true,
transitionMetadata: tempMetadata,
previouslyFetchedCloudMetadata: tempCloud,
previousLocalMetadataTransitions: [localTransitionShares, localTransitionData],
withShare: shareToUseForSerialization
});
} else {
await tb.initialize({
neverInitializeNewKey: true
});
}
return tb;
}
getStorageLayer() {
return this.storageLayer;
}
getMetadata() {
if (typeof this.metadata !== "undefined") {
return this.metadata;
}
throw CoreError.metadataUndefined();
}
async initialize(params) {
// setup initial params/states
const p = params || {};
if (p.delete1OutOf1 && !this.manualSync) throw CoreError.delete1OutOf1OnlyManualSync();
const {
withShare,
importKey,
neverInitializeNewKey,
transitionMetadata,
previouslyFetchedCloudMetadata,
previousLocalMetadataTransitions,
useTSS,
deviceTSSShare,
factorPub,
deviceTSSIndex
} = p;
if (useTSS && !factorPub) {
throw CoreError.default("cannot use TSS without providing factor key");
}
const previousLocalMetadataTransitionsExists = previousLocalMetadataTransitions && previousLocalMetadataTransitions[0].length > 0 && previousLocalMetadataTransitions[1].length > 0;
const reinitializing = transitionMetadata && previousLocalMetadataTransitionsExists; // are we reinitializing the SDK?
// in the case we're reinitializing whilst newKeyAssign has not been synced
const reinitializingWithNewKeyAssign = reinitializing && previouslyFetchedCloudMetadata === undefined;
let shareStore;
if (withShare instanceof ShareStore) {
shareStore = withShare;
} else if (typeof withShare === "object") {
shareStore = ShareStore.fromJSON(withShare);
} else if (!withShare) {
// default to using the service provider
// first we see if a share has been kept for us
const spIncludeLocalMetadataTransitions = reinitializingWithNewKeyAssign;
const spLocalMetadataTransitions = reinitializingWithNewKeyAssign ? previousLocalMetadataTransitions : undefined;
const rawServiceProviderShare = await this.getGenericMetadataWithTransitionStates({
serviceProvider: this.serviceProvider,
includeLocalMetadataTransitions: spIncludeLocalMetadataTransitions,
_localMetadataTransitions: spLocalMetadataTransitions,
fromJSONConstructor: {
fromJSON(val) {
return val;
}
}
});
// if the metadata lookup returned KEY_NOT_FOUND, no key exists for this user yet
if (rawServiceProviderShare.message === KEY_NOT_FOUND) {
if (neverInitializeNewKey) {
throw CoreError.default("key has not been generated yet");
}
// no metadata set, assumes new user
await this._initializeNewKey({
initializeModules: true,
importedKey: importKey,
delete1OutOf1: p.delete1OutOf1
});
if (useTSS) {
const {
factorEncs,
factorPubs,
tssPolyCommits
} = await this._initializeNewTSSKey(this.tssTag, deviceTSSShare, factorPub, deviceTSSIndex);
this.metadata.addTSSData({
tssTag: this.tssTag,
tssNonce: 0,
tssPolyCommits,
factorPubs,
factorEncs
});
const accountSalt = generateSalt();
await this._setTKeyStoreItem(TSS_MODULE, {
id: "accountSalt",
value: accountSalt
});
this._accountSalt = accountSalt;
}
return this.getKeyDetails();
}
// else we continue with catching up share and metadata
shareStore = ShareStore.fromJSON(rawServiceProviderShare);
} else {
throw CoreError.default("Input is not supported");
}
// We determine the latest metadata on the SDK and whether there are
// pending transitions that need to be included
let currentMetadata;
let latestCloudMetadata;
// we fetch the latest metadata for the account from the share
let latestShareDetails;
try {
latestShareDetails = await this.catchupToLatestShare({
shareStore
});
} catch (error) {
// check whether the error indicates no metadata exists for this share; if so,
// and we are reinitializing, don't throw immediately in case there is valid transition metadata
const err = error;
const noMetadataExistsForShare = err.code === 1503;
if (!noMetadataExistsForShare || !reinitializing) {
throw err;
}
}
// if we are reinitializing, check whether the cloud metadata has been updated since it was previously fetched
if (reinitializing && !reinitializingWithNewKeyAssign) {
if (previouslyFetchedCloudMetadata.nonce < latestShareDetails.shareMetadata.nonce) {
throw CoreError.fromCode(1104);
} else if (previouslyFetchedCloudMetadata.nonce > latestShareDetails.shareMetadata.nonce) {
throw CoreError.fromCode(1105);
}
latestCloudMetadata = previouslyFetchedCloudMetadata;
} else {
latestCloudMetadata = latestShareDetails ? latestShareDetails.shareMetadata.clone() : undefined;
}
// If we've been provided with transition metadata we use that as the current metadata instead
// as we want to maintain state before and after serialization.
// (Given that the checks for cloud metadata pass)
if (reinitializing) {
currentMetadata = transitionMetadata;
this._localMetadataTransitions = previousLocalMetadataTransitions;
} else {
currentMetadata = latestShareDetails.shareMetadata;
}
this.lastFetchedCloudMetadata = latestCloudMetadata;
this.metadata = currentMetadata;
const latestShare = latestShareDetails ? latestShareDetails.latestShare : shareStore;
this.inputShareStore(latestShare);
// initialize modules
await this.initializeModules();
if (useTSS) {
if (!this.metadata.tssPolyCommits[this.tssTag]) {
// if tss shares have not been created for this tssTag, create new tss sharing
await this._initializeNewTSSKey(this.tssTag, deviceTSSShare, factorPub);
}
}
return this.getKeyDetails();
}
getFactorEncs(factorPub) {
if (!this.metadata) throw CoreError.metadataUndefined();
if (!this.metadata.factorEncs) throw CoreError.default("no factor encs mapping");
if (!this.metadata.factorPubs) throw CoreError.default("no factor pubs mapping");
const factorPubs = this.metadata.factorPubs[this.tssTag];
if (!factorPubs) throw CoreError.default(`no factor pubs for this tssTag ${this.tssTag}`);
if (factorPubs.filter(f => f.x.cmp(factorPub.x) === 0 && f.y.cmp(factorPub.y) === 0).length === 0) throw CoreError.default(`factor pub ${factorPub} not found for tssTag ${this.tssTag}`);
if (!this.metadata.factorEncs[this.tssTag]) throw CoreError.default(`no factor encs for tssTag ${this.tssTag}`);
const factorPubID = factorPub.x.toString(16, 64);
return this.metadata.factorEncs[this.tssTag][factorPubID];
}
/**
* getTSSShare accepts a factorKey and returns the TSS share based on the factor-encrypted TSS shares in the metadata
* @param factorKey - factor key
*/
async getTSSShare(factorKey, opts) {
if (!this.privKey) throw CoreError.default("tss share cannot be returned until you've reconstructed tkey");
const factorPub = getPubKeyPoint(factorKey);
const factorEncs = this.getFactorEncs(factorPub);
const {
userEnc,
serverEncs,
tssIndex,
type
} = factorEncs;
const userDecryption = await decrypt(Buffer.from(factorKey.toString(16, 64), "hex"), userEnc);
const serverDecryptions = await Promise.all(serverEncs.map(factorEnc => {
if (factorEnc === null) return null;
return decrypt(Buffer.from(factorKey.toString(16, 64), "hex"), factorEnc);
}));
const tssShareBufs = [userDecryption].concat(serverDecryptions);
const tssShareBNs = tssShareBufs.map(buf => {
if (buf === null) return null;
return new BN(buf.toString("hex"), "hex");
});
const tssCommits = this.getTSSCommits();
const userDec = tssShareBNs[0];
const {
threshold,
accountIndex
} = opts || {};
if (type === "direct") {
const tssSharePub = secp256k1.g.mul(userDec);
const tssCommitA0 = secp256k1.keyFromPublic({
x: tssCommits[0].x.toString(16, 64),
y: tssCommits[0].y.toString(16, 64)
}).getPublic();
const tssCommitA1 = secp256k1.keyFromPublic({
x: tssCommits[1].x.toString(16, 64),
y: tssCommits[1].y.toString(16, 64)
}).getPublic();
let _tssSharePub = tssCommitA0;
for (let j = 0; j < tssIndex; j++) {
_tssSharePub = _tssSharePub.add(tssCommitA1);
}
if (tssSharePub.getX().cmp(_tssSharePub.getX()) === 0 && tssSharePub.getY().cmp(_tssSharePub.getY()) === 0) {
if (accountIndex && accountIndex > 0) {
const nonce = this.computeAccountNonce(accountIndex);
const derivedShare = userDec.add(nonce).umod(secp256k1.n);
return {
tssIndex,
tssShare: derivedShare
};
}
return {
tssIndex,
tssShare: userDec
};
}
throw new Error("user decryption does not match tss commitments...");
}
// if type === "hierarchical"
const serverDecs = tssShareBNs.slice(1); // one decryption per server
const serverIndexes = new Array(serverDecs.length).fill(null).map((_, i) => i + 1);
const combis = kCombinations(serverDecs.length, threshold || Math.ceil(serverDecs.length / 2));
for (let i = 0; i < combis.length; i++) {
const combi = combis[i];
const selectedServerDecs = serverDecs.filter((_, j) => combi.indexOf(j) > -1);
if (selectedServerDecs.includes(null)) continue;
const selectedServerIndexes = serverIndexes.filter((_, j) => combi.indexOf(j) > -1);
const serverLagrangeCoeffs = selectedServerIndexes.map(x => getLagrangeCoeffs(selectedServerIndexes, x));
const serverInterpolated = dotProduct(serverLagrangeCoeffs, selectedServerDecs, secp256k1.n);
const lagrangeCoeffs = [getLagrangeCoeffs([1, 99], 1), getLagrangeCoeffs([1, 99], 99)];
const tssShare = dotProduct(lagrangeCoeffs, [serverInterpolated, userDec], secp256k1.n);
const tssSharePub = secp256k1.g.mul(tssShare);
const tssCommitA0 = secp256k1.keyFromPublic({
x: tssCommits[0].x.toString(16, 64),
y: tssCommits[0].y.toString(16, 64)
}).getPublic();
const tssCommitA1 = secp256k1.keyFromPublic({
x: tssCommits[1].x.toString(16, 64),
y: tssCommits[1].y.toString(16, 64)
}).getPublic();
let _tssSharePub = tssCommitA0;
for (let j = 0; j < tssIndex; j++) {
_tssSharePub = _tssSharePub.add(tssCommitA1);
}
if (tssSharePub.getX().cmp(_tssSharePub.getX()) === 0 && tssSharePub.getY().cmp(_tssSharePub.getY()) === 0) {
if (accountIndex && accountIndex > 0) {
const nonce = this.computeAccountNonce(accountIndex);
const derivedShare = tssShare.add(nonce).umod(secp256k1.n);
return {
tssIndex,
tssShare: derivedShare
};
}
return {
tssIndex,
tssShare
};
}
}
throw new Error("could not find any combination of server decryptions that match tss commitments...");
}
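// Example (illustrative; "tkey" and "factorKey" are hypothetical: a reconstructed
// ThresholdKey instance and a factor key whose factor encryptions exist in metadata):
//   const { tssIndex, tssShare } = await tkey.getTSSShare(factorKey);
//   // with a virtual account: await tkey.getTSSShare(factorKey, { accountIndex: 1 })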
getTSSCommits() {
if (!this.privKey) throw CoreError.default("tss pub cannot be returned until you've reconstructed tkey");
if (!this.metadata) throw CoreError.metadataUndefined();
const tssPolyCommits = this.metadata.tssPolyCommits[this.tssTag];
if (!tssPolyCommits) throw CoreError.default(`tss poly commits not found for tssTag ${this.tssTag}`);
if (tssPolyCommits.length === 0) throw CoreError.default("tss poly commits is empty");
return tssPolyCommits;
}
getTSSPub(accountIndex) {
const tssCommits = this.getTSSCommits();
if (accountIndex && accountIndex > 0) {
const nonce = this.computeAccountNonce(accountIndex);
// we need to add the pub key nonce to the tssPub
const noncePub = secp256k1.keyFromPrivate(nonce.toString("hex")).getPublic();
const pubKeyPoint = secp256k1.keyFromPublic({
x: tssCommits[0].x.toString("hex"),
y: tssCommits[0].y.toString("hex")
}).getPublic();
const devicePubKeyPoint = pubKeyPoint.add(noncePub);
return new Point(devicePubKeyPoint.getX().toString("hex"), devicePubKeyPoint.getY().toString("hex"));
}
return tssCommits[0];
}
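// Example (illustrative; "tkey" is a hypothetical reconstructed instance): the
// TSS pub for account index 0 is tssCommits[0]; higher indexes add a nonce point:
//   const basePub = tkey.getTSSPub();   // tssCommits[0]
//   const acct1Pub = tkey.getTSSPub(1); // tssCommits[0] + g * computeAccountNonce(1)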
async getServerTssPubAndIndexes(tag, tssNonce) {
const pubKeyIndexes = this.serverTssPubIndexMap[`${tag}-${tssNonce}`];
if (pubKeyIndexes) return pubKeyIndexes;
const {
pubKey,
nodeIndexes
} = await this.serviceProvider.getTSSPubKey(this.tssTag, tssNonce);
this.serverTssPubIndexMap[`${tag}-${tssNonce}`] = {
pubKey,
nodeIndexes
};
return {
pubKey,
nodeIndexes
};
}
/**
* catchupToLatestShare recursively fetches the metadata of the provided share and checks if there is an encrypted share for it.
* @param shareStore - share to start off with
* @param polyID - if specified, polyID to refresh to if it exists
*/
async catchupToLatestShare(params) {
const {
shareStore,
polyID,
includeLocalMetadataTransitions
} = params;
let shareMetadata;
try {
shareMetadata = await this.getAuthMetadata({
privKey: shareStore.share.share,
includeLocalMetadataTransitions
});
} catch (error) {
// delete share error
const err = error;
if (err && err.code === 1308) {
throw err;
}
throw CoreError.authMetadataGetUnavailable(`, ${prettyPrintError(err)}`);
}
try {
// if matches specified polyID return it
if (polyID) {
if (shareStore.polynomialID === polyID) {
return {
latestShare: shareStore,
shareMetadata
};
}
}
const nextShare = await shareMetadata.getEncryptedShare(shareStore);
return await this.catchupToLatestShare({
shareStore: nextShare,
polyID,
includeLocalMetadataTransitions
});
} catch (error) {
// delete share error
const err = error;
if (err && err.code === 1308) {
throw err;
}
return {
latestShare: shareStore,
shareMetadata
};
}
}
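// Example (illustrative; "tkey" and "deviceShareStore" are hypothetical): walk a
// possibly outdated device share forward to the newest polynomial:
//   const { latestShare, shareMetadata } = await tkey.catchupToLatestShare({ shareStore: deviceShareStore });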
async reconstructKey(_reconstructKeyMiddleware = true) {
if (!this.metadata) {
throw CoreError.metadataUndefined();
}
const pubPoly = this.metadata.getLatestPublicPolynomial();
const requiredThreshold = pubPoly.getThreshold();
const pubPolyID = pubPoly.getPolynomialID();
// check if we have enough shares to meet threshold
let sharesLeft = requiredThreshold;
// we don't just check the latest poly but
// we check if the shares on previous polynomials in our stores have the share indexes we require
const fullShareList = this.metadata.getShareIndexesForPolynomial(pubPolyID);
const shareIndexesRequired = {};
for (let i = 0; i < fullShareList.length; i += 1) {
shareIndexesRequired[fullShareList[i]] = true;
}
const sharesToInput = [];
for (let z = this.metadata.polyIDList.length - 1; z >= 0 && sharesLeft > 0; z -= 1) {
const sharesForPoly = this.shares[this.metadata.polyIDList[z][0]];
if (sharesForPoly) {
const shareIndexesForPoly = Object.keys(sharesForPoly);
for (let k = 0; k < shareIndexesForPoly.length && sharesLeft > 0; k += 1) {
if (shareIndexesForPoly[k] in shareIndexesRequired) {
const currentShareForPoly = sharesForPoly[shareIndexesForPoly[k]];
if (currentShareForPoly.polynomialID === pubPolyID) {
sharesToInput.push(currentShareForPoly);
} else {
const latestShareRes = await this.catchupToLatestShare({
shareStore: currentShareForPoly,
polyID: pubPolyID,
includeLocalMetadataTransitions: true
});
if (latestShareRes.latestShare.polynomialID === pubPolyID) {
sharesToInput.push(latestShareRes.latestShare);
} else {
throw CoreError.fromCode(1304); // Share found in unexpected polynomial (fromCode already prepends this message)
}
}
delete shareIndexesRequired[shareIndexesForPoly[k]];
sharesLeft -= 1;
}
}
}
}
// Input shares to ensure atomicity
sharesToInput.forEach(share => {
this.inputShareStore(share);
});
if (sharesLeft > 0) {
throw CoreError.unableToReconstruct(` require ${requiredThreshold} but have ${requiredThreshold - sharesLeft}`);
}
const polyShares = Object.keys(this.shares[pubPolyID]);
const shareArr = [];
const shareIndexArr = [];
for (let i = 0; i < requiredThreshold; i += 1) {
shareArr.push(this.shares[pubPolyID][polyShares[i]].share.share);
shareIndexArr.push(this.shares[pubPolyID][polyShares[i]].share.shareIndex);
}
const privKey = lagrangeInterpolation(shareArr, shareIndexArr);
// check that priv key regenerated is correct
const reconstructedPubKey = getPubKeyPoint(privKey);
if (this.metadata.pubKey.x.cmp(reconstructedPubKey.x) !== 0) {
throw CoreError.incorrectReconstruction();
}
this._setKey(privKey);
const returnObject = {
allKeys: [privKey]
};
if (_reconstructKeyMiddleware && Object.keys(this._reconstructKeyMiddleware).length > 0) {
// retrieve/reconstruct extra keys that live on metadata
await Promise.all(Object.keys(this._reconstructKeyMiddleware).map(async x => {
if (Object.prototype.hasOwnProperty.call(this._reconstructKeyMiddleware, x)) {
const extraKeys = await this._reconstructKeyMiddleware[x]();
returnObject[x] = extraKeys;
returnObject.allKeys.push(...extraKeys);
}
}));
}
// only valid when using TSS
// assign account salt from the tKey store if it exists
if (Object.keys(this.metadata.tssPolyCommits).length > 0) {
const accountSalt = await this.getTKeyStoreItem(TSS_MODULE, "accountSalt");
if (accountSalt && accountSalt.value) {
this._accountSalt = accountSalt.value;
} else {
const newSalt = generateSalt();
await this._setTKeyStoreItem(TSS_MODULE, {
id: "accountSalt",
value: newSalt
});
this._accountSalt = newSalt;
// this is a very specific case where an existing user does not have a salt.
// sync metadata to the cloud to ensure the salt is stored in case of manual sync mode.
// new users or importKey should not hit this case.
// NOTE: this is not a mistake, we force the sync for this case
if (this.manualSync) await this.syncLocalMetadataTransitions();
}
}
return _objectSpread({
privKey
}, returnObject);
}
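// Example (illustrative; "serviceProvider", "storageLayer" and "deviceShareStore"
// are hypothetical): once threshold shares have been input, the key can be rebuilt:
//   const tkey = new ThresholdKey({ serviceProvider, storageLayer });
//   await tkey.initialize();
//   tkey.inputShareStore(deviceShareStore); // second share to meet a 2/2 threshold
//   const { privKey } = await tkey.reconstructKey();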
reconstructLatestPoly() {
if (!this.metadata) {
throw CoreError.metadataUndefined();
}
const pubPoly = this.metadata.getLatestPublicPolynomial();
const pubPolyID = pubPoly.getPolynomialID();
const threshold = pubPoly.getThreshold();
const pointsArr = [];
const sharesForExistingPoly = Object.keys(this.shares[pubPolyID]);
if (sharesForExistingPoly.length < threshold) {
throw CoreError.unableToReconstruct("not enough shares to reconstruct poly");
}
if (new Set(sharesForExistingPoly).size !== sharesForExistingPoly.length) {
throw CoreError.default("share indexes should be unique");
}
for (let i = 0; i < threshold; i += 1) {
pointsArr.push(new Point(new BN(sharesForExistingPoly[i], "hex"), this.shares[pubPolyID][sharesForExistingPoly[i]].share.share));
}
return lagrangeInterpolatePolynomial(pointsArr);
}
async deleteShare(shareIndex, useTSS, tssOptions) {
if (!this.metadata) {
throw CoreError.metadataUndefined();
}
if (!this.privKey) {
throw CoreError.privateKeyUnavailable();
}
if (useTSS && !tssOptions) {
throw CoreError.default("cannot useTSS if tssOptions is empty");
}
const shareIndexToDelete = new BN(shareIndex, "hex");
const shareToDelete = this.outputShareStore(shareIndexToDelete);
if (shareIndexToDelete.cmp(new BN("1", "hex")) === 0) {
throw CoreError.fromCode(1001, "Unable to delete service provider share");
}
// Get existing shares
const pubPoly = this.metadata.getLatestPublicPolynomial();