
turtlecoin-wallet-backend: WalletSynchronizer


[![Build Status](https://travis-ci.org/turtlecoin/turtlecoin-wallet-backend-js.svg?branch=master)](https://travis-ci.org/turtlecoin/turtlecoin-wallet-backend-js)

"use strict";
// Copyright (c) 2018, Zpalmtree
//
// Please see the included LICENSE file for more information.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const _ = require("lodash");
const sizeof = require('object-sizeof');
const events_1 = require("events");
const Config_1 = require("./Config");
const Utilities_1 = require("./Utilities");
const Constants_1 = require("./Constants");
const SynchronizationStatus_1 = require("./SynchronizationStatus");
const Logger_1 = require("./Logger");
const CryptoWrapper_1 = require("./CryptoWrapper");
const Types_1 = require("./Types");
/**
 * Decrypts blocks for our transactions and inputs
 */
class WalletSynchronizer extends events_1.EventEmitter {
    constructor(daemon, subWallets, startTimestamp, startHeight, privateViewKey, config) {
        super();
        /**
         * Stores the progress of our synchronization
         */
        this.synchronizationStatus = new SynchronizationStatus_1.SynchronizationStatus();
        /**
         * Whether we are already downloading a chunk of blocks
         */
        this.fetchingBlocks = false;
        /**
         * Stored blocks for later processing
         */
        this.storedBlocks = [];
        /**
         * Transactions that have disappeared from the pool and not appeared in a
         * block, and the amount of times they have failed this check.
         */
        this.cancelledTransactionsFailCount = new Map();
        /**
         * Function to run on block download completion to ensure reset() works
         * correctly without blocks being stored after wiping them.
         */
        this.finishedFunc = undefined;
        /**
         * Number of times we've failed to fetch blocks.
         */
        this.failCount = 0;
        /**
         * Last time we fetched blocks from the daemon. If this goes over the
         * configured limit, we'll emit deadnode.
         */
        this.lastDownloadedBlocks = new Date();
        this.config = new Config_1.Config();
        this.daemon = daemon;
        this.startTimestamp = startTimestamp;
        this.startHeight = startHeight;
        this.privateViewKey = privateViewKey;
        this.subWallets = subWallets;
        this.config = config;
    }
    static fromJSON(json) {
        const walletSynchronizer = Object.create(WalletSynchronizer.prototype);
        return Object.assign(walletSynchronizer, {
            privateViewKey: json.privateViewKey,
            startHeight: json.startHeight,
            startTimestamp: json.startTimestamp,
            synchronizationStatus: SynchronizationStatus_1.SynchronizationStatus.fromJSON(json.transactionSynchronizerStatus),
        });
    }
    swapNode(newDaemon) {
        this.daemon = newDaemon;
    }
    getScanHeights() {
        return [this.startHeight, this.startTimestamp];
    }
    /**
     * Initialize things we can't initialize from the JSON
     */
    initAfterLoad(subWallets, daemon, config) {
        this.subWallets = subWallets;
        this.daemon = daemon;
        this.storedBlocks = [];
        this.config = config;
        this.cancelledTransactionsFailCount = new Map();
        this.lastDownloadedBlocks = new Date();
        this.failCount = 0;
    }
    /**
     * Convert from class to stringable type
     */
    toJSON() {
        return {
            privateViewKey: this.privateViewKey,
            startHeight: this.startHeight,
            startTimestamp: this.startTimestamp,
            transactionSynchronizerStatus: this.synchronizationStatus.toJSON(),
        };
    }
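    /*
     * Illustrative sketch: the shape produced by toJSON() above and expected by
     * fromJSON(). The values shown are hypothetical placeholders.
     *
     *     {
     *         "privateViewKey": "<hex private view key>",
     *         "startHeight": 1000000,
     *         "startTimestamp": 0,
     *         "transactionSynchronizerStatus": <SynchronizationStatus.toJSON() output>
     *     }
     *
     * fromJSON() only restores these persisted fields; the daemon, subWallets
     * and config references are re-attached afterwards via initAfterLoad().
     */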
    processBlock(block, ourInputs) {
        const txData = new Types_1.TransactionData();
        if (this.config.scanCoinbaseTransactions) {
            const tx = this.processCoinbaseTransaction(block, ourInputs);
            if (tx !== undefined) {
                txData.transactionsToAdd.push(tx);
            }
        }
        for (const rawTX of block.transactions) {
            const [tx, keyImagesToMarkSpent] = this.processTransaction(block, ourInputs, rawTX);
            if (tx !== undefined) {
                txData.transactionsToAdd.push(tx);
                txData.keyImagesToMarkSpent = txData.keyImagesToMarkSpent.concat(keyImagesToMarkSpent);
            }
        }
        txData.inputsToAdd = ourInputs;
        return txData;
    }
    /**
     * Process transaction outputs of the given block. No external dependencies,
     * lets us easily swap out with a C++ replacement for SPEEEED
     *
     * @param keys Array of spend keys in the format [publicKey, privateKey]
     */
    processBlockOutputs(block, privateViewKey, spendKeys, isViewWallet, processCoinbaseTransactions) {
        return __awaiter(this, void 0, void 0, function* () {
            let inputs = [];
            /* Process the coinbase tx if we're not skipping them for speed */
            if (processCoinbaseTransactions && block.coinbaseTransaction) {
                inputs = inputs.concat(yield this.processTransactionOutputs(block.coinbaseTransaction, block.blockHeight));
            }
            /* Process the normal txs */
            for (const tx of block.transactions) {
                inputs = inputs.concat(yield this.processTransactionOutputs(tx, block.blockHeight));
            }
            return inputs;
        });
    }
    /**
     * Get the height of the sync process
     */
    getHeight() {
        return this.synchronizationStatus.getHeight();
    }
    reset(scanHeight, scanTimestamp) {
        return new Promise((resolve) => {
            const f = () => {
                this.startHeight = scanHeight;
                this.startTimestamp = scanTimestamp;
                /* Discard sync status */
                this.synchronizationStatus = new SynchronizationStatus_1.SynchronizationStatus(scanHeight - 1);
                this.storedBlocks = [];
            };
            if (this.fetchingBlocks) {
                this.finishedFunc = () => {
                    f();
                    resolve();
                    this.finishedFunc = undefined;
                };
            }
            else {
                f();
                resolve();
            }
        });
    }
    rewind(scanHeight) {
        return new Promise((resolve) => {
            const f = () => {
                this.startHeight = scanHeight;
                this.startTimestamp = 0;
                /* Discard sync status */
                this.synchronizationStatus = new SynchronizationStatus_1.SynchronizationStatus(scanHeight - 1);
                this.storedBlocks = [];
            };
            if (this.fetchingBlocks) {
                this.finishedFunc = () => {
                    f();
                    resolve();
                    this.finishedFunc = undefined;
                };
            }
            else {
                f();
                resolve();
            }
        });
    }
    /**
     * Takes in hashes that we have previously sent. Returns transactions which
     * are no longer in the pool, and not in a block, and therefore have
     * returned to our wallet
     */
    findCancelledTransactions(transactionHashes) {
        return __awaiter(this, void 0, void 0, function* () {
            /* This is the common case - don't waste time making a useless request
               to the daemon */
            if (_.isEmpty(transactionHashes)) {
                return [];
            }
            const cancelled = yield this.daemon.getCancelledTransactions(transactionHashes);
            const toRemove = [];
            for (const [hash, failCount] of this.cancelledTransactionsFailCount) {
                /* Hash still not found, increment fail count */
                if (cancelled.includes(hash)) {
                    /* Failed too many times, cancel transaction, return funds to wallet */
                    if (failCount === 10) {
                        toRemove.push(hash);
                        this.cancelledTransactionsFailCount.delete(hash);
                    }
                    else {
                        this.cancelledTransactionsFailCount.set(hash, failCount + 1);
                    }
                    /* Hash has since been found, remove from fail count array */
                }
                else {
                    this.cancelledTransactionsFailCount.delete(hash);
                }
            }
            for (const hash of cancelled) {
                /* Transaction with no history, first fail, add to map. */
                if (!this.cancelledTransactionsFailCount.has(hash)) {
                    this.cancelledTransactionsFailCount.set(hash, 1);
                }
            }
            return toRemove;
        });
    }
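    /*
     * Illustrative sketch of how findCancelledTransactions() above is intended
     * to be polled. The `synchronizer` variable and the hashes are hypothetical.
     *
     *     const sentHashes = ['abc123...', 'def456...'];
     *     const cancelled = await synchronizer.findCancelledTransactions(sentHashes);
     *     // A hash is only returned once it has been reported missing from the
     *     // pool (and from a block) on 10 consecutive checks; the caller can
     *     // then treat that transaction as cancelled and return the funds.
     */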
    /**
     * Retrieve blockCount blocks from the internal store. Does not remove
     * them.
     */
    fetchBlocks(blockCount) {
        return __awaiter(this, void 0, void 0, function* () {
            /* Fetch more blocks if we haven't got any downloaded yet */
            if (this.storedBlocks.length === 0) {
                Logger_1.logger.log('No blocks stored, fetching more.', Logger_1.LogLevel.DEBUG, Logger_1.LogCategory.SYNC);
                const [check, downloadSuccess] = yield this.downloadBlocks();
                /* In the middle of fetching blocks, don't need to fetch blocks, etc */
                if (check) {
                    if (!downloadSuccess) {
                        this.failCount++;
                        /* Seconds since we last got a block */
                        const diff = (new Date().getTime() - this.lastDownloadedBlocks.getTime()) / 1000;
                        if (diff > this.config.maxLastFetchedBlockInterval) {
                            this.emit('deadnode');
                        }
                    }
                    else {
                        this.lastDownloadedBlocks = new Date();
                        this.failCount = 0;
                    }
                }
            }
            return [_.take(this.storedBlocks, blockCount), this.failCount];
        });
    }
    dropBlock(blockHeight, blockHash) {
        /* It's possible for this function to get run twice. Need to make sure
           we don't remove more than the block we just processed. */
        if (this.storedBlocks.length >= 1
            && this.storedBlocks[0].blockHeight === blockHeight
            && this.storedBlocks[0].blockHash === blockHash) {
            this.storedBlocks = _.drop(this.storedBlocks);
            this.synchronizationStatus.storeBlockHash(blockHeight, blockHash);
        }
        /* sizeof() gets a tad expensive... */
        if (blockHeight % 10 === 0 && this.shouldFetchMoreBlocks()) {
            /* Note - not awaiting here */
            this.downloadBlocks().then(([check, downloadSuccess]) => {
                if (check) {
                    if (!downloadSuccess) {
                        this.failCount++;
                        /* Seconds since we last got a block */
                        const diff = (new Date().getTime() - this.lastDownloadedBlocks.getTime()) / 1000;
                        if (diff > this.config.maxLastFetchedBlockInterval) {
                            this.emit('deadnode');
                        }
                    }
                    else {
                        this.lastDownloadedBlocks = new Date();
                        this.failCount = 0;
                    }
                }
            });
        }
    }
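    /*
     * Illustrative sketch of the fetch/process/drop cycle that fetchBlocks()
     * and dropBlock() above are designed for. The `synchronizer` variable, the
     * block count, and how txData is applied are hypothetical; this loop is
     * normally driven internally by the library rather than by user code.
     *
     *     const [blocks, failCount] = await synchronizer.fetchBlocks(100);
     *
     *     for (const block of blocks) {
     *         const inputs = await synchronizer.processBlockOutputs(
     *             block, privateViewKey, spendKeys, isViewWallet, processCoinbase,
     *         );
     *         const txData = synchronizer.processBlock(block, inputs);
     *         // ...apply txData to the wallet state, then discard the block...
     *         synchronizer.dropBlock(block.blockHeight, block.blockHash);
     *     }
     */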
    getStoredBlockCheckpoints() {
        const hashes = [];
        for (const block of this.storedBlocks) {
            /* Add to start of array - we want hashes in descending block height order */
            hashes.unshift(block.blockHash);
        }
        return _.take(hashes, Constants_1.LAST_KNOWN_BLOCK_HASHES_SIZE);
    }
    /**
     * Only retrieve more blocks if we're not getting close to the memory limit
     */
    shouldFetchMoreBlocks() {
        /* Don't fetch more if we're already doing so */
        if (this.fetchingBlocks) {
            return false;
        }
        /* Don't fetch more blocks if we've failed to fetch blocks multiple times */
        if (this.failCount > 2) {
            return false;
        }
        const ramUsage = sizeof(this.storedBlocks);
        if (ramUsage < this.config.blockStoreMemoryLimit) {
            Logger_1.logger.log(`Approximate ram usage of stored blocks: ${Utilities_1.prettyPrintBytes(ramUsage)}, fetching more.`, Logger_1.LogLevel.DEBUG, Logger_1.LogCategory.SYNC);
            return true;
        }
        return false;
    }
    getBlockCheckpoints() {
        const unprocessedBlockHashes = this.getStoredBlockCheckpoints();
        const recentProcessedBlockHashes = this.synchronizationStatus.getRecentBlockHashes();
        const blockHashCheckpoints = this.synchronizationStatus.getBlockCheckpoints();
        const combined = unprocessedBlockHashes.concat(recentProcessedBlockHashes);
        /* Take the 50 most recent block hashes, along with the infrequent
           checkpoints, to handle deep forks. */
        return _.take(combined, Constants_1.LAST_KNOWN_BLOCK_HASHES_SIZE)
            .concat(blockHashCheckpoints);
    }
    downloadBlocks() {
        return __awaiter(this, void 0, void 0, function* () {
            /* Don't make more than one fetch request at once */
            if (this.fetchingBlocks) {
                return [false, true];
            }
            this.fetchingBlocks = true;
            const localDaemonBlockCount = this.daemon.getLocalDaemonBlockCount();
            const walletBlockCount = this.getHeight();
            if (localDaemonBlockCount < walletBlockCount) {
                this.fetchingBlocks = false;
                return [false, true];
            }
            /* Get the checkpoints of the blocks we've got stored, so we can fetch
               later ones. Also use the checkpoints of the previously processed
               ones, in case we don't have any blocks yet. */
            const blockCheckpoints = this.getBlockCheckpoints();
            let blocks = [];
            let topBlock;
            try {
                [blocks, topBlock] = yield this.daemon.getWalletSyncData(blockCheckpoints, this.startHeight, this.startTimestamp, this.config.blocksPerDaemonRequest);
            }
            catch (err) {
                Logger_1.logger.log('Failed to get blocks from daemon', Logger_1.LogLevel.DEBUG, Logger_1.LogCategory.SYNC);
                if (this.finishedFunc) {
                    this.finishedFunc();
                }
                this.fetchingBlocks = false;
                return [true, false];
            }
            if (typeof topBlock === 'object' && blocks.length === 0) {
                if (this.finishedFunc) {
                    this.finishedFunc();
                }
                this.synchronizationStatus.storeBlockHash(topBlock.height, topBlock.hash);
                /* Synced, store the top block so sync status displays correctly if
                   we are not scanning coinbase tx only blocks. */
                if (this.storedBlocks.length === 0) {
                    this.emit('heightchange', topBlock.height);
                }
                Logger_1.logger.log('Zero blocks received from daemon, fully synced', Logger_1.LogLevel.DEBUG, Logger_1.LogCategory.SYNC);
                if (this.finishedFunc) {
                    this.finishedFunc();
                }
                this.fetchingBlocks = false;
                return [true, true];
            }
            if (blocks.length === 0) {
                Logger_1.logger.log('Zero blocks received from daemon, possibly fully synced', Logger_1.LogLevel.DEBUG, Logger_1.LogCategory.SYNC);
                if (this.finishedFunc) {
                    this.finishedFunc();
                }
                this.fetchingBlocks = false;
                return [true, topBlock];
            }
            /* Timestamp is transient and can change - block height is constant. */
            if (this.startTimestamp !== 0) {
                this.startTimestamp = 0;
                this.startHeight = blocks[0].blockHeight;
                this.subWallets.convertSyncTimestampToHeight(this.startTimestamp, this.startHeight);
            }
            /* Add the new blocks to the store */
            this.storedBlocks = this.storedBlocks.concat(blocks);
            if (this.finishedFunc) {
                this.finishedFunc();
            }
            this.fetchingBlocks = false;
            return [true, true];
        });
    }
    /**
     * Process the outputs of a transaction, and create inputs that are ours
     */
    processTransactionOutputs(rawTX, blockHeight) {
        return __awaiter(this, void 0, void 0, function* () {
            const inputs = [];
            const derivation = yield CryptoWrapper_1.generateKeyDerivation(rawTX.transactionPublicKey, this.privateViewKey, this.config);
            const spendKeys = this.subWallets.getPublicSpendKeys();
            for (const [outputIndex, output] of rawTX.keyOutputs.entries()) {
                /* Derive the spend key from the transaction, using the previous derivation */
                const derivedSpendKey = yield CryptoWrapper_1.underivePublicKey(derivation, outputIndex, output.key, this.config);
                /* See if the derived spend key matches any of our spend keys */
                if (!_.includes(spendKeys, derivedSpendKey)) {
                    continue;
                }
                /* The public spend key of the subwallet that owns this input */
                const ownerSpendKey = derivedSpendKey;
                /* Not spent yet! */
                const spendHeight = 0;
                const keyImage = yield this.subWallets.getTxInputKeyImage(ownerSpendKey, derivation, outputIndex);
                const txInput = new Types_1.TransactionInput(keyImage, output.amount, blockHeight, rawTX.transactionPublicKey, outputIndex, output.globalIndex, output.key, spendHeight, rawTX.unlockTime, rawTX.hash);
                inputs.push([ownerSpendKey, txInput]);
            }
            return inputs;
        });
    }
    processCoinbaseTransaction(block, ourInputs) {
        /* Should be guaranteed to be defined here */
        const rawTX = block.coinbaseTransaction;
        const transfers = new Map();
        const relevantInputs = _.filter(ourInputs, ([key, input]) => {
            return input.parentTransactionHash === rawTX.hash;
        });
        for (const [publicSpendKey, input] of relevantInputs) {
            transfers.set(publicSpendKey, input.amount + (transfers.get(publicSpendKey) || 0));
        }
        if (!_.isEmpty(transfers)) {
            /* Coinbase transactions have no fee */
            const fee = 0;
            const isCoinbaseTransaction = true;
            /* Coinbase transactions can't have payment IDs */
            const paymentID = '';
            return new Types_1.Transaction(transfers, rawTX.hash, fee, block.blockHeight, block.blockTimestamp, paymentID, rawTX.unlockTime, isCoinbaseTransaction);
        }
        return undefined;
    }
    processTransaction(block, ourInputs, rawTX) {
        const transfers = new Map();
        const relevantInputs = _.filter(ourInputs, ([key, input]) => {
            return input.parentTransactionHash === rawTX.hash;
        });
        for (const [publicSpendKey, input] of relevantInputs) {
            transfers.set(publicSpendKey, input.amount + (transfers.get(publicSpendKey) || 0));
        }
        const spentKeyImages = [];
        for (const input of rawTX.keyInputs) {
            const [found, publicSpendKey] = this.subWallets.getKeyImageOwner(input.keyImage);
            if (found) {
                transfers.set(publicSpendKey, -input.amount + (transfers.get(publicSpendKey) || 0));
                spentKeyImages.push([publicSpendKey, input.keyImage]);
            }
        }
        if (!_.isEmpty(transfers)) {
            const fee = _.sumBy(rawTX.keyInputs, 'amount') - _.sumBy(rawTX.keyOutputs, 'amount');
            const isCoinbaseTransaction = false;
            return [new Types_1.Transaction(transfers, rawTX.hash, fee, block.blockHeight, block.blockTimestamp, rawTX.paymentID, rawTX.unlockTime, isCoinbaseTransaction), spentKeyImages];
        }
        return [undefined, []];
    }
}
exports.WalletSynchronizer = WalletSynchronizer;
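
A minimal wiring sketch, assuming the daemon, subWallets, config, and privateViewKey objects are built elsewhere by the library; the require path and variable names below are illustrative, not part of this file.

// Minimal wiring sketch - `daemon`, `subWallets`, `config` and `privateViewKey`
// are assumed to come from the rest of the library; the require path is
// illustrative only.
const { WalletSynchronizer } = require('./WalletSynchronizer');

const synchronizer = new WalletSynchronizer(
    daemon,         // object exposing getWalletSyncData(), getLocalDaemonBlockCount(), etc.
    subWallets,     // the wallet's subwallet container (spend keys, key images)
    0,              // startTimestamp - 0 means sync by height instead
    1000000,        // startHeight (hypothetical)
    privateViewKey, // hex private view key
    config,
);

// Events emitted by the class above.
synchronizer.on('deadnode', () => console.log('Daemon has not returned blocks recently'));
synchronizer.on('heightchange', (height) => console.log(`Top block height: ${height}`));

// Persisting and restoring, matching toJSON() / fromJSON() / initAfterLoad() above.
const saved = synchronizer.toJSON();
const restored = WalletSynchronizer.fromJSON(saved);
restored.initAfterLoad(subWallets, daemon, config);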