
bitcore-node


A blockchain indexing node with extended capabilities using bitcore

transaction.spec.js (250 lines, 11.7 kB)
"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); Object.defineProperty(exports, "__esModule", { value: true }); const bson_1 = require("bson"); const chai_1 = require("chai"); const crypto = __importStar(require("crypto")); const coin_1 = require("../../../src/models/coin"); const transaction_1 = require("../../../src/models/transaction"); const walletAddress_1 = require("../../../src/models/walletAddress"); const transaction_2 = require("../../../src/providers/chain-state/evm/models/transaction"); const unprocessedBlocksETH_1 = require("../../data/ETH/unprocessedBlocksETH"); const helpers_1 = require("../../helpers"); const integration_1 = require("../../helpers/integration"); async function makeMempoolTxChain(chain, network, startingTxid, chainLength = 1) { let txid = startingTxid; let nextTxid = crypto .createHash('sha256') .update(txid + 1) .digest() .toString('hex'); let allTxids = new Array(); for (let i = 1; i <= chainLength; i++) { const badMempoolTx = { chain, network, blockHeight: -1, txid }; const badMempoolOutputs = { chain, network, mintHeight: -1, mintTxid: txid, spentTxid: i != chainLength ? 
nextTxid : '', mintIndex: 0, spentHeight: -1 }; await transaction_1.TransactionStorage.collection.insertOne(badMempoolTx); await coin_1.CoinStorage.collection.insertOne(badMempoolOutputs); allTxids.push(txid); txid = nextTxid; nextTxid = crypto .createHash('sha256') .update(txid + 1) .digest() .toString('hex'); } return allTxids; } describe('Transaction Model', function () { const suite = this; this.timeout(30000); before(integration_1.intBeforeHelper); after(async () => (0, integration_1.intAfterHelper)(suite)); beforeEach(async () => { await (0, helpers_1.resetDatabase)(); }); const chain = 'BCH'; const network = 'integration'; const blockTx = { chain, network, blockHeight: 1, txid: '01234' }; const blockTxOutputs = { chain, network, mintHeight: 1, mintTxid: '01234', mintIndex: 0, spentHeight: -1, spentTxid: '12345' }; const block2TxOutputs = { chain, network, mintHeight: 2, mintTxid: '123456', mintIndex: 0, spentHeight: -1 }; it('should mark transactions invalid that were in the mempool, but no longer valid', async () => { // insert a valid tx, with a valid output await transaction_1.TransactionStorage.collection.insertOne(blockTx); await coin_1.CoinStorage.collection.insertOne(blockTxOutputs); const chainLength = 1; const txids = await makeMempoolTxChain(chain, network, blockTxOutputs.spentTxid, chainLength); const spentOps = new Array(); spentOps.push({ updateOne: { filter: { chain, network, mintIndex: blockTxOutputs.mintIndex, mintTxid: blockTxOutputs.mintTxid, spentHeight: { $lt: 0 } }, update: { $set: { spentHeight: block2TxOutputs.mintHeight, spentTxid: block2TxOutputs.mintTxid } } } }); await transaction_1.TransactionStorage.pruneMempool({ chain, network, initialSyncComplete: true, spendOps: spentOps }); const badTxs = await transaction_1.TransactionStorage.collection.find({ chain, network, txid: { $in: txids } }).toArray(); (0, chai_1.expect)(badTxs.length).to.eq(chainLength); (0, chai_1.expect)(badTxs.map(tx => tx.blockHeight)).to.deep.eq(new Array(chainLength).fill(-3 /* SpentHeightIndicators.conflicting */)); const goodTxs = await transaction_1.TransactionStorage.collection.find({ chain, network, txid: blockTx.txid }).toArray(); (0, chai_1.expect)(goodTxs.length).to.eq(1); (0, chai_1.expect)(goodTxs[0].txid).to.eq(blockTx.txid); (0, chai_1.expect)(goodTxs[0].blockHeight).to.eq(blockTx.blockHeight); }); it('should mark a chain of transactions invalid that were in the mempool, but no longer valid', async () => { // insert a valid tx, with a valid output await transaction_1.TransactionStorage.collection.insertOne(blockTx); await coin_1.CoinStorage.collection.insertOne(blockTxOutputs); const chainLength = 5; const txids = await makeMempoolTxChain(chain, network, blockTxOutputs.spentTxid, chainLength); const allRelatedCoins = await transaction_1.TransactionStorage.findAllRelatedOutputs(blockTxOutputs.spentTxid); (0, chai_1.expect)(allRelatedCoins.length).to.eq(chainLength); const spentOps = new Array(); spentOps.push({ updateOne: { filter: { chain, network, mintIndex: blockTxOutputs.mintIndex, mintTxid: blockTxOutputs.mintTxid, spentHeight: { $lt: 0 } }, update: { $set: { spentHeight: block2TxOutputs.mintHeight, spentTxid: block2TxOutputs.mintTxid } } } }); await transaction_1.TransactionStorage.pruneMempool({ chain, network, initialSyncComplete: true, spendOps: spentOps }); const badTxs = await transaction_1.TransactionStorage.collection.find({ chain, network, txid: { $in: txids } }).toArray(); (0, chai_1.expect)(badTxs.length).to.eq(chainLength); // the replaced tx is marked as 
conflicting, all the rest still pending to be cleaned up by pruning service (0, chai_1.expect)(badTxs[0].blockHeight).to.eq(-3 /* SpentHeightIndicators.conflicting */); (0, chai_1.expect)(badTxs[0].replacedByTxid).to.exist; (0, chai_1.expect)(badTxs.slice(1).every(tx => tx.blockHeight === -1 /* SpentHeightIndicators.pending */)).to.equal(true); const goodTxs = await transaction_1.TransactionStorage.collection.find({ chain, network, txid: blockTx.txid }).toArray(); (0, chai_1.expect)(goodTxs.length).to.eq(1); (0, chai_1.expect)(goodTxs[0].txid).to.eq(blockTx.txid); (0, chai_1.expect)(goodTxs[0].blockHeight).to.eq(blockTx.blockHeight); }); // skipping because it's the same test as the previous one with the pruning service invalidating the massive chain it.skip('should mark a massive chain of transactions invalid that were in the mempool, but no longer valid', async () => { // insert a valid tx, with a valid output await transaction_1.TransactionStorage.collection.insertOne(blockTx); await coin_1.CoinStorage.collection.insertOne(blockTxOutputs); const chainLength = 2000; const txids = await makeMempoolTxChain(chain, network, blockTxOutputs.spentTxid, chainLength); const allRelatedCoins = await transaction_1.TransactionStorage.findAllRelatedOutputs(blockTxOutputs.spentTxid); (0, chai_1.expect)(allRelatedCoins.length).to.eq(chainLength); const spentOps = new Array(); spentOps.push({ updateOne: { filter: { chain, network, mintIndex: blockTxOutputs.mintIndex, mintTxid: blockTxOutputs.mintTxid, spentHeight: { $lt: 0 } }, update: { $set: { spentHeight: block2TxOutputs.mintHeight, spentTxid: block2TxOutputs.mintTxid } } } }); await transaction_1.TransactionStorage.pruneMempool({ chain, network, initialSyncComplete: true, spendOps: spentOps }); const badTxs = await transaction_1.TransactionStorage.collection.find({ chain, network, txid: { $in: txids } }).toArray(); (0, chai_1.expect)(badTxs.length).to.eq(chainLength); (0, chai_1.expect)(badTxs.map(tx => tx.blockHeight)).to.deep.eq(new Array(chainLength).fill(-3 /* SpentHeightIndicators.conflicting */)); const goodTxs = await transaction_1.TransactionStorage.collection.find({ chain, network, txid: blockTx.txid }).toArray(); (0, chai_1.expect)(goodTxs.length).to.eq(1); (0, chai_1.expect)(goodTxs[0].txid).to.eq(blockTx.txid); (0, chai_1.expect)(goodTxs[0].blockHeight).to.eq(blockTx.blockHeight); }); describe('#batchImport', () => { const chain = 'ETH'; const network = 'regtest'; const wallet = new bson_1.ObjectId(); const address = '0x3Ec3dA6E14BE9518A9a6e92DdCC6ACfF2CEFf4ef'; beforeEach(async () => { await walletAddress_1.WalletAddressStorage.collection.insertOne({ chain, network, wallet, address, processed: true }); }); it('should update eth transactions with related wallet id correctly (incoming)', async () => { const block = unprocessedBlocksETH_1.unprocessedEthBlocks[0]; // block containing an eth transfer to 0x3Ec3dA6E14BE9518A9a6e92DdCC6ACfF2CEFf4ef await transaction_2.EVMTransactionStorage.batchImport({ ...block }); const walletTxs = await transaction_2.EVMTransactionStorage.collection.find({ chain, network, wallets: wallet }).toArray(); (0, chai_1.expect)(walletTxs.length).eq(1); }); it('should update erc20 transactions with related wallet id correctly (incoming)', async () => { const block = unprocessedBlocksETH_1.unprocessedEthBlocks[1]; // block containing an ERC20 transfer to 0x3Ec3dA6E14BE9518A9a6e92DdCC6ACfF2CEFf4ef await transaction_2.EVMTransactionStorage.batchImport({ ...block }); const walletTxs = await 
transaction_2.EVMTransactionStorage.collection.find({ chain, network, wallets: wallet }).toArray(); (0, chai_1.expect)(walletTxs.length).eq(1); }); }); }); //# sourceMappingURL=transaction.spec.js.map
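
The negative blockHeight values asserted in the pruneMempool tests above are sentinel states, not real block heights. Below is a minimal TypeScript sketch of the two SpentHeightIndicators members this spec relies on, reconstructed only from the inline comments (e.g. `-3 /* SpentHeightIndicators.conflicting */`); the actual enum in the bitcore-node source may define additional members.

// Sketch only, not the authoritative definition: values taken from the
// inline comments in this compiled spec.
enum SpentHeightIndicators {
  // transaction/output is still sitting in the mempool, not yet mined
  pending = -1,
  // transaction was replaced or double-spent by a confirmed spend
  conflicting = -3
}

// Mirrors the assertions above: after pruneMempool runs, the head of the
// replaced mempool chain is marked conflicting (-3), while its descendants
// stay pending (-1) until the pruning service cleans them up.
const headState = SpentHeightIndicators.conflicting;      // -3
const descendantState = SpentHeightIndicators.pending;    // -1
console.log({ headState, descendantState });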