bitcore-node
A blockchain indexing node with extended capabilities using bitcore
JavaScript
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TransactionStorage = exports.TransactionModel = exports.PruneMempoolStream = exports.MongoWriteStream = exports.MempoolTxEventTransform = exports.MempoolCoinEventTransform = exports.MempoolSafeTransform = void 0;
const lodash = __importStar(require("lodash"));
const stream_1 = require("stream");
const Loggify_1 = require("../decorators/Loggify");
const logger_1 = __importDefault(require("../logger"));
const libs_1 = require("../providers/libs");
const config_1 = require("../services/config");
const utils_1 = require("../utils");
const baseTransaction_1 = require("./baseTransaction");
const coin_1 = require("./coin");
const events_1 = require("./events");
const walletAddress_1 = require("./walletAddress");
const { onlyWalletEvents } = config_1.Config.get().services.event;
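// Gate event emission: when `onlyWalletEvents` is configured, only objects tagged with at
// least one wallet are broadcast; otherwise every object fires an event.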
function shouldFire(obj) {
return !onlyWalletEvents || (onlyWalletEvents && obj.wallets && obj.wallets.length > 0);
}
const MAX_BATCH_SIZE = 50000;
const getUpdatedBatchIfMempool = (batch, height) => height >= 0 /* SpentHeightIndicators.minimum */ ? batch : batch.map(op => exports.TransactionStorage.toMempoolSafeUpsert(op, height));
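/**
 * Object-mode Transform that passes each batch through getUpdatedBatchIfMempool, converting
 * ops to mempool-safe upserts (via TransactionStorage.toMempoolSafeUpsert) when the target
 * height is below SpentHeightIndicators.minimum, i.e. the batch belongs to the mempool.
 */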
class MempoolSafeTransform extends stream_1.Transform {
constructor(height) {
super({ objectMode: true });
this.height = height;
}
async _transform(coinBatch, _, done) {
done(null, getUpdatedBatchIfMempool(coinBatch, this.height));
}
}
exports.MempoolSafeTransform = MempoolSafeTransform;
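/**
 * Object-mode Transform that, for mempool batches (height below SpentHeightIndicators.minimum),
 * flattens each coin op's filter/$set/$setOnInsert into a coin object and signals an address
 * coin event for every coin that passes shouldFire. The batch itself is passed through unchanged.
 */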
class MempoolCoinEventTransform extends stream_1.Transform {
constructor(height) {
super({ objectMode: true });
this.height = height;
}
_transform(coinBatch, _, done) {
if (this.height < 0 /* SpentHeightIndicators.minimum */) {
const eventPayload = coinBatch
.map(coinOp => {
const coin = {
...coinOp.updateOne.update.$set,
...coinOp.updateOne.filter,
...coinOp.updateOne.update.$setOnInsert
};
const address = coin.address;
return { address, coin };
})
.filter(({ coin }) => shouldFire(coin));
events_1.EventStorage.signalAddressCoins(eventPayload);
}
done(null, coinBatch);
}
}
exports.MempoolCoinEventTransform = MempoolCoinEventTransform;
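/**
 * Object-mode Transform that mirrors MempoolCoinEventTransform for transaction ops: mempool
 * batches are flattened and emitted via EventStorage.signalTxs, then passed downstream untouched.
 */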
class MempoolTxEventTransform extends stream_1.Transform {
constructor(height) {
super({ objectMode: true });
this.height = height;
}
_transform(txBatch, _, done) {
if (this.height < 0 /* SpentHeightIndicators.minimum */) {
const eventPayload = txBatch
.map(op => ({ ...op.updateOne.update.$set, ...op.updateOne.filter, ...op.updateOne.update.$setOnInsert }))
.filter(shouldFire);
events_1.EventStorage.signalTxs(eventPayload);
}
done(null, txBatch);
}
}
exports.MempoolTxEventTransform = MempoolTxEventTransform;
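/**
 * Object-mode Transform that persists each incoming batch with collection.bulkWrite, splitting
 * the batch via utils.partition so roughly one bulkWrite per pooled connection (maxPoolSize)
 * can run in parallel. The original batch is forwarded downstream once the writes settle.
 */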
class MongoWriteStream extends stream_1.Transform {
constructor(collection) {
super({ objectMode: true });
this.collection = collection;
}
async _transform(data, _, done) {
await Promise.all((0, utils_1.partition)(data, data.length / config_1.Config.get().maxPoolSize).map(batch => this.collection.bulkWrite(batch)));
done(null, data);
}
}
exports.MongoWriteStream = MongoWriteStream;
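/**
 * Object-mode Transform that runs TransactionStorage.pruneMempool over each batch of spend ops,
 * invalidating conflicting (double-spending) mempool transactions before the spends are written.
 */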
class PruneMempoolStream extends stream_1.Transform {
constructor(chain, network, initialSyncComplete) {
super({ objectMode: true });
this.chain = chain;
this.network = network;
this.initialSyncComplete = initialSyncComplete;
}
async _transform(spendOps, _, done) {
await exports.TransactionStorage.pruneMempool({
chain: this.chain,
network: this.network,
initialSyncComplete: this.initialSyncComplete,
spendOps
});
done(null, spendOps);
}
}
exports.PruneMempoolStream = PruneMempoolStream;
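/**
 * Mongo-backed transaction model. batchImport drives three streaming pipelines (coin mints,
 * coin spends, then transactions) so a block or mempool batch is indexed in bulk, with event
 * emission and mempool pruning handled by the Transform stages above.
 */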
let TransactionModel = class TransactionModel extends baseTransaction_1.BaseTransaction {
constructor(storage) {
super(storage);
}
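/**
 * Imports a batch of transactions in three sequential stages:
 *  1. mint ops  -> mempool-safe upsert -> bulkWrite coins -> emit coin events (mempool only)
 *  2. spend ops -> prune conflicting mempool txs -> bulkWrite coin spends
 *  3. tx ops    -> mempool-safe upsert -> bulkWrite transactions -> emit tx events (mempool only)
 */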
async batchImport(params) {
const { initialSyncComplete, height, chain, network } = params;
const mintStream = new stream_1.Readable({
objectMode: true,
read: () => { }
});
const spentStream = new stream_1.Readable({
objectMode: true,
read: () => { }
});
const txStream = new stream_1.Readable({
objectMode: true,
read: () => { }
});
this.streamMintOps({ ...params, mintStream });
await new Promise(r => mintStream
.pipe(new MempoolSafeTransform(height))
.pipe(new MongoWriteStream(coin_1.CoinStorage.collection))
.pipe(new MempoolCoinEventTransform(height))
.on('finish', r));
this.streamSpendOps({ ...params, spentStream });
await new Promise(r => spentStream
.pipe(new PruneMempoolStream(chain, network, initialSyncComplete))
.pipe(new MongoWriteStream(coin_1.CoinStorage.collection))
.on('finish', r));
this.streamTxOps({ ...params, txs: params.txs, txStream });
await new Promise(r => txStream
.pipe(new MempoolSafeTransform(height))
.pipe(new MongoWriteStream(exports.TransactionStorage.collection))
.pipe(new MempoolTxEventTransform(height))
.on('finish', r));
}
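/**
 * Builds transaction upsert ops. On a forked child chain below the fork height the parent
 * chain's transactions are copied; otherwise fees and wallet tags are derived from the spent
 * coins (grouped by spentTxid) and ops are pushed to txStream in MAX_BATCH_SIZE chunks.
 */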
async streamTxOps(params) {
let { blockHash, blockTime, blockTimeNormalized, chain, height, network, parentChain, forkHeight, mempoolTime } = params;
if (parentChain && forkHeight && height < forkHeight) {
const parentTxs = await exports.TransactionStorage.collection
.find({ blockHeight: height, chain: parentChain, network })
.toArray();
params.txStream.push(parentTxs.map(parentTx => {
return {
updateOne: {
filter: { txid: parentTx.txid, chain, network },
update: {
$set: {
chain,
network,
blockHeight: height,
blockHash,
blockTime,
blockTimeNormalized,
coinbase: parentTx.coinbase,
fee: parentTx.fee,
size: parentTx.size,
locktime: parentTx.locktime,
inputCount: parentTx.inputCount,
outputCount: parentTx.outputCount,
value: parentTx.value,
wallets: [],
...(mempoolTime && { mempoolTime })
}
},
upsert: true,
forceServerObjectId: true
}
};
}));
}
else {
let spentQuery;
if (height > 0) {
spentQuery = { spentHeight: height, chain, network };
}
else {
spentQuery = { spentTxid: { $in: params.txs.map(tx => tx._hash) }, chain, network };
}
const spent = await coin_1.CoinStorage.collection
.find(spentQuery)
.project({ spentTxid: 1, value: 1, wallets: 1 })
.toArray();
const groupedSpends = spent.reduce((agg, coin) => {
if (!agg[coin.spentTxid]) {
agg[coin.spentTxid] = {
total: coin.value,
wallets: coin.wallets ? [...coin.wallets] : []
};
}
else {
agg[coin.spentTxid].total += coin.value;
agg[coin.spentTxid].wallets.push(...coin.wallets);
}
return agg;
}, {});
let txBatch = new Array();
for (let tx of params.txs) {
const txid = tx._hash;
const spent = groupedSpends[txid] || {};
const mintedWallets = tx.wallets || [];
const spentWallets = spent.wallets || [];
const txWallets = mintedWallets.concat(spentWallets);
const wallets = lodash.uniqBy(txWallets, wallet => wallet.toHexString());
let fee = 0;
if (groupedSpends[txid]) {
// TODO: Fee is negative for mempool txs
fee = groupedSpends[txid].total - tx.outputAmount;
if (fee < 0) {
logger_1.default.debug('Negative fee %o %o %o', txid, groupedSpends[txid], tx.outputAmount);
}
}
txBatch.push({
updateOne: {
filter: { txid, chain, network },
update: {
$set: {
chain,
network,
blockHeight: height,
blockHash,
blockTime,
blockTimeNormalized,
coinbase: tx.isCoinbase(),
fee,
size: tx.toBuffer().length,
locktime: tx.nLockTime,
inputCount: tx.inputs.length,
outputCount: tx.outputs.length,
value: tx.outputAmount,
wallets,
...(mempoolTime && { mempoolTime })
}
},
upsert: true,
forceServerObjectId: true
}
});
if (txBatch.length > MAX_BATCH_SIZE) {
params.txStream.push(txBatch);
txBatch = new Array();
}
}
if (txBatch.length) {
params.txStream.push(txBatch);
}
params.txStream.push(null);
}
}
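/**
 * Tags mint ops (and their parent txs) with wallet ObjectIds. Addresses are looked up against
 * WalletAddressStorage in chunks of 1000, but only once the initial sync is complete or wallet
 * creation before full sync is explicitly allowed.
 */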
async tagMintBatch(params) {
const { chain, network, initialSyncComplete, mintBatch } = params;
const walletConfig = config_1.Config.for('api').wallets;
if (initialSyncComplete || (walletConfig && walletConfig.allowCreationBeforeCompleteSync)) {
let addressBatch = new Set();
let wallets = [];
const findWalletsForAddresses = async (addresses) => {
let partialWallets = await walletAddress_1.WalletAddressStorage.collection
.find({ address: { $in: addresses }, chain, network }, { batchSize: 100 })
.project({ wallet: 1, address: 1 })
.toArray();
return partialWallets;
};
for (let mintOp of mintBatch) {
addressBatch.add(mintOp.updateOne.update.$set.address);
if (addressBatch.size >= 1000) {
const batchWallets = await findWalletsForAddresses(Array.from(addressBatch));
wallets = wallets.concat(batchWallets);
addressBatch.clear();
}
}
const remainingBatch = await findWalletsForAddresses(Array.from(addressBatch));
wallets = wallets.concat(remainingBatch);
if (wallets.length) {
for (let mintOp of mintBatch) {
let transformedWallets = wallets
.filter(wallet => wallet.address === mintOp.updateOne.update.$set.address)
.map(wallet => wallet.wallet);
mintOp.updateOne.update.$set.wallets = transformedWallets;
if (mintOp.updateOne.update.$setOnInsert) {
delete mintOp.updateOne.update.$setOnInsert.wallets;
if (!Object.keys(mintOp.updateOne.update.$setOnInsert).length) {
delete mintOp.updateOne.update.$setOnInsert;
}
}
}
for (let tx of params.txs) {
const coinsForTx = mintBatch.filter(mint => mint.updateOne.filter.mintTxid === tx._hash);
tx.wallets = coinsForTx.reduce((wallets, c) => {
wallets = wallets.concat(c.updateOne.update.$set.wallets);
return wallets;
}, new Array());
}
}
}
}
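/**
 * Builds coin mint upsert ops for every transaction output, deriving the address from the
 * output script (with a P2PK fallback that hashes the public key), tagging wallets via
 * tagMintBatch, and pushing batches to mintStream in MAX_BATCH_SIZE chunks. On a forked child
 * chain below the fork height, only outputs that are unspent on the parent chain, or spent at
 * or after the fork height, are kept.
 */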
async streamMintOps(params) {
let { chain, height, network, parentChain, forkHeight } = params;
let parentChainCoinsMap = new Map();
if (parentChain && forkHeight && height < forkHeight) {
let parentChainCoins = await coin_1.CoinStorage.collection
.find({
chain: parentChain,
network,
mintHeight: height,
$or: [{ spentHeight: { $lt: 0 /* SpentHeightIndicators.minimum */ } }, { spentHeight: { $gte: forkHeight } }]
})
.project({ mintTxid: 1, mintIndex: 1 })
.toArray();
for (let parentChainCoin of parentChainCoins) {
parentChainCoinsMap.set(`${parentChainCoin.mintTxid}:${parentChainCoin.mintIndex}`, true);
}
}
let mintBatch = new Array();
for (let tx of params.txs) {
tx._hash = tx.hash;
let isCoinbase = tx.isCoinbase();
for (let [index, output] of tx.outputs.entries()) {
if (parentChain &&
forkHeight &&
height < forkHeight &&
(!parentChainCoinsMap.size || !parentChainCoinsMap.get(`${tx._hash}:${index}`))) {
continue;
}
let address = '';
if (output.script) {
address = output.script.toAddress(network).toString(true);
if (address === 'false' && output.script.classify() === 'Pay to public key') {
let hash = libs_1.Libs.get(chain).lib.crypto.Hash.sha256ripemd160(output.script.chunks[0].buf);
address = libs_1.Libs.get(chain)
.lib.Address(hash, network)
.toString(true);
}
}
mintBatch.push({
updateOne: {
filter: {
mintTxid: tx._hash,
mintIndex: index,
chain,
network
},
update: {
$set: {
chain,
network,
address,
mintHeight: height,
coinbase: isCoinbase,
value: output.satoshis,
script: output.script && output.script.toBuffer()
},
$setOnInsert: {
spentHeight: -2 /* SpentHeightIndicators.unspent */,
wallets: []
}
},
upsert: true,
forceServerObjectId: true
}
});
}
if (mintBatch.length >= MAX_BATCH_SIZE) {
await this.tagMintBatch({ ...params, mintBatch });
params.mintStream.push(mintBatch);
mintBatch = new Array();
}
}
if (mintBatch.length) {
await this.tagMintBatch({ ...params, mintBatch });
params.mintStream.push(mintBatch);
}
params.mintStream.push(null);
mintBatch = new Array();
}
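/**
 * Builds coin spend ops: for every non-coinbase input, the referenced coin is marked spent at
 * this height (only if it is not already spent at a confirmed height). Skipped entirely on a
 * forked child chain below the fork height.
 */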
streamSpendOps(params) {
let { chain, network, height, parentChain, forkHeight } = params;
if (parentChain && forkHeight && height < forkHeight) {
params.spentStream.push(null);
return;
}
let spendOpsBatch = new Array();
for (let tx of params.txs) {
if (tx.isCoinbase()) {
continue;
}
for (let input of tx.inputs) {
let inputObj = input.toObject();
const updateQuery = {
updateOne: {
filter: {
mintTxid: inputObj.prevTxId,
mintIndex: inputObj.outputIndex,
spentHeight: { $lt: 0 /* SpentHeightIndicators.minimum */ },
chain,
network
},
update: {
$set: { spentTxid: tx._hash || tx.hash, spentHeight: height, sequenceNumber: inputObj.sequenceNumber }
}
}
};
spendOpsBatch.push(updateQuery);
}
if (spendOpsBatch.length > MAX_BATCH_SIZE) {
params.spentStream.push(spendOpsBatch);
spendOpsBatch = new Array();
}
}
if (spendOpsBatch.length) {
params.spentStream.push(spendOpsBatch);
}
params.spentStream.push(null);
spendOpsBatch = new Array();
}
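/**
 * Recursively collects the outputs of a transaction and of every descendant transaction that
 * spends them, skipping conflicting mints. Loads everything into memory; see
 * yieldRelatedOutputs for the streaming variant.
 */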
async findAllRelatedOutputs(forTx) {
const seen = {};
const allRelatedCoins = [];
const txCoins = await coin_1.CoinStorage.collection.find({ mintTxid: forTx, mintHeight: { $ne: -3 /* SpentHeightIndicators.conflicting */ } }).toArray();
for (let coin of txCoins) {
allRelatedCoins.push(coin);
seen[coin.mintTxid] = true;
if (coin.spentTxid && !seen[coin.spentTxid]) {
const outputs = await this.findAllRelatedOutputs(coin.spentTxid);
allRelatedCoins.push(...outputs);
}
}
return allRelatedCoins;
}
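/**
 * Async-generator variant of findAllRelatedOutputs: walks the same mint/spend graph but yields
 * coins one at a time from the Mongo cursor instead of buffering the whole result.
 */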
async *yieldRelatedOutputs(forTx) {
const seen = {};
const batchStream = coin_1.CoinStorage.collection.find({ mintTxid: forTx, mintHeight: { $ne: -3 /* SpentHeightIndicators.conflicting */ } });
let coin;
while (coin = (await batchStream.next())) {
seen[coin.mintTxid] = true;
yield coin;
if (coin.spentTxid && !seen[coin.spentTxid]) {
yield* this.yieldRelatedOutputs(coin.spentTxid);
seen[coin.spentTxid] = true;
}
}
}
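/**
 * Given the spend ops produced while confirming a block, finds mempool coins that were spent
 * by a different (now conflicting) txid and invalidates those double-spending transactions,
 * recording the mined txid as the replacement. No-op until the initial sync has completed.
 */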
async pruneMempool(params) {
const { chain, network, spendOps, initialSyncComplete } = params;
if (!initialSyncComplete || !spendOps.length) {
return;
}
const seenMinedTxids = new Set();
for (const spentOp of spendOps) {
const minedTxid = spentOp.updateOne.update.$set.spentTxid;
if (seenMinedTxids.has(minedTxid)) {
continue;
}
const conflictingInputsQuery = {
chain,
network,
spentHeight: -1 /* SpentHeightIndicators.pending */,
mintTxid: spentOp.updateOne.filter.mintTxid,
mintIndex: spentOp.updateOne.filter.mintIndex,
spentTxid: { $ne: minedTxid }
};
const conflictingInputsStream = coin_1.CoinStorage.collection.find(conflictingInputsQuery);
const seenInvalidTxids = new Set();
let input;
while ((input = await conflictingInputsStream.next())) {
if (seenInvalidTxids.has(input.spentTxid)) {
continue;
}
await this._invalidateTx({ chain, network, invalidTxid: input.spentTxid, replacedByTxid: minedTxid, simple: true });
seenInvalidTxids.add(input.spentTxid);
}
seenMinedTxids.add(minedTxid);
}
return;
}
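/**
 * Marks a transaction as conflicting: its tx document and minted coins get
 * SpentHeightIndicators.conflicting, its pending inputs are released back to unspent (except
 * those minted by an already-invalid parent), and, unless `simple` is set, any unconfirmed
 * descendants that spend its outputs are invalidated recursively first.
 */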
async _invalidateTx(params) {
const { chain, network, invalidTxid, replacedByTxid, invalidParentTxids = [], simple } = params;
if (!simple) {
const spentOutputsQuery = {
chain,
network,
spentHeight: -1 /* SpentHeightIndicators.pending */,
mintTxid: invalidTxid
};
// spent outputs of invalid tx
const spentOutputsStream = coin_1.CoinStorage.collection.find(spentOutputsQuery);
const seenTxids = new Set();
let output;
while ((output = await spentOutputsStream.next())) {
if (!output.spentTxid || seenTxids.has(output.spentTxid)) {
continue;
}
// invalidate descendant tx (a tx spending an unconfirmed output of the invalid tx)
await this._invalidateTx({ chain, network, invalidTxid: output.spentTxid, invalidParentTxids: [...invalidParentTxids, invalidTxid], simple });
}
}
const setTx = { blockHeight: -3 /* SpentHeightIndicators.conflicting */ };
if (replacedByTxid) {
setTx.replacedByTxid = replacedByTxid;
}
await Promise.all([
// Tx
this.collection.updateMany({ chain, network, txid: invalidTxid }, { $set: setTx }),
// Tx Outputs
coin_1.CoinStorage.collection.updateMany({ chain, network, mintTxid: invalidTxid }, { $set: { mintHeight: -3 /* SpentHeightIndicators.conflicting */ } }),
// Tx Inputs
coin_1.CoinStorage.collection.updateMany(
// the `mintTxid: { $nin: invalidParentTxids }` ensures that an invalid parent tx's outputs aren't marked "unspent"
{ chain, network, spentTxid: invalidTxid, mintTxid: { $nin: invalidParentTxids }, spentHeight: -1 /* SpentHeightIndicators.pending */ }, { $set: { spentHeight: -2 /* SpentHeightIndicators.unspent */, spentTxid: '' } })
]);
}
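/**
 * Shapes a transaction document for the API: dates are serialized to ISO strings, missing
 * numeric fields default to -1, and replacedByTxid is included only for conflicting
 * transactions. Returns an object when options.object is set, otherwise a JSON string.
 */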
_apiTransform(tx, options) {
const transaction = {
txid: tx.txid || '',
network: tx.network || '',
chain: tx.chain || '',
blockHeight: tx.blockHeight || -1,
blockHash: tx.blockHash || '',
blockTime: tx.blockTime ? tx.blockTime.toISOString() : '',
blockTimeNormalized: tx.blockTimeNormalized ? tx.blockTimeNormalized.toISOString() : '',
coinbase: tx.coinbase || false,
locktime: tx.locktime || -1,
inputCount: tx.inputCount || -1,
outputCount: tx.outputCount || -1,
size: tx.size || -1,
fee: tx.fee || -1,
value: tx.value || -1
};
if (tx.blockHeight === -3 /* SpentHeightIndicators.conflicting */) {
transaction.replacedByTxid = tx.replacedByTxid || '';
}
if (options && options.object) {
return transaction;
}
return JSON.stringify(transaction);
}
};
exports.TransactionModel = TransactionModel;
exports.TransactionModel = TransactionModel = __decorate([
Loggify_1.LoggifyClass
], TransactionModel);
exports.TransactionStorage = new TransactionModel();
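/*
 * Usage sketch (illustrative only, not executed by this module). Assumes a bitcore-lib
 * Transaction array and an already-initialized storage connection; the literal chain/network
 * values and the `txs` variable below are placeholders, not values defined in this file.
 *
 *   const { TransactionStorage } = require('./transaction');
 *   await TransactionStorage.batchImport({
 *     chain: 'BTC',
 *     network: 'mainnet',
 *     txs,                        // bitcore-lib Transaction[]
 *     height: -1,                 // SpentHeightIndicators.pending => treated as mempool
 *     blockTime: new Date(),
 *     blockTimeNormalized: new Date(),
 *     initialSyncComplete: true,
 *     mempoolTime: new Date()
 *   });
 */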
//# sourceMappingURL=transaction.js.map