// bitcore-node
// A blockchain indexing node with extended capabilities using bitcore
// (compiled JavaScript output; see p2p.js.map)
"use strict";
// TypeScript emit helper: re-exports property `k` of module `m` onto `o`
// (optionally renamed to `k2`), using a live getter where property
// descriptors are supported so the binding tracks later reassignment.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(target, source, sourceKey, targetKey) {
    if (targetKey === undefined) targetKey = sourceKey;
    var descriptor = Object.getOwnPropertyDescriptor(source, sourceKey);
    // A plain data property (or a getter on a non-ESM namespace) is wrapped
    // in an accessor so the re-export stays live.
    var needsAccessor = !descriptor || ("get" in descriptor ? !source.__esModule : descriptor.writable || descriptor.configurable);
    if (needsAccessor) {
        descriptor = { enumerable: true, get: function() { return source[sourceKey]; } };
    }
    Object.defineProperty(target, targetKey, descriptor);
}) : (function(target, source, sourceKey, targetKey) {
    // Legacy engines: plain (non-live) copy.
    if (targetKey === undefined) targetKey = sourceKey;
    target[targetKey] = source[sourceKey];
}));
// TypeScript emit helper: installs `v` as the enumerable "default" export
// on namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(target, value) {
    Object.defineProperty(target, "default", { enumerable: true, value: value });
}) : function(target, value) {
    // Legacy engines without defineProperty support: plain assignment.
    target["default"] = value;
});
// TypeScript emit helper emulating `import * as ns` over CommonJS modules:
// re-binds every own key except "default" onto a fresh namespace object and
// sets the module itself as that object's "default" export. Native ES
// modules (`__esModule` flag) pass through untouched.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        // Lazily select the key-enumeration strategy on first use, then
        // replace ourselves with it for subsequent calls.
        ownKeys = Object.getOwnPropertyNames || function (obj) {
            var keys = [];
            for (var key in obj) {
                if (Object.prototype.hasOwnProperty.call(obj, key)) keys.push(key);
            }
            return keys;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) {
            var keys = ownKeys(mod);
            for (var i = 0; i < keys.length; i++) {
                if (keys[i] !== "default") __createBinding(result, mod, keys[i]);
            }
        }
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.BitcoinP2PWorker = void 0;
const events_1 = require("events");
const logger_1 = __importStar(require("../../logger"));
const block_1 = require("../../models/block");
const state_1 = require("../../models/state");
const transaction_1 = require("../../models/transaction");
const chain_state_1 = require("../../providers/chain-state");
const libs_1 = require("../../providers/libs");
const p2p_1 = require("../../services/p2p");
const utils_1 = require("../../utils");
class BitcoinP2PWorker extends p2p_1.BaseP2PWorker {
constructor({ chain, network, chainConfig, blockModel = block_1.BitcoinBlockStorage }) {
super({ chain, network, chainConfig, blockModel });
this.blockModel = blockModel;
this.chain = chain;
this.network = network;
this.bitcoreLib = libs_1.Libs.get(chain).lib;
this.bitcoreP2p = libs_1.Libs.get(chain).p2p;
this.chainConfig = chainConfig;
this.events = new events_1.EventEmitter();
this.isSyncing = false;
this.initialSyncComplete = false;
this.invCache = {};
this.invCacheLimits = {
[this.bitcoreP2p.Inventory.TYPE.BLOCK]: 100,
[this.bitcoreP2p.Inventory.TYPE.TX]: 100000
};
this.messages = new this.bitcoreP2p.Messages({
network: this.bitcoreLib.Networks.get(this.network)
});
this.pool = new this.bitcoreP2p.Pool({
addrs: this.chainConfig.trustedPeers.map(peer => {
return {
ip: {
v4: peer.host
},
port: peer.port
};
}),
dnsSeed: false,
listenAddr: false,
network: this.network,
messages: this.messages
});
}
cacheInv(type, hash) {
if (!this.invCache[type]) {
this.invCache[type] = [];
}
if (this.invCache[type].length > this.invCacheLimits[type]) {
this.invCache[type].shift();
}
this.invCache[type].push(hash);
}
isCachedInv(type, hash) {
if (!this.invCache[type]) {
this.invCache[type] = [];
}
return this.invCache[type].includes(hash);
}
setupListeners() {
this.pool.on('peerready', peer => {
logger_1.default.info(`${(0, logger_1.timestamp)()} | Connected to peer: ${peer.host}:${peer.port.toString().padEnd(5)} | Chain: ${this.chain} | Network: ${this.network}`);
});
this.pool.on('peerconnect', peer => {
logger_1.default.info(`${(0, logger_1.timestamp)()} | Connected to peer: ${peer.host}:${peer.port.toString().padEnd(5)} | Chain: ${this.chain} | Network: ${this.network}`);
});
this.pool.on('peerdisconnect', peer => {
logger_1.default.warn(`${(0, logger_1.timestamp)()} | Not connected to peer: ${peer.host}:${peer.port.toString().padEnd(5)} | Chain: ${this.chain} | Network: ${this.network}`);
});
this.pool.on('peertx', async (peer, message) => {
const hash = message.transaction.hash;
logger_1.default.debug('peer tx received: %o', {
peer: `${peer.host}:${peer.port}`,
chain: this.chain,
network: this.network,
hash
});
if (this.isSyncingNode && !this.isCachedInv(this.bitcoreP2p.Inventory.TYPE.TX, hash)) {
this.cacheInv(this.bitcoreP2p.Inventory.TYPE.TX, hash);
await this.processTransaction(message.transaction);
this.events.emit('transaction', message.transaction);
}
});
this.pool.on('peerblock', async (peer, message) => {
const { block } = message;
const { hash } = block;
logger_1.default.debug('peer block received: %o', {
peer: `${peer.host}:${peer.port}`,
chain: this.chain,
network: this.network,
hash
});
const blockInCache = this.isCachedInv(this.bitcoreP2p.Inventory.TYPE.BLOCK, hash);
if (!blockInCache) {
block.transactions.forEach(transaction => this.cacheInv(this.bitcoreP2p.Inventory.TYPE.TX, transaction.hash));
this.cacheInv(this.bitcoreP2p.Inventory.TYPE.BLOCK, hash);
}
if (this.isSyncingNode && (!blockInCache || this.isSyncing)) {
this.events.emit(hash, message.block);
this.events.emit('block', message.block);
if (!this.isSyncing) {
this.sync();
}
}
});
this.pool.on('peerheaders', (peer, message) => {
logger_1.default.debug('peerheaders message received: %o', {
peer: `${peer.host}:${peer.port}`,
chain: this.chain,
network: this.network,
count: message.headers.length
});
this.events.emit('headers', message.headers);
});
this.pool.on('peerinv', (peer, message) => {
if (this.isSyncingNode) {
const filtered = message.inventory.filter(inv => {
const hash = this.bitcoreLib.encoding
.BufferReader(inv.hash)
.readReverse()
.toString('hex');
return !this.isCachedInv(inv.type, hash);
});
if (filtered.length) {
peer.sendMessage(this.messages.GetData(filtered));
}
}
});
}
async connect() {
this.setupListeners();
this.pool.connect();
this.connectInterval = setInterval(this.pool.connect.bind(this.pool), 5000);
return new Promise(resolve => {
this.pool.once('peerready', () => resolve());
});
}
async disconnect() {
this.pool.removeAllListeners();
this.pool.disconnect();
if (this.connectInterval) {
clearInterval(this.connectInterval);
}
}
async getHeaders(candidateHashes) {
let received = false;
return new Promise(async (resolve) => {
this.events.once('headers', headers => {
received = true;
resolve(headers);
});
while (!received) {
this.pool.sendMessage(this.messages.GetHeaders({ starts: candidateHashes }));
await (0, utils_1.wait)(1000);
}
});
}
async getBlock(hash) {
logger_1.default.debug('Getting block, hash:', hash);
let received = false;
return new Promise(async (resolve) => {
this.events.once(hash, (block) => {
logger_1.default.debug('Received block, hash: %o', hash);
received = true;
resolve(block);
});
while (!received) {
this.pool.sendMessage(this.messages.GetData.forBlock(hash));
await (0, utils_1.wait)(1000);
}
});
}
getBestPoolHeight() {
let best = 0;
for (const peer of Object.values(this.pool._connectedPeers)) {
if (peer.bestHeight > best) {
best = peer.bestHeight;
}
}
return best;
}
async processBlock(block) {
await this.blockModel.addBlock({
chain: this.chain,
network: this.network,
forkHeight: this.chainConfig.forkHeight,
parentChain: this.chainConfig.parentChain,
initialSyncComplete: this.initialSyncComplete,
block,
initialHeight: this.chainConfig.syncStartHeight
});
}
async processTransaction(tx) {
const now = new Date();
await transaction_1.TransactionStorage.batchImport({
chain: this.chain,
network: this.network,
txs: [tx],
height: -1 /* SpentHeightIndicators.pending */,
mempoolTime: now,
blockTime: now,
blockTimeNormalized: now,
initialSyncComplete: true
});
}
async syncDone() {
return new Promise(resolve => this.events.once('SYNCDONE', resolve));
}
async sync() {
if (this.isSyncing) {
return false;
}
this.isSyncing = true;
const { chain, chainConfig, network } = this;
const { parentChain, forkHeight } = chainConfig;
const state = await state_1.StateStorage.collection.findOne({});
this.initialSyncComplete = state?.initialSyncComplete?.includes(`${chain}:${network}`);
let tip = await chain_state_1.ChainStateProvider.getLocalTip({ chain, network });
if (parentChain && (!tip || tip.height < forkHeight)) {
let parentTip = await chain_state_1.ChainStateProvider.getLocalTip({ chain: parentChain, network });
while (!parentTip || parentTip.height < forkHeight) {
logger_1.default.info(`Waiting until ${parentChain} syncs before ${chain} ${network}`);
await (0, utils_1.wait)(5000);
parentTip = await chain_state_1.ChainStateProvider.getLocalTip({ chain: parentChain, network });
}
}
const getHeaders = async () => {
let locators = await chain_state_1.ChainStateProvider.getLocatorHashes({ chain, network });
if (locators.length === 1 && locators[0] === Array(65).join('0') && this.chainConfig.syncStartHash) {
locators = [this.chainConfig.syncStartHash];
}
return this.getHeaders(locators);
};
let headers = await getHeaders();
while (headers.length > 0) {
tip = await chain_state_1.ChainStateProvider.getLocalTip({ chain, network });
let currentHeight = tip?.height ?? (this.chainConfig.syncStartHeight || 0);
const startingHeight = currentHeight;
const startingTime = Date.now();
let lastLog = startingTime;
logger_1.default.info(`${(0, logger_1.timestamp)()} | Syncing ${headers.length} blocks | Chain: ${chain} | Network: ${network}`);
// Default starting hash is the genesis block +1. If we have no blocks, we need to fetch the genesis block
if (currentHeight == 0 && headers[0]) {
const block = await this.getBlock(headers[0].hash);
if (block.header.prevHash) {
const prevHash = Buffer.from(block.header.prevHash).reverse().toString('hex');
const genesisBlock = await this.getBlock(prevHash);
await this.processBlock(genesisBlock);
currentHeight++;
}
}
for (const header of headers) {
try {
const block = await this.getBlock(header.hash);
await this.processBlock(block);
currentHeight++;
const now = Date.now();
const oneSecond = 1000;
if (now - lastLog > oneSecond) {
const blocksProcessed = currentHeight - startingHeight;
const elapsedMinutes = (now - startingTime) / (60 * oneSecond);
logger_1.default.info(`${(0, logger_1.timestamp)()} | Syncing... | Chain: ${chain} | Network: ${network} |${(blocksProcessed / elapsedMinutes)
.toFixed(2)
.padStart(8)} blocks/min | Height: ${currentHeight.toString().padStart(7)}`);
lastLog = now;
}
}
catch (err) {
logger_1.default.error(`${(0, logger_1.timestamp)()} | Error syncing | Chain: ${chain} | Network: ${network} | %o`, err);
this.isSyncing = false;
return this.sync();
}
}
headers = await getHeaders();
}
logger_1.default.info(`${(0, logger_1.timestamp)()} | Sync completed | Chain: ${chain} | Network: ${network}`);
this.isSyncing = false;
await state_1.StateStorage.collection.findOneAndUpdate({}, { $addToSet: { initialSyncComplete: `${chain}:${network}` } }, { upsert: true });
this.events.emit('SYNCDONE');
return true;
}
async stop() {
this.stopping = true;
logger_1.default.debug(`Stopping worker for chain ${this.chain}`);
this.queuedRegistrations.forEach(clearTimeout);
await this.unregisterSyncingNode();
await this.disconnect();
}
async start() {
logger_1.default.debug(`Started worker for chain ${this.chain}`);
await this.connect();
this.refreshSyncingNode();
}
}
exports.BitcoinP2PWorker = BitcoinP2PWorker;
//# sourceMappingURL=p2p.js.map