lisk-framework
Lisk blockchain application platform
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Consensus = void 0;
const events_1 = require("events");
const lisk_chain_1 = require("@liskhq/lisk-chain");
const lisk_utils_1 = require("@liskhq/lisk-utils");
const lisk_db_1 = require("@liskhq/lisk-db");
const lisk_codec_1 = require("@liskhq/lisk-codec");
const lisk_cryptography_1 = require("@liskhq/lisk-cryptography");
const synchronizer_1 = require("./synchronizer");
const errors_1 = require("../../errors");
const errors_2 = require("./synchronizer/errors");
const network_endpoint_1 = require("./network_endpoint");
const network_endpoint_2 = require("../legacy/network_endpoint");
const schema_1 = require("./schema");
const constants_1 = require("./constants");
const fork_choice_rule_1 = require("./fork_choice/fork_choice_rule");
const commit_pool_1 = require("./certificate_generation/commit_pool");
const abi_1 = require("../../abi");
const utils_1 = require("./utils");
const constants_2 = require("./certificate_generation/constants");
const metrics_1 = require("../metrics/metrics");
const BLOCK_VERSION = 2;
const forkStatusList = [
fork_choice_rule_1.ForkStatus.IDENTICAL_BLOCK,
fork_choice_rule_1.ForkStatus.VALID_BLOCK,
fork_choice_rule_1.ForkStatus.DOUBLE_FORGING,
fork_choice_rule_1.ForkStatus.TIE_BREAK,
fork_choice_rule_1.ForkStatus.DIFFERENT_CHAIN,
fork_choice_rule_1.ForkStatus.DISCARD,
];
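// Consensus engine: orchestrates fork choice, block verification and execution,
// BFT bookkeeping, certificate generation (commit pool) and chain synchronization.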
class Consensus {
constructor(args) {
this._metrics = {
height: metrics_1.defaultMetrics.gauge('consensus_height'),
finalizedHeight: metrics_1.defaultMetrics.gauge('consensus_finalizedHeight'),
maxHeightPrevoted: metrics_1.defaultMetrics.gauge('consensus_maxHeightPrevoted'),
maxHeightCertified: metrics_1.defaultMetrics.gauge('consensus_maxHeightCertified'),
maxRemovalHeight: metrics_1.defaultMetrics.gauge('consensus_maxRemovalHeight'),
blockExecution: metrics_1.defaultMetrics.histogram('consensus_blockExecution', [0.01, 0.05, 0.1, 0.2, 0.5, 1, 5]),
fork: metrics_1.defaultMetrics.counter('consensus_fork'),
};
this._stop = false;
this.events = new events_1.EventEmitter();
this._chain = args.chain;
this._abi = args.abi;
this._network = args.network;
this._mutex = new lisk_utils_1.jobHandlers.Mutex();
this._bft = args.bft;
this._genesisConfig = args.genesisConfig;
this._systemConfig = args.systemConfig;
this._inclusionProofKeys = args.systemConfig.inclusionProofKeys.map(k => Buffer.from(k, 'hex'));
}
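// Wires up the commit pool, the current and legacy network endpoints, and the
// synchronizer with its block-sync and fast-chain-switching mechanisms, registers
// the network RPC/event handlers, and executes and persists the genesis block if
// it is not stored yet before loading the last blocks from the database.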
async init(args) {
this._logger = args.logger;
this._db = args.db;
this._commitPool = new commit_pool_1.CommitPool({
db: this._db,
blockTime: this._genesisConfig.blockTime,
minCertifyHeight: this._genesisConfig.minimumCertifyHeight,
bftMethod: this._bft.method,
chain: this._chain,
network: this._network,
});
this._endpoint = new network_endpoint_1.NetworkEndpoint({
chain: this._chain,
logger: this._logger,
network: this._network,
db: this._db,
commitPool: this._commitPool,
});
this._legacyEndpoint = new network_endpoint_2.LegacyNetworkEndpoint({
logger: this._logger,
network: this._network,
db: args.legacyDB,
});
const blockExecutor = this._createBlockExecutor();
const blockSyncMechanism = new synchronizer_1.BlockSynchronizationMechanism({
chain: this._chain,
logger: this._logger,
network: this._network,
blockExecutor,
});
const fastChainSwitchMechanism = new synchronizer_1.FastChainSwitchingMechanism({
chain: this._chain,
logger: this._logger,
network: this._network,
blockExecutor,
});
this._synchronizer = new synchronizer_1.Synchronizer({
chainModule: this._chain,
logger: this._logger,
blockExecutor,
mechanisms: [blockSyncMechanism, fastChainSwitchMechanism],
});
await this._bft.init(this._genesisConfig.bftBatchSize, this._genesisConfig.blockTime);
this._network.registerEndpoint(constants_1.NETWORK_LEGACY_GET_BLOCKS_FROM_ID, async ({ data, peerId }) => this._legacyEndpoint.handleRPCGetLegacyBlocksFromID(data, peerId));
this._network.registerEndpoint(constants_1.NETWORK_RPC_GET_LAST_BLOCK, ({ peerId }) => this._endpoint.handleRPCGetLastBlock(peerId));
this._network.registerEndpoint(constants_1.NETWORK_RPC_GET_BLOCKS_FROM_ID, async ({ data, peerId }) => this._endpoint.handleRPCGetBlocksFromId(data, peerId));
this._network.registerEndpoint(constants_1.NETWORK_RPC_GET_HIGHEST_COMMON_BLOCK, async ({ data, peerId }) => this._endpoint.handleRPCGetHighestCommonBlock(data, peerId));
this._network.registerHandler(constants_1.NETWORK_EVENT_POST_BLOCK, ({ data, peerId }) => {
this.onBlockReceive(data, peerId).catch(err => {
this._logger.error({ err: err, peerId }, 'Failed to handle received block');
});
});
this._network.registerHandler(constants_2.NETWORK_EVENT_COMMIT_MESSAGES, ({ data, peerId }) => {
this._endpoint.handleEventSingleCommit(data, peerId).catch(err => {
this._logger.error({ err: err, peerId }, 'Failed to handle received single commits');
});
});
this._network.registerHandler(constants_1.NETWORK_EVENT_POST_NODE_INFO, ({ data, peerId }) => {
this._logger.debug({ peerId, data }, 'Received new node info');
});
this._logger.debug({
id: args.genesisBlock.header.id,
transactionRoot: args.genesisBlock.header.transactionRoot,
}, 'Initializing consensus component.');
const genesisExist = await this._chain.genesisBlockExist(args.genesisBlock);
const stateStore = new lisk_chain_1.StateStore(this._db);
if (!genesisExist) {
args.genesisBlock.validateGenesis();
const genesisEvents = await this._executeGenesisBlock(stateStore, args.genesisBlock);
const bftParams = await this._bft.method.getBFTParameters(stateStore, args.genesisBlock.header.height + 1);
if (!args.genesisBlock.header.validatorsHash ||
!bftParams.validatorsHash.equals(args.genesisBlock.header.validatorsHash)) {
throw new Error('Genesis block validators hash is invalid');
}
await this._verifyValidatorsHash(stateStore, args.genesisBlock);
await this._verifyEventRoot(args.genesisBlock, genesisEvents);
const batch = new lisk_db_1.Batch();
const diff = stateStore.finalize(batch);
await this._chain.saveBlock(args.genesisBlock, genesisEvents, { batch, diff, stateStore }, args.genesisBlock.header.height);
}
await this._chain.loadLastBlocks(args.genesisBlock);
this._logger.info('Consensus component ready.');
}
syncing() {
return this._synchronizer.isActive;
}
finalizedHeight() {
return this._chain.finalizedHeight;
}
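// Handler for NETWORK_EVENT_POST_BLOCK: skipped while syncing; malformed data or an
// invalid block earns the sending peer a 100-point penalty.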
async onBlockReceive(data, peerId) {
if (this.syncing()) {
this._logger.debug("Client is syncing. Can't process new block at the moment.");
return;
}
if (!Buffer.isBuffer(data)) {
const errorMessage = 'Received invalid post block data. Applying a penalty to the peer';
this._logger.warn({
peerId,
error: '',
}, errorMessage);
this._network.applyPenaltyOnPeer({
peerId,
penalty: 100,
});
return;
}
let decodedData;
try {
decodedData = lisk_codec_1.codec.decode(schema_1.postBlockEventSchema, data);
}
catch (error) {
this._logger.warn({
err: error,
data,
}, 'Received post block broadcast request in an unexpected format. Applying a penalty to the peer');
this._network.applyPenaltyOnPeer({
peerId,
penalty: 100,
});
throw error;
}
const { block: blockBytes } = decodedData;
let block;
try {
block = lisk_chain_1.Block.fromBytes(blockBytes);
}
catch (error) {
this._logger.warn({
err: error,
data,
}, 'Received post block broadcast request in a format that could not be decoded. Applying a penalty to the peer');
this._network.applyPenaltyOnPeer({
peerId,
penalty: 100,
});
throw error;
}
this.events.emit(constants_1.CONSENSUS_EVENT_NETWORK_BLOCK_NEW, { block });
try {
const endExecuteMetrics = this._metrics.blockExecution.startTimer();
await this._execute(block, peerId);
endExecuteMetrics();
}
catch (error) {
if (error instanceof errors_1.ApplyPenaltyError) {
this._logger.warn({
err: error,
data,
}, 'Received post block broadcast request with invalid block. Applying a penalty to the peer');
this._network.applyPenaltyOnPeer({
peerId,
penalty: 100,
});
}
throw error;
}
}
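// Executes a locally supplied block; on failure the ABI context is cleared and the error logged.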
async execute(block) {
try {
await this._execute(block, '127.0.0.1:0');
}
catch (error) {
await this._abi.clear({});
this._logger.error({ err: error }, 'Failed to execute block.');
}
}
async start() {
this._commitPool.start();
this._endpoint.start();
}
async stop() {
this._stop = true;
await this._mutex.acquire();
this._endpoint.stop();
this._commitPool.stop();
}
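// Delegates to the commit pool to compute the aggregate commit for the given method context.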
async getAggregateCommit(methodContext) {
const aggCommit = await this._commitPool.getAggregateCommit(methodContext);
return aggCommit;
}
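// Creates this validator's single commit for the given block header and adds it to the commit pool.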
certifySingleCommit(blockHeader, validatorInfo) {
const singleCommit = this._commitPool.createSingleCommit(blockHeader, validatorInfo, this._chain.chainID);
this._commitPool.addCommit(singleCommit, true);
}
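// Compares a peer-reported (height, maxHeightPrevoted) against the local tip,
// maxHeightPrevoted first; the version 0 branch handles a genesis-only chain.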
isSynced(height, maxHeightPrevoted) {
const lastBlockHeader = this._chain.lastBlock.header;
if (lastBlockHeader.version === 0) {
return height <= lastBlockHeader.height && maxHeightPrevoted <= lastBlockHeader.height;
}
return (maxHeightPrevoted < lastBlockHeader.maxHeightPrevoted ||
(maxHeightPrevoted === lastBlockHeader.maxHeightPrevoted && height < lastBlockHeader.height));
}
async getMaxRemovalHeight() {
return this._commitPool.getMaxRemovalHeight();
}
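// Core block processing, serialized by the mutex. Applies the fork-choice rule and
// dispatches on the result: DISCARD and DOUBLE_FORGING emit a fork event and drop the
// block, IDENTICAL_BLOCK is a no-op, DIFFERENT_CHAIN starts synchronization, TIE_BREAK
// replaces the current tip (restoring it if the new block fails), and a valid block is
// validated and executed, after which node info, metrics and optional inclusion proofs
// are updated.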
async _execute(block, peerID) {
if (this._stop) {
return;
}
await this._mutex.runExclusive(async () => {
this._logger.debug({ id: block.header.id, height: block.header.height }, 'Starting to process block');
const { lastBlock } = this._chain;
const forkStatus = (0, fork_choice_rule_1.forkChoice)(block.header, lastBlock.header, this._bft.method);
if (!forkStatusList.includes(forkStatus)) {
this._logger.debug({ status: forkStatus, blockId: block.header.id }, 'Unknown fork status');
throw new Error('Unknown fork status');
}
if (forkStatus === fork_choice_rule_1.ForkStatus.DISCARD) {
this._logger.debug({ id: block.header.id, height: block.header.height }, 'Discarding block');
this._logger.info({
id: block.header.id,
height: block.header.height,
generator: lisk_cryptography_1.address.getLisk32AddressFromAddress(block.header.generatorAddress),
maxHeightPrevoted: block.header.maxHeightPrevoted,
maxHeightGenerated: block.header.maxHeightGenerated,
}, 'Discarding the block');
this._metrics.fork.inc();
this.events.emit(constants_1.CONSENSUS_EVENT_FORK_DETECTED, {
block,
});
return;
}
if (forkStatus === fork_choice_rule_1.ForkStatus.IDENTICAL_BLOCK) {
this._logger.debug({ id: block.header.id, height: block.header.height }, 'Block already processed');
return;
}
if (forkStatus === fork_choice_rule_1.ForkStatus.DOUBLE_FORGING) {
this._logger.warn({
id: block.header.id,
generatorAddress: lisk_cryptography_1.address.getLisk32AddressFromAddress(block.header.generatorAddress),
}, 'Discarding block due to double forging');
this._logger.info({
id: block.header.id,
height: block.header.height,
generator: lisk_cryptography_1.address.getLisk32AddressFromAddress(block.header.generatorAddress),
maxHeightPrevoted: block.header.maxHeightPrevoted,
maxHeightGenerated: block.header.maxHeightGenerated,
}, 'Detected a fork');
this._metrics.fork.inc();
this.events.emit(constants_1.CONSENSUS_EVENT_FORK_DETECTED, {
block,
});
return;
}
if (forkStatus === fork_choice_rule_1.ForkStatus.DIFFERENT_CHAIN) {
this._logger.debug({ id: block.header.id, height: block.header.height }, 'Detected different chain to sync');
this._logger.info({
id: block.header.id,
height: block.header.height,
generator: lisk_cryptography_1.address.getLisk32AddressFromAddress(block.header.generatorAddress),
maxHeightPrevoted: block.header.maxHeightPrevoted,
maxHeightGenerated: block.header.maxHeightGenerated,
}, 'Detected a fork');
this.events.emit(constants_1.CONSENSUS_EVENT_FORK_DETECTED, {
block,
});
this._metrics.fork.inc();
await this._sync(block, peerID);
return;
}
if (forkStatus === fork_choice_rule_1.ForkStatus.TIE_BREAK) {
this._logger.info({ id: lastBlock.header.id, height: lastBlock.header.height }, 'Received tie breaking block');
this._logger.info({
id: block.header.id,
height: block.header.height,
generator: lisk_cryptography_1.address.getLisk32AddressFromAddress(block.header.generatorAddress),
maxHeightPrevoted: block.header.maxHeightPrevoted,
maxHeightGenerated: block.header.maxHeightGenerated,
}, 'Detected a fork');
this.events.emit(constants_1.CONSENSUS_EVENT_FORK_DETECTED, {
block,
});
try {
this._chain.validateBlock(block, {
version: BLOCK_VERSION,
});
}
catch (error) {
throw new errors_1.ApplyPenaltyError(error);
}
const previousLastBlock = lisk_utils_1.objects.cloneDeep(lastBlock);
await this._deleteBlock(lastBlock);
try {
await this._executeValidated(block);
}
catch (err) {
this._logger.error({
id: block.header.id,
previousBlockId: previousLastBlock.header.id,
err: err,
}, 'Failed to apply newly received block. Restoring previous block.');
await this._executeValidated(previousLastBlock, {
skipBroadcast: true,
});
}
return;
}
this._logger.debug({ id: block.header.id, height: block.header.height }, 'Processing valid block');
try {
this._chain.validateBlock(block, {
version: BLOCK_VERSION,
});
}
catch (error) {
throw new errors_1.ApplyPenaltyError(error);
}
await this._executeValidated(block);
this._network.applyNodeInfo({
height: block.header.height,
lastBlockID: block.header.id,
maxHeightPrevoted: block.header.maxHeightPrevoted,
blockVersion: block.header.version,
});
this._metrics.height.set(block.header.height);
this._metrics.finalizedHeight.set(this._chain.finalizedHeight);
this._metrics.maxHeightCertified.set(block.header.aggregateCommit.height);
this._metrics.maxHeightPrevoted.set(block.header.maxHeightPrevoted);
try {
if ((this._systemConfig.keepInclusionProofsForHeights > 0 ||
this._systemConfig.keepInclusionProofsForHeights === -1) &&
this._inclusionProofKeys.length > 0) {
this._logger.info(`Starting saving inclusion proof at height ${block.header.height}`);
const result = await this._abi.prove({
keys: this._inclusionProofKeys,
stateRoot: block.header.stateRoot,
});
await this._chain.dataAccess.setInclusionProofs(result.proof, block.header.height);
this._logger.info(`Successfully set inclusion proof at height ${block.header.height}`);
}
}
catch (error) {
this._logger.error({ err: error }, 'Failed to save inclusion proof for the given keys.');
}
});
}
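// Verifies the header and assets, broadcasts the block (unless skipBroadcast), executes
// it through the ABI, checks validatorsHash and eventRoot, commits the state root,
// persists the block and emits the finalized-height and new-block events.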
async _executeValidated(block, options = {}) {
var _a;
const stateStore = new lisk_chain_1.StateStore(this._db);
let contextID;
try {
await this._verify(block);
contextID = await this._verifyAssets(block);
}
catch (error) {
throw new errors_1.ApplyPenaltyError(error);
}
if (!options.skipBroadcast) {
this._network.send({
event: constants_1.NETWORK_EVENT_POST_BLOCK,
data: lisk_codec_1.codec.encode(schema_1.postBlockEventSchema, { block: block.getBytes() }),
});
this._logger.debug({
id: block.header.id,
height: block.header.height,
generator: lisk_cryptography_1.address.getLisk32AddressFromAddress(block.header.generatorAddress),
numberOfTransactions: block.transactions.length,
numberOfAssets: block.assets.getAll().length,
}, 'Block broadcasted');
this.events.emit(constants_1.CONSENSUS_EVENT_BLOCK_BROADCAST, {
block,
});
}
const events = await this._executeBlock(contextID, stateStore, block);
const bftVotes = await this._bft.method.getBFTHeights(stateStore);
let { finalizedHeight } = this._chain;
let finalizedHeightChangeRange;
if (bftVotes.maxHeightPrecommitted > finalizedHeight) {
finalizedHeightChangeRange = {
from: finalizedHeight,
to: bftVotes.maxHeightPrecommitted,
};
finalizedHeight = bftVotes.maxHeightPrecommitted;
}
await this._verifyValidatorsHash(stateStore, block);
await this._verifyEventRoot(block, events);
const batch = new lisk_db_1.Batch();
const diff = stateStore.finalize(batch);
await this._commitBlock(contextID, block);
await this._chain.saveBlock(block, events, { batch, diff, stateStore }, finalizedHeight, {
removeFromTempTable: (_a = options.removeFromTempTable) !== null && _a !== void 0 ? _a : false,
});
const isFinalizedHeightChanged = !!finalizedHeightChangeRange;
if (isFinalizedHeightChanged) {
await this._abi.finalize({
finalizedHeight,
});
this.events.emit(constants_1.CONSENSUS_EVENT_FINALIZED_HEIGHT_CHANGED, finalizedHeightChangeRange);
}
this.events.emit(constants_1.CONSENSUS_EVENT_BLOCK_NEW, { block });
this._logger.info({
id: block.header.id,
height: block.header.height,
generator: lisk_cryptography_1.address.getLisk32AddressFromAddress(block.header.generatorAddress),
numberOfTransactions: block.transactions.length,
numberOfAssets: block.assets.getAll().length,
numberOfEvents: events.length,
}, 'Block executed');
return block;
}
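// Header verification pipeline: timestamp slot, height, previousBlockID, generator,
// BFT properties, block signature and aggregate commit.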
async _verify(block) {
const stateStore = new lisk_chain_1.StateStore(this._db);
this._verifyTimestamp(block);
this._verifyBlockHeight(block);
this._verifyPreviousBlockID(block);
await this._verifyGeneratorAddress(stateStore, block);
await this._verifyBFTProperties(stateStore, block);
await this._verifyBlockSignature(stateStore, block);
await this._verifyAggregateCommit(stateStore, block);
}
_verifyTimestamp(block) {
const blockSlotNumber = this._bft.method.getSlotNumber(block.header.timestamp);
const currentTimestamp = Math.floor(Date.now() / 1000);
const currentSlotNumber = this._bft.method.getSlotNumber(currentTimestamp);
if (blockSlotNumber > currentSlotNumber) {
throw new Error(`Invalid timestamp ${block.header.timestamp} of the block with id: ${block.header.id.toString('hex')}`);
}
const { lastBlock } = this._chain;
const previousBlockSlotNumber = this._bft.method.getSlotNumber(lastBlock.header.timestamp);
if (blockSlotNumber <= previousBlockSlotNumber) {
throw new Error(`Invalid timestamp ${block.header.timestamp} of the block with id: ${block.header.id.toString('hex')}`);
}
}
_verifyPreviousBlockID(block) {
const { lastBlock } = this._chain;
if (!block.header.previousBlockID.equals(lastBlock.header.id)) {
throw new Error(`Invalid previousBlockID ${block.header.previousBlockID.toString('hex')} of the block with id: ${block.header.id.toString('hex')}`);
}
}
_verifyBlockHeight(block) {
const { lastBlock } = this._chain;
if (block.header.height !== lastBlock.header.height + 1) {
throw new Error(`Invalid height ${block.header.height} of the block with id: ${block.header.id.toString('hex')}`);
}
}
async _verifyGeneratorAddress(stateStore, block) {
if (block.header.generatorAddress.length !== 20) {
throw new Error(`Invalid length of generatorAddress ${block.header.generatorAddress.toString('hex')} of the block with id: ${block.header.id.toString('hex')}`);
}
const expectedGenerator = await this._bft.method.getGeneratorAtTimestamp(stateStore, block.header.height, block.header.timestamp);
if (!block.header.generatorAddress.equals(expectedGenerator.address)) {
throw new Error(`Generator with address ${block.header.generatorAddress.toString('hex')} of the block with id: ${block.header.id.toString('hex')} is ineligible to generate block for the current slot`);
}
}
async _verifyBFTProperties(stateStore, block) {
const bftParams = await this._bft.method.getBFTHeights(stateStore);
if (block.header.maxHeightPrevoted !== bftParams.maxHeightPrevoted) {
throw new Error(`Invalid maxHeightPrevoted ${block.header.maxHeightPrevoted} of the block with id: ${block.header.id.toString('hex')}`);
}
const isContradictingHeaders = await this._bft.method.isHeaderContradictingChain(stateStore, block.header);
if (isContradictingHeaders) {
throw new Error(`Contradicting headers for the block with id: ${block.header.id.toString('hex')}`);
}
const implyMaxPrevote = await this._bft.method.impliesMaximalPrevotes(stateStore, block.header);
if (block.header.impliesMaxPrevotes !== implyMaxPrevote) {
throw new Error('Invalid impliesMaxPrevotes value.');
}
}
async _verifyBlockSignature(stateStore, block) {
const bftParams = await this._bft.method.getBFTParameters(stateStore, block.header.height);
const generator = bftParams.validators.find(validator => validator.address.equals(block.header.generatorAddress));
if (!generator) {
throw new Error(`Validator with address ${block.header.generatorAddress.toString('hex')} does not exist for height ${block.header.height}`);
}
try {
block.header.validateSignature(generator.generatorKey, this._chain.chainID);
}
catch (error) {
throw new Error(`Invalid signature ${block.header.signature.toString('hex')} of the block with id: ${block.header.id.toString('hex')}`);
}
}
async _verifyAggregateCommit(stateStore, block) {
if (!block.header.aggregateCommit) {
throw new Error(`Aggregate Commit is "undefined" for the block with id: ${block.header.id.toString('hex')}`);
}
const isVerified = await this._commitPool.verifyAggregateCommit(stateStore, block.header.aggregateCommit);
if (!isVerified) {
throw new Error(`Invalid aggregateCommit for the block with id: ${block.header.id.toString('hex')}`);
}
}
async _verifyValidatorsHash(methodContext, block) {
var _a;
if (!block.header.validatorsHash) {
throw new Error(`Validators hash is "undefined" for the block with id: ${block.header.id.toString('hex')}`);
}
const { validatorsHash } = await this._bft.method.getBFTParameters(methodContext, block.header.height + 1);
if (!block.header.validatorsHash.equals(validatorsHash)) {
throw new Error(`Invalid validatorsHash ${(_a = block.header.validatorsHash) === null || _a === void 0 ? void 0 : _a.toString('hex')} of the block with id: ${block.header.id.toString('hex')}`);
}
}
async _verifyEventRoot(block, events) {
if (events.length > lisk_chain_1.MAX_EVENTS_PER_BLOCK) {
throw new Error(`Number of events cannot exceed ${lisk_chain_1.MAX_EVENTS_PER_BLOCK} per block`);
}
const smt = new lisk_db_1.SparseMerkleTree(lisk_chain_1.EVENT_KEY_LENGTH);
const keypairs = [];
for (const e of events) {
const pairs = e.keyPair();
for (const pair of pairs) {
keypairs.push(pair);
}
}
const eventRoot = await smt.update(constants_1.EMPTY_HASH, keypairs);
if (!block.header.eventRoot || !eventRoot.equals(block.header.eventRoot)) {
throw new Error(`Event root is not valid for the block with id: ${block.header.id.toString('hex')}`);
}
}
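// Reverts a block: refuses to go at or below the finalized height, rolls the state
// machine back to the previous state root via the ABI, removes the block from the
// chain (optionally keeping it in the temp table) and emits a block-delete event.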
async _deleteBlock(block, saveTempBlock = false) {
if (block.header.height <= this._chain.finalizedHeight) {
throw new Error('Cannot delete a block at or below the finalized height');
}
let expectedStateRoot = constants_1.EMPTY_HASH;
if (block.header.height - 1 > 0) {
const secondLastBlockHeader = await this._chain.dataAccess.getBlockHeaderByHeight(block.header.height - 1);
expectedStateRoot = secondLastBlockHeader.stateRoot;
}
try {
this._logger.debug({ height: block.header.height }, 'Reverting the block');
const { contextID } = await this._abi.initStateMachine({
header: block.header.toObject(),
});
await this._abi.revert({
contextID,
expectedStateRoot,
stateRoot: block.header.stateRoot,
});
}
finally {
await this._abi.clear({});
}
const stateStore = new lisk_chain_1.StateStore(this._db);
const batch = new lisk_db_1.Batch();
await this._chain.removeBlock(block, { batch, diff: { created: [], updated: [], deleted: [] }, stateStore }, { saveTempBlock });
this.events.emit(constants_1.CONSENSUS_EVENT_BLOCK_DELETE, { block });
}
async _deleteLastBlock({ saveTempBlock = false } = {}) {
if (this._stop) {
return;
}
const { lastBlock } = this._chain;
this._logger.debug({ id: lastBlock.header.id, height: lastBlock.header.height }, 'Deleting last block');
await this._deleteBlock(lastBlock, saveTempBlock);
}
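// Runs the synchronizer for the received block. ApplyPenaltyAndRestartError penalizes
// the peer and retries, RestartError retries, AbortError gives up silently.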
async _sync(block, peerID) {
if (this._stop) {
return;
}
try {
await this._synchronizer.run(block, peerID);
}
catch (error) {
if (error instanceof errors_2.ApplyPenaltyAndRestartError) {
this._logger.warn({ error, reason: error.reason }, 'Applying penalty and restarting synchronization');
this._network.applyPenaltyOnPeer({ peerId: peerID, penalty: 100 });
await this._sync(block, peerID);
return;
}
if (error instanceof errors_2.RestartError) {
this._logger.info({ error, reason: error.reason }, 'Restarting synchronization');
await this._sync(block, peerID);
return;
}
if (error instanceof errors_2.AbortError) {
this._logger.info({ error, reason: error.reason }, 'Aborting synchronization mechanism');
return;
}
throw error;
}
}
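// Minimal executor facade (delete last block, validate/verify/execute a block, current
// validators, slot number, finalized height) consumed by the synchronization mechanisms.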
_createBlockExecutor() {
const stateStore = new lisk_chain_1.StateStore(this._db);
return {
deleteLastBlock: async (options = {}) => this._deleteLastBlock(options),
executeValidated: async (block, options) => this._executeValidated(block, options),
validate: (block) => this._chain.validateBlock(block, {
version: BLOCK_VERSION,
}),
verify: async (block) => this._verify(block),
getCurrentValidators: async () => {
const nextHeight = this._chain.lastBlock.header.height + 1;
const bftParams = await this._bft.method.getBFTParameters(stateStore, nextHeight);
return bftParams.validators;
},
getSlotNumber: timestamp => this._bft.method.getSlotNumber(timestamp),
getFinalizedHeight: () => this.finalizedHeight(),
};
}
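// Initializes an ABI state-machine context for the header and verifies the block
// assets, clearing the ABI context on failure.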
async _verifyAssets(block) {
try {
const { contextID } = await this._abi.initStateMachine({
header: block.header.toObject(),
});
await this._abi.verifyAssets({
contextID,
assets: block.assets.getAll(),
});
return contextID;
}
catch (err) {
await this._abi.clear({});
throw err;
}
}
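// Runs BFT pre-processing and the ABI hooks (beforeTransactionsExecute, per-transaction
// verify and execute, afterTransactionsExecute), applies any BFT parameter update and
// returns the collected events with their indices set; clears the ABI on error.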
async _executeBlock(contextID, stateStore, block) {
try {
const maxRemovalHeight = await this.getMaxRemovalHeight();
this._metrics.maxRemovalHeight.set(maxRemovalHeight);
await this._bft.beforeTransactionsExecute(stateStore, block.header, maxRemovalHeight);
const events = [];
const beforeResult = await this._abi.beforeTransactionsExecute({
contextID,
assets: block.assets.getAll(),
});
events.push(...beforeResult.events);
for (const transaction of block.transactions) {
const { result: verifyResult } = await this._abi.verifyTransaction({
contextID,
transaction: transaction.toObject(),
header: block.header.toObject(),
onlyCommand: false,
});
if (verifyResult !== abi_1.TransactionVerifyResult.OK) {
this._logger.debug(`Failed to verify transaction ${transaction.id.toString('hex')}`);
throw new Error(`Failed to verify transaction ${transaction.id.toString('hex')}.`);
}
const txExecResult = await this._abi.executeTransaction({
contextID,
assets: block.assets.getAll(),
dryRun: false,
header: block.header.toObject(),
transaction: transaction.toObject(),
});
if (txExecResult.result === abi_1.TransactionExecutionResult.INVALID) {
this._logger.debug(`Failed to execute transaction ${transaction.id.toString('hex')}`);
throw new Error(`Failed to execute transaction ${transaction.id.toString('hex')}.`);
}
events.push(...txExecResult.events);
}
const afterResult = await this._abi.afterTransactionsExecute({
contextID,
assets: block.assets.getAll(),
transactions: block.transactions.map(tx => tx.toObject()),
});
events.push(...afterResult.events);
if (!(0, utils_1.isEmptyConsensusUpdate)(afterResult.preCommitThreshold, afterResult.certificateThreshold, afterResult.nextValidators)) {
await this._bft.method.setBFTParameters(stateStore, afterResult.preCommitThreshold, afterResult.certificateThreshold, afterResult.nextValidators);
this.events.emit(constants_1.CONSENSUS_EVENT_VALIDATORS_CHANGED, {
preCommitThreshold: afterResult.preCommitThreshold,
certificateThreshold: afterResult.certificateThreshold,
nextValidators: afterResult.nextValidators,
});
}
return events.map((e, i) => {
const event = new lisk_chain_1.Event(e);
event.setIndex(i);
return event;
});
}
catch (err) {
await this._abi.clear({});
throw err;
}
}
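// Commits the new state root against the previous block's state root; the ABI context
// is always cleared afterwards.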
async _commitBlock(contextID, block) {
try {
await this._abi.commit({
contextID,
dryRun: false,
expectedStateRoot: block.header.stateRoot,
stateRoot: this._chain.lastBlock.header.stateRoot,
});
}
finally {
await this._abi.clear({});
}
}
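// Executes the genesis block: initializes the BFT and module genesis state through the
// ABI, sets the initial BFT parameters, commits starting from the empty state root and
// returns the indexed events; the ABI context is cleared in all cases.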
async _executeGenesisBlock(stateStore, genesisBlock) {
try {
const { contextID } = await this._abi.initStateMachine({
header: genesisBlock.header.toObject(),
});
if (!genesisBlock.header.stateRoot) {
throw new Error('Genesis block stateRoot must not be empty.');
}
await this._bft.initGenesisState(stateStore, genesisBlock.header);
const result = await this._abi.initGenesisState({
contextID,
stateRoot: genesisBlock.header.stateRoot,
});
await this._bft.method.setBFTParameters(stateStore, result.preCommitThreshold, result.certificateThreshold, result.nextValidators);
this.events.emit(constants_1.CONSENSUS_EVENT_VALIDATORS_CHANGED, {
preCommitThreshold: result.preCommitThreshold,
certificateThreshold: result.certificateThreshold,
nextValidators: result.nextValidators,
});
await this._abi.commit({
contextID,
dryRun: false,
stateRoot: lisk_cryptography_1.utils.hash(Buffer.alloc(0)),
expectedStateRoot: genesisBlock.header.stateRoot,
});
return result.events.map((e, i) => {
const event = new lisk_chain_1.Event(e);
event.setIndex(i);
return event;
});
}
finally {
await this._abi.clear({});
}
}
}
exports.Consensus = Consensus;
//# sourceMappingURL=consensus.js.map