lisk-framework

Lisk blockchain application platform

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.NetworkEndpoint = void 0; const lisk_chain_1 = require("@liskhq/lisk-chain"); const lisk_codec_1 = require("@liskhq/lisk-codec"); const lisk_utils_1 = require("@liskhq/lisk-utils"); const lisk_validator_1 = require("@liskhq/lisk-validator"); const lisk_cryptography_1 = require("@liskhq/lisk-cryptography"); const constants_1 = require("./constants"); const schema_1 = require("./schema"); const schema_2 = require("./certificate_generation/schema"); const base_network_endpoint_1 = require("../network/base_network_endpoint"); const constants_2 = require("./certificate_generation/constants"); const metrics_1 = require("../metrics/metrics"); const DEFAULT_SINGLE_COMMIT_FROM_IDS_RATE_LIMIT_FREQUENCY = 10; const DEFAULT_LAST_BLOCK_RATE_LIMIT_FREQUENCY = 10; const DEFAULT_COMMON_BLOCK_RATE_LIMIT_FREQUENCY = 10; const DEFAULT_BLOCKS_FROM_IDS_RATE_LIMIT_FREQUENCY = 100; class NetworkEndpoint extends base_network_endpoint_1.BaseNetworkEndpoint { constructor(args) { super(args.network); this._metrics = { eventSingleCommit: metrics_1.defaultMetrics.counter('consensus_handleEventSingleCommit'), }; this._logger = args.logger; this._chain = args.chain; this._network = args.network; this._commitPool = args.commitPool; this._db = args.db; } handleRPCGetLastBlock(peerId) { this.addRateLimit(constants_1.NETWORK_RPC_GET_LAST_BLOCK, peerId, DEFAULT_LAST_BLOCK_RATE_LIMIT_FREQUENCY); return this._chain.lastBlock.getBytes(); } async handleRPCGetBlocksFromId(data, peerId) { this.addRateLimit(constants_1.NETWORK_RPC_GET_BLOCKS_FROM_ID, peerId, DEFAULT_BLOCKS_FROM_IDS_RATE_LIMIT_FREQUENCY); let decodedData; try { decodedData = lisk_codec_1.codec.decode(schema_1.getBlocksFromIdRequestSchema, data); } catch (error) { this._logger.warn({ err: error, req: data, peerID: peerId, }, `${constants_1.NETWORK_RPC_GET_BLOCKS_FROM_ID} response failed on decoding. Applying a penalty to the peer`); this._network.applyPenaltyOnPeer({ peerId, penalty: 100, }); throw error; } try { lisk_validator_1.validator.validate(schema_1.getBlocksFromIdRequestSchema, decodedData); } catch (error) { this._logger.warn({ err: error, req: data, peerID: peerId, }, `${constants_1.NETWORK_RPC_GET_BLOCKS_FROM_ID} response failed on validation. Applying a penalty to the peer`); this._network.applyPenaltyOnPeer({ peerId, penalty: 100, }); throw error; } const { blockId } = decodedData; const lastBlock = await this._chain.dataAccess.getBlockHeaderByID(blockId); const lastBlockHeight = lastBlock.height; const fetchUntilHeight = lastBlockHeight + 103; const blocks = await this._chain.dataAccess.getBlocksByHeightBetween(lastBlockHeight + 1, fetchUntilHeight); const encodedBlocks = blocks.map(block => block.getBytes()); return lisk_codec_1.codec.encode(schema_1.getBlocksFromIdResponseSchema, { blocks: encodedBlocks }); } async handleRPCGetHighestCommonBlock(data, peerId) { this.addRateLimit(constants_1.NETWORK_RPC_GET_HIGHEST_COMMON_BLOCK, peerId, DEFAULT_COMMON_BLOCK_RATE_LIMIT_FREQUENCY); const blockIds = lisk_codec_1.codec.decode(schema_1.getHighestCommonBlockRequestSchema, data); const logDataAndApplyPenalty = (errData) => { this._logger.warn(errData, 'getHighestCommonBlock request validation failed. 
Applying a penalty to the peer'); this._network.applyPenaltyOnPeer({ peerId, penalty: 100, }); }; try { lisk_validator_1.validator.validate(schema_1.getHighestCommonBlockRequestSchema, blockIds); } catch (error) { logDataAndApplyPenalty({ err: error, req: data }); throw error; } if (!lisk_utils_1.objects.bufferArrayUniqueItems(blockIds.ids)) { logDataAndApplyPenalty({ req: data }); } const commonBlockHeaderID = await this._chain.dataAccess.getHighestCommonBlockID(blockIds.ids); return lisk_codec_1.codec.encode(schema_1.getHighestCommonBlockResponseSchema, { id: commonBlockHeaderID !== null && commonBlockHeaderID !== void 0 ? commonBlockHeaderID : Buffer.alloc(0), }); } async handleEventSingleCommit(data, peerId) { this._metrics.eventSingleCommit.inc(); this.addRateLimit(constants_2.NETWORK_EVENT_COMMIT_MESSAGES, peerId, DEFAULT_SINGLE_COMMIT_FROM_IDS_RATE_LIMIT_FREQUENCY); if (!Buffer.isBuffer(data)) { const errorMessage = 'Received invalid single commit data. Applying a penalty to the peer'; this._logger.warn({ peerId }, errorMessage); this.network.applyPenaltyOnPeer({ peerId, penalty: 100, }); throw new Error(errorMessage); } const stateStore = new lisk_chain_1.StateStore(this._db); try { const singleCommitsNetworkPacket = lisk_codec_1.codec.decode(schema_2.singleCommitsNetworkPacketSchema, data); for (const encodedCommit of singleCommitsNetworkPacket.commits) { const singleCommit = lisk_codec_1.codec.decode(schema_2.singleCommitSchema, encodedCommit); lisk_validator_1.validator.validate(schema_2.singleCommitSchema, singleCommit); const isValidCommit = await this._commitPool.validateCommit(stateStore, singleCommit); if (!isValidCommit) { this._logger.trace({ validatorAddress: lisk_cryptography_1.address.getLisk32AddressFromAddress(singleCommit.validatorAddress), height: singleCommit.height, }, 'Received single commit is invalid'); continue; } this._commitPool.addCommit(singleCommit); this._logger.debug({ validatorAddress: lisk_cryptography_1.address.getLisk32AddressFromAddress(singleCommit.validatorAddress), height: singleCommit.height, }, 'Added received single commit to the pool'); } } catch (error) { this._logger.warn({ err: error, peerID: peerId, }, `${constants_2.NETWORK_EVENT_COMMIT_MESSAGES} fail to verify single commit. Applying a penalty to the peer`); this._network.applyPenaltyOnPeer({ peerId, penalty: 100, }); throw error; } } } exports.NetworkEndpoint = NetworkEndpoint; //# sourceMappingURL=network_endpoint.js.map
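
The constructor expects the engine's network, logger, chain, commit pool and database dependencies. Below is a minimal wiring sketch using hypothetical stub objects; none of these stubs are part of lisk-framework, and in practice the engine injects its real Network, Chain and CommitPool instances.

// Hypothetical stubs standing in for the dependencies the engine normally injects.
const stubLogger = { trace: () => {}, debug: () => {}, warn: () => {} };
const stubNetwork = { applyPenaltyOnPeer: () => {} }; // penalties for misbehaving peers go through this
const stubChain = { lastBlock: { getBytes: () => Buffer.alloc(0) }, dataAccess: {} };
const stubCommitPool = { validateCommit: async () => false, addCommit: () => {} };

const endpoint = new NetworkEndpoint({
    network: stubNetwork,
    logger: stubLogger,
    chain: stubChain,
    commitPool: stubCommitPool,
    db: undefined, // only used by handleEventSingleCommit to build a StateStore
});

// Serve a peer's "get last block" RPC: the peer is rate limited, then the encoded last block is returned.
const lastBlockBytes = endpoint.handleRPCGetLastBlock('127.0.0.1:5000');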