@bsv/wallet-toolbox-client
Client only Wallet Storage
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChaintracksStorageBase = void 0;
const sdk_1 = require("../../../../sdk");
const blockHeaderUtilities_1 = require("../util/blockHeaderUtilities");
const BulkFileDataManager_1 = require("../util/BulkFileDataManager");
/**
 * Base class providing the required interface methods of a Chaintracks Storage Engine implementation.
 */
class ChaintracksStorageBase {
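/**
 * Returns default storage options for the given chain: a live height threshold of 2000
 * headers, a reorg height threshold of 400, bulk migration chunks of 500 headers, a
 * batch insert limit of 400, and no preconfigured bulk file data manager.
 */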
static createStorageBaseOptions(chain) {
const options = {
chain,
liveHeightThreshold: 2000,
reorgHeightThreshold: 400,
bulkMigrationChunkSize: 500,
batchInsertLimit: 400,
bulkFileDataManager: undefined
};
return options;
}
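/**
 * Copies the supplied options onto the instance and creates a default
 * BulkFileDataManager for the chain when one is not provided.
 */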
constructor(options) {
this.isAvailable = false;
this.hasMigrated = false;
this.nowMigratingLiveToBulk = false;
this.chain = options.chain;
this.liveHeightThreshold = options.liveHeightThreshold;
this.reorgHeightThreshold = options.reorgHeightThreshold;
this.bulkMigrationChunkSize = options.bulkMigrationChunkSize;
this.batchInsertLimit = options.batchInsertLimit;
this.bulkManager =
options.bulkFileDataManager || new BulkFileDataManager_1.BulkFileDataManager(BulkFileDataManager_1.BulkFileDataManager.createDefaultOptions(this.chain));
}
async shutdown() {
/* base class does nothing */
}
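/**
 * Marks the storage as available. Subclasses that require setup work
 * (connections, migrations) typically override this method.
 */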
async makeAvailable() {
if (this.isAvailable)
return;
this.isAvailable = true;
}
async migrateLatest() {
this.hasMigrated = true;
}
async dropAllData() {
await this.bulkManager.deleteBulkFiles();
await this.makeAvailable();
}
// BASE CLASS IMPLEMENTATIONS - MAY BE OVERRIDDEN
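/**
 * Deletes all bulk block header files managed by the BulkFileDataManager.
 */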
async deleteBulkBlockHeaders() {
await this.bulkManager.deleteBulkFiles();
}
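/**
 * Returns the current bulk and live height ranges after validating their invariants:
 * when bulk storage is empty, a non-empty live range must start at the genesis header;
 * otherwise bulk storage must start at genesis and live storage (when non-empty) must
 * begin immediately after the last bulk header.
 */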
async getAvailableHeightRanges() {
await this.makeAvailable();
const bulk = await this.bulkManager.getHeightRange();
const live = await this.getLiveHeightRange();
if (bulk.isEmpty) {
if (!live.isEmpty && live.minHeight !== 0)
throw new Error('With empty bulk storage, live storage must start with genesis header.');
}
else {
if (bulk.minHeight !== 0)
throw new Error("Bulk storage doesn't start with genesis header.");
if (!live.isEmpty && bulk.maxHeight + 1 !== live.minHeight)
throw new Error('There is a gap or overlap between bulk and live header storage.');
}
return { bulk, live };
}
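/**
 * Migrates live headers in excess of `liveHeightThreshold` below the active tip into
 * bulk storage, one full `bulkMigrationChunkSize` chunk at a time; any remainder
 * smaller than a chunk stays in live storage.
 */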
async pruneLiveBlockHeaders(activeTipHeight) {
await this.makeAvailable();
try {
const minHeight = this.lastActiveMinHeight || (await this.findLiveHeightRange()).minHeight;
let totalCount = activeTipHeight - minHeight + 1 - this.liveHeightThreshold;
while (totalCount >= this.bulkMigrationChunkSize) {
const count = Math.min(totalCount, this.bulkMigrationChunkSize);
await this.migrateLiveToBulk(count);
totalCount -= count;
this.lastActiveMinHeight = undefined;
}
}
catch (err) {
console.log(err);
throw err;
}
}
async findChainTipHash() {
await this.makeAvailable();
const tip = await this.findChainTipHeader();
return tip.hash;
}
async findChainTipWork() {
await this.makeAvailable();
const tip = await this.findChainTipHeader();
return tip.chainWork;
}
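/**
 * Returns the accumulated chainWork of the live header with the given hash,
 * throwing if no such live header exists.
 */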
async findChainWorkForBlockHash(hash) {
await this.makeAvailable();
const header = await this.findLiveHeaderForBlockHash(hash);
if (header !== null)
return header.chainWork;
throw new Error(`Header with hash of ${hash} was not found in the live headers database.`);
}
async findBulkFilesHeaderForHeightOrUndefined(height) {
await this.makeAvailable();
return this.bulkManager.findHeaderForHeightOrUndefined(height);
}
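/**
 * Looks up a header by height, checking live storage first and falling back to the
 * bulk files; returns undefined when the height is not found. Throws
 * WERR_INVALID_PARAMETER if height is not a non-negative integer.
 */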
async findHeaderForHeightOrUndefined(height) {
await this.makeAvailable();
if (isNaN(height) || height < 0 || Math.ceil(height) !== height)
throw new sdk_1.WERR_INVALID_PARAMETER('height', `a non-negative integer (${height}).`);
const liveHeader = await this.findLiveHeaderForHeight(height);
if (liveHeader !== null)
return liveHeader;
const header = await this.findBulkFilesHeaderForHeightOrUndefined(height);
return header;
}
async findHeaderForHeight(height) {
await this.makeAvailable();
const header = await this.findHeaderForHeightOrUndefined(height);
if (header)
return header;
throw new Error(`Header with height of ${height} was not found.`);
}
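/**
 * Returns true only when a live header with the given merkleRoot exists and is on
 * the active chain.
 */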
async isMerkleRootActive(merkleRoot) {
await this.makeAvailable();
const header = await this.findLiveHeaderForMerkleRoot(merkleRoot);
return header ? header.isActive : false;
}
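/**
 * Walks both headers back through their previousHeaderId links until they share a
 * parent, returning that common ancestor. Throws if the start of the live database
 * is reached before the reorg is resolved.
 */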
async findCommonAncestor(header1, header2) {
await this.makeAvailable();
/*eslint no-constant-condition: ["error", { "checkLoops": false }]*/
while (true) {
if (header1.previousHeaderId === null || header2.previousHeaderId === null)
throw new Error('Reached start of live database without resolving the reorg.');
if (header1.previousHeaderId === header2.previousHeaderId)
return await this.findLiveHeaderForHeaderId(header1.previousHeaderId);
const backupHeader1 = header1.height >= header2.height;
if (header2.height >= header1.height)
header2 = await this.findLiveHeaderForHeaderId(header2.previousHeaderId);
if (backupHeader1)
header1 = await this.findLiveHeaderForHeaderId(header1.previousHeaderId);
}
}
async findReorgDepth(header1, header2) {
await this.makeAvailable();
const ancestor = await this.findCommonAncestor(header1, header2);
return Math.max(header1.height, header2.height) - ancestor.height;
}
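/**
 * Moves the oldest `count` live headers into bulk storage and deletes them from live
 * storage. Skips the request when `count` exceeds `bulkMigrationChunkSize` (unless
 * `ignoreLimits` is true) or when a migration is already in progress.
 */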
async migrateLiveToBulk(count, ignoreLimits = false) {
await this.makeAvailable();
if (!ignoreLimits && count > this.bulkMigrationChunkSize)
return;
if (this.nowMigratingLiveToBulk) {
console.log('Already migrating live to bulk.');
return;
}
try {
this.nowMigratingLiveToBulk = true;
const headers = await this.liveHeadersForBulk(count);
await this.addLiveHeadersToBulk(headers);
await this.deleteOlderLiveBlockHeaders(headers.slice(-1)[0].height);
}
finally {
this.nowMigratingLiveToBulk = false;
}
}
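/**
 * Adds new bulk headers, merging them with any prior live headers. Candidate headers
 * are filtered to heights at or above the current bulk range, grouped into competing
 * chains by hash/previousHash linkage, and the chain with the most accumulated work
 * is persisted to bulk storage. Returns the headers that remain live.
 */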
async addBulkHeaders(headers, bulkRange, priorLiveHeaders) {
await this.makeAvailable();
if (!headers || headers.length === 0)
return priorLiveHeaders;
// Get the current extent of validated bulk and live block headers.
const before = await this.getAvailableHeightRanges();
const bulkFiles = this.bulkManager;
// Review `headers`, applying the following rules:
// 1. Height must be outside the current bulk HeightRange.
// 2. Height must not exceed presentHeight - liveHeightThreshold. If presentHeight is unknown, use maximum height across all headers.
// 3. Compute chainWork for each header.
// 4. Verify chain of header hash and previousHash values. One header at each height. Retain chain with most chainWork.
const minHeight = !bulkRange.isEmpty ? bulkRange.minHeight : before.bulk.isEmpty ? 0 : before.bulk.maxHeight + 1;
const filteredHeaders = headers.concat(priorLiveHeaders || []).filter(h => h.height >= minHeight);
const sortedHeaders = filteredHeaders.sort((a, b) => a.height - b.height);
const liveHeaders = sortedHeaders.filter(h => bulkRange.isEmpty || !bulkRange.contains(h.height));
if (liveHeaders.length === sortedHeaders.length) {
// All headers are live, no bulk headers to add.
return liveHeaders;
}
const chains = [];
for (const h of sortedHeaders) {
const dupe = chains.find(c => {
const lh = c.headers[c.headers.length - 1];
return lh.hash === h.hash;
});
if (dupe)
continue;
const chainWork = (0, blockHeaderUtilities_1.convertBitsToWork)(h.bits);
let chain = chains.find(c => {
const lh = c.headers[c.headers.length - 1];
return lh.height + 1 === h.height && lh.hash === h.previousHash;
});
if (chain) {
chain.headers.push(h);
chain.chainWork = (0, blockHeaderUtilities_1.addWork)(chain.chainWork, chainWork);
if (h.height <= bulkRange.maxHeight) {
chain.bulkChainWork = chain.chainWork;
}
continue;
}
// Since headers are assumed to be sorted by height,
// if this header doesn't extend an existing chain,
// it may be a branch from the previous header.
chain = chains.find(c => {
const lh = c.headers[c.headers.length - 2];
return lh.height + 1 === h.height && lh.hash === h.previousHash;
});
if (chain) {
// This header competes with tip of `chain`.
// Create a new chain with this header as the tip.
const headers = chain.headers.slice(0, -1);
headers.push(h);
const otherHeaderChainWork = (0, blockHeaderUtilities_1.convertBitsToWork)(chain.headers[chain.headers.length - 1].bits);
const newChainWork = (0, blockHeaderUtilities_1.addWork)((0, blockHeaderUtilities_1.subWork)(chain.chainWork, otherHeaderChainWork), chainWork);
const newChain = {
headers,
chainWork: newChainWork,
bulkChainWork: h.height <= bulkRange.maxHeight ? newChainWork : undefined
};
chains.push(newChain);
continue;
}
// Starting a new chain
chains.push({ headers: [h], chainWork, bulkChainWork: h.height <= bulkRange.maxHeight ? chainWork : undefined });
}
// Find the chain with the most chainWork.
const bestChain = chains.reduce((best, c) => ((0, blockHeaderUtilities_1.isMoreWork)(c.chainWork, best.chainWork) ? c : best), chains[0]);
const newBulkHeaders = bestChain.headers.slice(0, bulkRange.maxHeight - bestChain.headers[0].height + 1);
await this.addBulkHeadersFromBestChain(newBulkHeaders, bestChain);
return liveHeaders;
}
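/**
 * Merges the selected bulk headers into the bulk files using the best chain's
 * accumulated bulk chainWork, throwing if that chainWork was never computed.
 */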
async addBulkHeadersFromBestChain(newBulkHeaders, bestChain) {
if (!bestChain.bulkChainWork) {
throw new sdk_1.WERR_INTERNAL(`bulkChainWork is not defined for the best chain with height ${bestChain.headers[0].height}`);
}
await this.bulkManager.mergeIncrementalBlockHeaders(newBulkHeaders, bestChain.bulkChainWork);
}
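/**
 * Appends live headers to bulk storage, passing the chainWork of the final header
 * as the new cumulative total.
 */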
async addLiveHeadersToBulk(liveHeaders) {
if (liveHeaders.length === 0)
return;
const lastChainWork = liveHeaders.slice(-1)[0].chainWork;
await this.bulkManager.mergeIncrementalBlockHeaders(liveHeaders, lastChainWork);
}
}
exports.ChaintracksStorageBase = ChaintracksStorageBase;
//# sourceMappingURL=ChaintracksStorageBase.js.map