@bsv/wallet-toolbox-client
Client only Wallet Storage
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChaintracksStorageIdb = void 0;
const ChaintracksStorageBase_1 = require("./ChaintracksStorageBase");
const blockHeaderUtilities_1 = require("../util/blockHeaderUtilities");
const HeightRange_1 = require("../util/HeightRange");
const WERR_errors_1 = require("../../../../sdk/WERR_errors");
const idb_1 = require("idb");
class ChaintracksStorageIdb extends ChaintracksStorageBase_1.ChaintracksStorageBase {
constructor(options) {
super(options);
this.allStores = ['live_headers', 'bulk_headers'];
this.dbName = `chaintracks-${this.chain}net`;
}
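/**
* Ensures the IndexedDB connection is open, migrations have run, base class
* availability is established, and the bulk data manager is bound to this storage.
*/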
async makeAvailable() {
if (this.isAvailable && this.hasMigrated)
return;
// Not required by the base class, but ensure migrations have run before any requests are served.
if (!this.hasMigrated) {
await this.migrateLatest();
}
if (!this.isAvailable) {
await super.makeAvailable();
// Connect the bulk data file manager to the table provided by this storage class.
await this.bulkManager.setStorage(this, this.log);
}
}
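/**
* Opens (creating or upgrading as needed) the IndexedDB database, then runs the base class migrations.
*/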
async migrateLatest() {
if (this.db)
return;
this.db = await this.initDB();
await super.migrateLatest();
}
async destroy() { }
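/**
* Removes all records from the `live_headers` object store.
*/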
async deleteLiveBlockHeaders() {
var _a;
await this.makeAvailable();
await ((_a = this.db) === null || _a === void 0 ? void 0 : _a.clear('live_headers'));
}
/**
* Delete live headers with height less than or equal to `maxHeight`.
*
* Any remaining header whose `previousHeaderId` references a deleted header
* has its `previousHeaderId` set to null.
*
* @param maxHeight delete all records with `height` less than or equal to this value
* @returns number of deleted records
*/
async deleteOlderLiveBlockHeaders(maxHeight) {
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['live_headers']);
const store = trx.objectStore('live_headers');
const heightIndex = store.index('height');
const previousHeaderIdIndex = store.index('previousHeaderId');
// Get all headers with height <= maxHeight
const range = IDBKeyRange.upperBound(maxHeight);
const headersToDelete = await heightIndex.getAll(range);
const headerIdsToDelete = new Set(headersToDelete.map(header => header.headerId));
const deletedCount = headersToDelete.length;
// Null previousHeaderId on any surviving header that still references a deleted header.
// getAll updates every referencing header and skips the put when none exists.
for (const id of headerIdsToDelete) {
const headersToUpdate = await previousHeaderIdIndex.getAll(id);
for (const headerToUpdate of headersToUpdate) {
await store.put({ ...headerToUpdate, previousHeaderId: null });
}
}
// Delete the headers
for (const id of headerIdsToDelete) {
await store.delete(id);
}
await trx.done;
return deletedCount;
}
/**
* @returns the active chain tip header
* @throws an error if there is no tip.
*/
async findChainTipHeader() {
const header = await this.findChainTipHeaderOrUndefined();
if (!header)
throw new Error('Database contains no active chain tip header.');
return header;
}
/**
* @returns the active chain tip header, or undefined if there is no tip.
*/
async findChainTipHeaderOrUndefined() {
await this.makeAvailable();
const trx = this.toDbTrxReadOnly(['live_headers']);
const store = trx.objectStore('live_headers');
const activeTipIndex = store.index('activeTip');
let header = await activeTipIndex.get([1, 1]);
header = this.repairStoredLiveHeader(header);
await trx.done;
return header;
}
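/**
* @returns the live header with the given block hash, or undefined if not found.
*/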
async findLiveHeaderForBlockHash(hash) {
await this.makeAvailable();
const trx = this.toDbTrxReadOnly(['live_headers']);
const store = trx.objectStore('live_headers');
const hashIndex = store.index('hash');
let header = await hashIndex.get(hash);
header = this.repairStoredLiveHeader(header);
await trx.done;
return header;
}
async findLiveHeaderForHeaderId(headerId) {
await this.makeAvailable();
const trx = this.toDbTrxReadOnly(['live_headers']);
const store = trx.objectStore('live_headers');
let header = await store.get(headerId);
header = this.repairStoredLiveHeader(header);
await trx.done;
return header;
}
async findLiveHeaderForHeight(height) {
await this.makeAvailable();
const trx = this.toDbTrxReadOnly(['live_headers']);
const store = trx.objectStore('live_headers');
const heightIndex = store.index('height');
let header = await heightIndex.get(height);
header = this.repairStoredLiveHeader(header);
await trx.done;
return header || null;
}
async findLiveHeaderForMerkleRoot(merkleRoot) {
await this.makeAvailable();
const trx = this.toDbTrxReadOnly(['live_headers']);
const store = trx.objectStore('live_headers');
const merkleRootIndex = store.index('merkleRoot');
let header = await merkleRootIndex.get(merkleRoot);
header = this.repairStoredLiveHeader(header);
await trx.done;
return header || null;
}
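/**
* @returns the minimum and maximum heights currently present in `live_headers`,
* or an empty HeightRange if the store is empty.
*/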
async findLiveHeightRange() {
await this.makeAvailable();
const trx = this.toDbTrxReadOnly(['live_headers']);
const store = trx.objectStore('live_headers');
const heightIndex = store.index('height');
const minCursor = await heightIndex.openCursor(null, 'next');
const minValue = minCursor ? minCursor.value.height : null;
const maxCursor = await heightIndex.openCursor(null, 'prev');
const maxValue = maxCursor ? maxCursor.value.height : null;
const range = minValue === null || maxValue === null ? HeightRange_1.HeightRange.empty : new HeightRange_1.HeightRange(minValue, maxValue);
await trx.done;
return range;
}
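/**
* @returns the largest headerId currently in `live_headers`, or 0 if the store is empty.
*/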
async findMaxHeaderId() {
await this.makeAvailable();
const trx = this.toDbTrxReadOnly(['live_headers']);
const store = trx.objectStore('live_headers');
const maxCursor = await store.openKeyCursor(null, 'prev');
const maxValue = maxCursor ? Number(maxCursor.key) : 0;
await trx.done;
return maxValue;
}
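/**
* Returns up to `count` of the lowest-height active live headers, in ascending
* height order, as candidates for migration to bulk storage.
*/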
async liveHeadersForBulk(count) {
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['live_headers']);
const store = trx.objectStore('live_headers');
const heightIndex = store.index('height');
let cursor = await heightIndex.openCursor(null, 'next');
const headers = [];
while (cursor && count > 0) {
const header = this.repairStoredLiveHeader(cursor.value);
if (header && header.isActive) {
count--;
headers.push(header);
}
cursor = await cursor.continue();
}
await trx.done;
return headers;
}
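/**
* Returns the live headers whose height falls within `range`, in ascending height order.
*/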
async getLiveHeaders(range) {
if (range.isEmpty)
return [];
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['live_headers']);
const store = trx.objectStore('live_headers');
const heightIndex = store.index('height');
let cursor = await heightIndex.openCursor(null, 'next');
const headers = [];
while (cursor) {
const header = this.repairStoredLiveHeader(cursor.value);
if (header && range.contains(header.height)) {
headers.push(header);
}
cursor = await cursor.continue();
}
await trx.done;
return headers;
}
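/**
* Inserts a new live header.
*
* The result flags record why an insert was rejected (duplicate hash, missing or
* invalid previous header, missing active ancestor or chain tip). When the new
* header's accumulated chainWork exceeds the current active tip's, it becomes the
* new active tip: reorged headers are deactivated and the new branch is activated.
*
* @param header block header to insert
* @returns insert result flags, reorg depth and any deactivated headers
*/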
async insertHeader(header) {
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['live_headers']);
const store = trx.objectStore('live_headers');
const hashIndex = store.index('hash');
const activeTipIndex = store.index('activeTip');
const r = {
added: false,
dupe: false,
noPrev: false,
badPrev: false,
noActiveAncestor: false,
isActiveTip: false,
reorgDepth: 0,
priorTip: undefined,
noTip: false,
deactivatedHeaders: []
};
// Check for duplicate
if (await hashIndex.get(header.hash)) {
r.dupe = true;
await trx.done;
return r;
}
//let all = await store.getAll()
//console.log(`idb store length: ${all.length} last: ${all[all.length - 1]?.height}`)
//let allHash = await hashIndex.getAll()
// Find previous header
let oneBack = this.repairStoredLiveHeader(await hashIndex.get(header.previousHash));
if (!oneBack) {
// Check if this is first live header
const count = await store.count();
if (count === 0) {
// If this is the first live header, the last bulk header (if there is one) is the previous header.
const lbf = await this.bulkManager.getLastFile();
if (!lbf)
throw new WERR_errors_1.WERR_INVALID_OPERATION('bulk headers must exist before first live header can be added');
if (header.previousHash === lbf.lastHash && header.height === lbf.firstHeight + lbf.count) {
// Valid first live header. Add it.
const chainWork = (0, blockHeaderUtilities_1.addWork)(lbf.lastChainWork, (0, blockHeaderUtilities_1.convertBitsToWork)(header.bits));
r.isActiveTip = true;
const newHeader = {
...header,
headerId: 0,
previousHeaderId: null,
chainWork,
isChainTip: r.isActiveTip,
isActive: r.isActiveTip
};
const h = this.prepareStoredLiveHeader(newHeader, true);
newHeader.headerId = Number(await store.add(h));
r.added = true;
await trx.done;
return r;
}
}
// Failure without a oneBack:
// Either the first live header does not follow the last bulk header, or
// this is not the first live header and no live header matches previousHash.
r.noPrev = true;
await trx.done;
return r;
}
if (oneBack.isActive && oneBack.isChainTip) {
r.priorTip = oneBack;
}
else {
r.priorTip = this.repairStoredLiveHeader(await activeTipIndex.get([1, 1]));
}
if (!r.priorTip) {
// No active chain tip found. This is a logic error in state of live headers.
r.noTip = true;
await trx.done;
return r;
}
// We have an acceptable new live header and the live headers store has an active chain tip.
const chainWork = (0, blockHeaderUtilities_1.addWork)(oneBack.chainWork, (0, blockHeaderUtilities_1.convertBitsToWork)(header.bits));
r.isActiveTip = (0, blockHeaderUtilities_1.isMoreWork)(chainWork, r.priorTip.chainWork);
const newHeader = {
...header,
headerId: 0,
previousHeaderId: oneBack.headerId,
chainWork,
isChainTip: r.isActiveTip,
isActive: r.isActiveTip
};
if (r.isActiveTip) {
let activeAncestor = oneBack;
while (!activeAncestor.isActive) {
const previousHeader = this.repairStoredLiveHeader(await store.get(activeAncestor.previousHeaderId));
if (!previousHeader) {
r.noActiveAncestor = true;
await trx.done;
return r;
}
activeAncestor = previousHeader;
}
if (!(oneBack.isActive && oneBack.isChainTip)) {
r.reorgDepth = Math.min(r.priorTip.height, header.height) - activeAncestor.height;
}
if (activeAncestor.headerId !== oneBack.headerId) {
// Deactivate reorg'ed headers
let headerToDeactivate = this.repairStoredLiveHeader(await activeTipIndex.get([1, 1]));
while (headerToDeactivate && headerToDeactivate.headerId !== activeAncestor.headerId) {
r.deactivatedHeaders.push(headerToDeactivate);
await store.put(this.prepareStoredLiveHeader({ ...headerToDeactivate, isActive: false }));
headerToDeactivate = this.repairStoredLiveHeader(await store.get(headerToDeactivate.previousHeaderId));
}
let headerToActivate = oneBack;
while (headerToActivate.headerId !== activeAncestor.headerId) {
await store.put(this.prepareStoredLiveHeader({ ...headerToActivate, isActive: true }));
headerToActivate = this.repairStoredLiveHeader(await store.get(headerToActivate.previousHeaderId));
}
}
}
if (oneBack.isChainTip) {
await store.put(this.prepareStoredLiveHeader({ ...oneBack, isChainTip: false }));
}
await store.put(this.prepareStoredLiveHeader(newHeader, true));
r.added = true;
//all = await store.getAll()
//console.log(`idb store length: ${all.length} last: ${all[all.length - 1]?.height}`)
if (r.added && r.isActiveTip) {
//this.pruneLiveBlockHeaders(newHeader.height)
}
await trx.done;
return r;
}
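/**
* Deletes the bulk header file record with the given `fileId`.
* @returns 1 (the number of records assumed affected)
*/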
async deleteBulkFile(fileId) {
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['bulk_headers']);
const store = trx.objectStore('bulk_headers');
await store.delete(fileId);
await trx.done;
// return number of records affected
return 1;
}
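/**
* Inserts a bulk header file record, letting IndexedDB assign a new `fileId`,
* which is written back to `file` and returned.
*/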
async insertBulkFile(file) {
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['bulk_headers']);
const store = trx.objectStore('bulk_headers');
const fileObj = { ...file };
delete fileObj['fileId'];
file.fileId = Number(await store.put(fileObj));
await trx.done;
return file.fileId;
}
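/**
* Replaces the bulk header file record with key `fileId` with `file`.
* @returns 1 (the number of records assumed affected)
*/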
async updateBulkFile(fileId, file) {
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['bulk_headers']);
const store = trx.objectStore('bulk_headers');
file.fileId = fileId;
await store.put(file);
await trx.done;
// return number of records affected
return 1;
}
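/**
* @returns all bulk header file records sorted by `firstHeight`, with their `data` payloads stripped.
*/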
async getBulkFiles() {
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['bulk_headers']);
const store = trx.objectStore('bulk_headers');
const files = await store.getAll();
files.sort((a, b) => a.firstHeight - b.firstHeight);
// Strip the (potentially large) raw header data before returning the file records.
for (const file of files)
file.data = undefined;
await trx.done;
return files;
}
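/**
* Returns the raw header data of a bulk file, optionally sliced from `offset` for
* `length` items, or undefined if the record has no data.
*/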
async getBulkFileData(fileId, offset, length) {
if (!Number.isInteger(fileId))
throw new WERR_errors_1.WERR_INVALID_PARAMETER('fileId', 'a valid, integer bulk_files fileId');
await this.makeAvailable();
const trx = this.toDbTrxReadWrite(['bulk_headers']);
const store = trx.objectStore('bulk_headers');
const info = await store.get(fileId);
if (!info)
throw new WERR_errors_1.WERR_INVALID_PARAMETER(`fileId`, `an existing record. ${fileId} not found`);
let data = undefined;
if (!info.data)
return undefined;
if (offset !== undefined && length !== undefined && Number.isInteger(offset) && Number.isInteger(length)) {
data = info.data.slice(offset, offset + length);
}
else {
data = info.data;
}
await trx.done;
return data;
}
/**
* IndexedDB cannot index boolean property values.
* `true` is therefore stored as 1, and `false` is stored by omitting the property (delete v['property']).
*
* This function restores these properties to true and false.
*
* @param header
* @returns copy of header with restored boolean properties
*/
repairStoredLiveHeader(header) {
if (!header)
return undefined;
const h = {
...header,
isActive: !!header['isActive'],
isChainTip: !!header['isChainTip']
};
return h;
}
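/**
* Inverse of `repairStoredLiveHeader`: converts boolean properties to the stored
* representation (1 for true, property removed for false) and, when `forInsert`
* is true, removes `headerId` so IndexedDB can assign an auto-incremented key.
*/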
prepareStoredLiveHeader(header, forInsert) {
const h = { ...header };
if (forInsert)
delete h['headerId'];
if (header.isActive)
h['isActive'] = 1;
else
delete h['isActive'];
if (header.isChainTip)
h['isChainTip'] = 1;
else
delete h['isChainTip'];
return h;
}
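/**
* Adds a live header record directly, without chain tip or reorg handling,
* assigning the auto-incremented `headerId` back onto `header`.
*/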
async insertLiveHeader(header) {
const trx = this.toDbTrxReadWrite(['live_headers']);
const store = trx.objectStore('live_headers');
const h = this.prepareStoredLiveHeader(header, true);
header.headerId = Number(await store.add(h));
await trx.done;
return header;
}
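/**
* Opens version 1 of the database, creating on first use the `live_headers` store
* (keyed by auto-incremented `headerId`, with hash, height, previousHeaderId,
* merkleRoot and activeTip indexes) and the `bulk_headers` store (keyed by
* auto-incremented `fileId`).
*/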
async initDB() {
const db = await (0, idb_1.openDB)(this.dbName, 1, {
upgrade(db, oldVersion, newVersion, transaction) {
if (!db.objectStoreNames.contains('live_headers')) {
const liveHeadersStore = db.createObjectStore('live_headers', {
keyPath: 'headerId',
autoIncrement: true
});
liveHeadersStore.createIndex('hash', 'hash', { unique: true });
liveHeadersStore.createIndex('height', 'height', { unique: false });
liveHeadersStore.createIndex('previousHeaderId', 'previousHeaderId', { unique: false });
liveHeadersStore.createIndex('merkleRoot', 'merkleRoot', { unique: false });
liveHeadersStore.createIndex('activeTip', ['isActive', 'isChainTip'], { unique: false });
}
if (!db.objectStoreNames.contains('bulk_headers')) {
db.createObjectStore('bulk_headers', {
keyPath: 'fileId',
autoIncrement: true
});
}
}
});
return db;
}
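/**
* Starts a readonly transaction over the given stores (all stores by default)
* and records the access time.
*/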
toDbTrxReadOnly(stores) {
if (!this.db)
throw new Error('not initialized');
const db = this.db;
const trx = db.transaction(stores || this.allStores, 'readonly');
this.whenLastAccess = new Date();
return trx;
}
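/**
* Starts a readwrite transaction over the given stores (all stores by default)
* and records the access time.
*/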
toDbTrxReadWrite(stores) {
if (!this.db)
throw new Error('not initialized');
const db = this.db;
const trx = db.transaction(stores || this.allStores, 'readwrite');
this.whenLastAccess = new Date();
return trx;
}
}
exports.ChaintracksStorageIdb = ChaintracksStorageIdb;
//# sourceMappingURL=ChaintracksStorageIdb.js.map