/**
 * @bsv/wallet-toolbox-client
 * Version: (unspecified)
 * Client-only Wallet Storage
 * 142 lines • 6.62 kB
 * TypeScript
 */
import { ChaintracksFetchApi } from '../Api/ChaintracksFetchApi';
import { BlockHeader, Chain } from '../../../../sdk';
import { BulkHeaderFileInfo } from './BulkHeaderFile';
import { HeightRange } from './HeightRange';
import { ChaintracksStorageBulkFileApi } from '../Api/ChaintracksStorageApi';
import { ChaintracksFsApi } from '../Api/ChaintracksFsApi';
/**
 * Construction options for `BulkFileDataManager`.
 * The constructor also accepts a bare `Chain`, in which case the remaining
 * options presumably come from `BulkFileDataManager.createDefaultOptions` — TODO confirm.
 */
export interface BulkFileDataManagerOptions {
/** Target blockchain this manager serves. */
chain: Chain;
/** Maximum number of block headers stored in a single bulk file (e.g. 100,000 per the class docs). */
maxPerFile: number;
/** Optional cap on how many files' data are retained in memory at once. */
maxRetained?: number;
/** Optional fetch service used to retrieve bulk file data from a CDN by `sourceUrl`. */
fetch?: ChaintracksFetchApi;
/** Optional URL of a known CDN source used to seed/update bulk files — presumably consumed by `updateFromUrl`; verify against implementation. */
fromKnownSourceUrl?: string;
}
/**
 * Manages bulk file data (typically 8MB chunks of 100,000 headers each).
 *
 * If not cached in memory,
 * optionally fetches data by `sourceUrl` from CDN on demand,
 * optionally finds data by `fileId` in a database on demand,
 * and retains a limited number of files in memory,
 * subject to the optional `maxRetained` limit.
 */
export declare class BulkFileDataManager {
/** Builds the default `BulkFileDataManagerOptions` for the given chain. */
static createDefaultOptions(chain: Chain): BulkFileDataManagerOptions;
// In-memory collection of managed bulk file entries (see the private BulkFileData shape below).
private bfds;
// Index from a file's content hash to its position in `bfds` — assumption from the name; confirm in implementation.
private fileHashToIndex;
// Serializes mutating operations; the *NoLock variants below run with this already held.
private lock;
// Optional backing database access (see `setStorage`).
private storage?;
/** Blockchain this manager serves (fixed at construction). */
readonly chain: Chain;
/** Maximum number of headers per bulk file for this series. */
readonly maxPerFile: number;
/** Fetch service for retrieving CDN-hosted file data, if configured. */
readonly fetch?: ChaintracksFetchApi;
/** Cap on files retained in memory, if configured. */
readonly maxRetained?: number;
/** Known CDN source URL, if configured. */
readonly fromKnownSourceUrl?: string;
/** Accepts full options, or a bare `Chain` to use defaults for that chain. */
constructor(options: BulkFileDataManagerOptions | Chain);
/** Creates a sequential reader over the managed files, optionally restricted to `range` and buffered up to `maxBufferSize` bytes per read. */
createReader(range?: HeightRange, maxBufferSize?: number): Promise<BulkFileDataReader>;
/** Merges the bulk file set published at `cdnUrl` into the managed state. */
updateFromUrl(cdnUrl: string): Promise<void>;
/** Attaches database-backed storage, enabling on-demand lookup of file data by `fileId`. */
setStorage(storage: ChaintracksStorageBulkFileApi): Promise<void>;
private setStorageNoLock;
/** Deletes the managed bulk files — scope (memory vs. attached storage) not visible here; confirm in implementation. */
deleteBulkFiles(): Promise<void>;
private deleteBulkFilesNoLock;
/** Merges `files` into the managed set; the result categorizes each file as unchanged, inserted, updated, or dropped. */
merge(files: BulkHeaderFileInfo[]): Promise<BulkFileDataManagerMergeResult>;
private mergeNoLock;
private mergeIncremental;
/** Renders a human-readable summary of a merge result, the managed files, or an arbitrary file list. */
toLogString(what?: BulkFileDataManagerMergeResult | BulkFileData[] | BulkHeaderFileInfo[]): string;
/** Appends new bulk headers to the trailing "incremental" file (see `update` docs below for incremental-file semantics). */
mergeIncrementalBlockHeaders(newBulkHeaders: BlockHeader[], incrementalChainWork?: string): Promise<void>;
/** Returns the managed file infos; `keepData` presumably controls whether in-memory data buffers are retained — TODO confirm. */
getBulkFiles(keepData?: boolean): Promise<BulkHeaderFileInfo[]>;
/** Returns the overall block-height range covered by the managed files. */
getHeightRange(): Promise<HeightRange>;
/** Returns (a slice of) the raw data for `file`, loading it on demand; `undefined` when unavailable. */
getDataFromFile(file: BulkHeaderFileInfo, offset?: number, length?: number): Promise<Uint8Array | undefined>;
private getDataFromFileNoLock;
/** Returns the header at `height`, or `undefined` if not covered by the managed files. */
findHeaderForHeightOrUndefined(height: number): Promise<BlockHeader | undefined>;
/** Returns the file whose height range contains `height`, or `undefined`. */
getFileForHeight(height: number): Promise<BulkHeaderFileInfo | undefined>;
private getBfdForHeight;
private getLastBfd;
/** Returns the last file, or the file `fromEnd` positions from the end — interpretation of `fromEnd` assumed from the name; confirm. */
getLastFile(fromEnd?: number): Promise<BulkHeaderFileInfo | undefined>;
private getLastFileNoLock;
private getDataByFileHash;
private getDataByFileId;
private validateFileInfo;
/** Re-runs validation across the managed files. (Name breaks camelCase convention; kept for interface compatibility.) */
ReValidate(): Promise<void>;
private ReValidateNoLock;
private validateBfdForAdd;
private add;
private replaceBfdAtIndex;
/**
 * Updating an existing file occurs in two specific contexts:
 *
 * 1. CDN Update: CDN files of a specific `maxPerFile` series typically ends in a partial file
 * which may periodically add more headers until the next file is started.
 * If the CDN update is the second to last file (followed by an incremental file),
 * then the incremental file is updated or deleted and also returned as the result (with a count of zero if deleted).
 *
 * 2. Incremental Update: The last bulk file is almost always an "incremental" file
 * which is not limited by "maxPerFile" and holds all non-CDN bulk headers.
 * It is updated with new bulk headers which come either from non CDN ingestors or from live header migration to bulk.
 *
 * Updating preserves the following properties:
 *
 * - Any existing headers following this update are preserved and must form an unbroken chain.
 * - There can be at most one incremental file and it must be the last file.
 * - The update start conditions (height, prevHash, prevChainWork) must match an existing file which may be either CDN or internal.
 * - The update fileId must match, it may be undefined.
 * - The fileName does not need to match.
 * - The incremental file must always have fileName "incremental" and sourceUrl must be undefined.
 * - The update count must be greater than 0.
 * - The update count must be greater than current count for CDN to CDN update.
 *
 * @param update new validated BulkFileData to update.
 * @param hbf corresponding existing BulkFileData to update.
 */
private update;
private dropLastBulkFile;
/**
 * Remove work (and headers) from `truncate` that now exists in `update`.
 * There are two scenarios:
 * 1. `replaced` is undefined: update is a CDN file that splits an incremental file that must be truncated.
 * 2. `replaced` is valid: update is a CDN update that replaced an existing CDN file and splits an incremental file that must be truncated.
 * @param update the new CDN update file.
 * @param truncate the incremental file to be truncated (losing work which now exists in `update`).
 * @param replaced the existing CDN file that was replaced by `update` (if any).
 */
private shiftWork;
/**
 * Ensures the data for `bfd` is available in memory (loading from CDN or storage
 * as described on the class) — behavior assumed from the class docs; confirm in implementation.
 *
 * @param bfd bulk file entry whose data is required
 * @returns presumably the loaded data or the populated entry — not visible here
 */
private ensureData;
// Enforces the `maxRetained` in-memory file limit.
private ensureMaxRetained;
/** Writes the managed headers out to `toFs` under `toFolder`, re-chunked at `toHeadersPerFile` headers per file, optionally capped at `maxHeight`. */
exportHeadersToFs(toFs: ChaintracksFsApi, toHeadersPerFile: number, toFolder: string, sourceUrl?: string, maxHeight?: number): Promise<void>;
}
/**
 * Internal per-file entry: a `BulkHeaderFileInfo` plus in-memory bookkeeping.
 * Not exported; appears in `toLogString` and the manager's private state.
 */
interface BulkFileData extends BulkHeaderFileInfo {
// Recency counter — presumably drives `maxRetained` eviction (most-recently-used ordering); confirm in implementation.
mru: number;
// Content hash of the file's data; key into the manager's fileHashToIndex map.
fileHash: string;
}
/**
 * Selects the subset of `files` applicable to `chain` and the given `maxPerFile`
 * series — selection criteria assumed from the signature; confirm in implementation.
 *
 * @param files candidate bulk header files.
 * @param chain target blockchain.
 * @param maxPerFile headers-per-file series to select for.
 * @returns the selected files.
 */
export declare function selectBulkHeaderFiles(files: BulkHeaderFileInfo[], chain: Chain, maxPerFile: number): BulkHeaderFileInfo[];
/**
 * Outcome of `BulkFileDataManager.merge`: each input file lands in exactly one
 * category — presumably; confirm categorization is exclusive in the implementation.
 */
export interface BulkFileDataManagerMergeResult {
/** Files already present and identical; no action taken. */
unchanged: BulkHeaderFileInfo[];
/** Files newly added to the managed set. */
inserted: BulkHeaderFileInfo[];
/** Existing files whose contents were updated (see the class's `update` rules). */
updated: BulkHeaderFileInfo[];
/** Files removed from the managed set as a result of the merge. */
dropped: BulkHeaderFileInfo[];
}
/**
 * Sequential reader over a `BulkFileDataManager`'s files, yielding raw header
 * bytes in bounded chunks. Obtain instances via `BulkFileDataManager.createReader`.
 */
export declare class BulkFileDataReader {
/** Manager whose files are being read. */
readonly manager: BulkFileDataManager;
/** Height range this reader covers. */
readonly range: HeightRange;
/** Upper bound, in bytes, on the buffer returned by each `read()`. */
readonly maxBufferSize: number;
// Next block height to be read; advances as `read()` consumes the range.
nextHeight: number;
constructor(manager: BulkFileDataManager, range: HeightRange, maxBufferSize: number);
/**
 * Returns the Buffer of block headers from the given `file` for the given `range`.
 * If `range` is undefined, the file's full height range is read.
 * The returned Buffer will only contain headers in `file` and in `range`
 * @param file
 * @param range
 */
private readBufferFromFile;
/**
 * @returns an array containing the next `maxBufferSize` bytes of headers from the files.
 */
read(): Promise<Uint8Array | undefined>;
}
export {};
//# sourceMappingURL=BulkFileDataManager.d.ts.map