@ceramicnetwork/anchor-listener
Version:
Ceramic anchor events listener
101 lines • 4.96 kB
JavaScript
import { ANCHOR_CONTRACT_ADDRESS } from '@ceramicnetwork/anchor-utils';
import { EMPTY, bufferCount, concatMap, defer, expand, firstValueFrom, map, pipe, mergeMap, range, retry, from, } from 'rxjs';
import { createAnchorProof } from './utils.js';
// Default number of blocks grouped into one getLogs request / proof batch
// (used as the bufferCount fallback in createBlocksProofsLoader).
const GET_LOGS_BATCH_SIZE = 1000;
/**
 * Create an Observable that lazily loads a single block from the provider,
 * retrying on failure.
 *
 * @param provider - Ethereum provider exposing `getBlock`
 * @param block - block number, hash, or tag to load
 * @param retryConfig - rxjs retry configuration (defaults to 3 attempts)
 * @returns Observable emitting the loaded block
 */
export function createBlockLoader(provider, block, retryConfig = { count: 3 }) {
    const block$ = defer(() => provider.getBlock(block));
    return block$.pipe(retry(retryConfig));
}
/**
 * Load a single block from the provider (with retries) and resolve with it.
 *
 * @param provider - Ethereum provider exposing `getBlock`
 * @param block - block number, hash, or tag to load
 * @param retryConfig - rxjs retry configuration forwarded to the loader
 * @returns the loaded block
 */
export async function loadBlock(provider, block, retryConfig) {
    const loader = createBlockLoader(provider, block, retryConfig);
    return firstValueFrom(loader);
}
/**
 * rxjs operator: for each emitted array of block proofs, load the block each
 * proofs entry points at (by `blockHash`) and emit `{ block, proofs }` pairs
 * in the original order.
 *
 * @param provider - Ethereum provider exposing `getBlock`
 * @param retryConfig - rxjs retry configuration (defaults to 3 attempts)
 */
export function mapLoadBlockForBlockProofs(provider, retryConfig = { count: 3 }) {
    // Kick off all block loads eagerly (mergeMap flattens the promise array),
    // then concatMap awaits each promise so emission order is preserved.
    const loadPair = async (proofs) => ({
        block: await loadBlock(provider, proofs.blockHash, retryConfig),
        proofs,
    });
    return pipe(mergeMap((blockProofsForRange) => blockProofsForRange.map(loadPair)), concatMap((pending) => pending));
}
/**
 * Group an array of logs into a record keyed by `blockNumber`.
 * Logs within each group keep their input order.
 */
const groupLogsByBlockNumber = (logs) => {
    const grouped = {};
    for (const log of logs) {
        if (!grouped[log.blockNumber]) {
            grouped[log.blockNumber] = [];
        }
        grouped[log.blockNumber].push(log);
    }
    return grouped;
};
/**
 * Create an Observable that loads all anchor-contract logs in a block range
 * and emits one BlockProofs entry per block number (ascending), converting
 * each log into an anchor proof.
 *
 * The boundary blocks of the range are always present in the output, even if
 * they emitted no logs; interior blocks appear only when they have logs.
 *
 * @param provider - Ethereum provider exposing `getLogs` / `getBlock`
 * @param chainId - CAIP-2 chain id forwarded to `createAnchorProof`
 * @param blockRangeFilter - `{ fromBlock, toBlock }` inclusive range
 * @param retryConfig - rxjs retry configuration (defaults to 3 attempts)
 */
export function createBlockProofsLoaderForRange(provider, chainId, blockRangeFilter, retryConfig = { count: 3 }) {
    return defer(async () => {
        // Fetch logs and the range's boundary blocks in parallel; skip the
        // second getBlock call when the range covers a single block.
        return await Promise.all([
            provider.getLogs({
                address: ANCHOR_CONTRACT_ADDRESS,
                fromBlock: blockRangeFilter.fromBlock,
                toBlock: blockRangeFilter.toBlock,
            }),
            provider.getBlock(blockRangeFilter.fromBlock),
            blockRangeFilter.fromBlock === blockRangeFilter.toBlock
                ? undefined
                : provider.getBlock(blockRangeFilter.toBlock),
        ]);
    }).pipe(retry(retryConfig), map(([logs, fromBlock, toBlock = fromBlock]) => {
        const logsByBlockNumber = groupLogsByBlockNumber(logs);
        // Guarantee the boundary blocks are represented even with no logs.
        if (!logsByBlockNumber[fromBlock.number]) {
            logsByBlockNumber[fromBlock.number] = [];
        }
        if (!logsByBlockNumber[toBlock.number]) {
            logsByBlockNumber[toBlock.number] = [];
        }
        // BUGFIX: sort block numbers numerically. The previous plain .sort()
        // compared the keys as strings, so e.g. "1000" sorted before "999"
        // and proofs could be emitted out of block order whenever a range
        // crossed a power-of-ten boundary.
        return Object.keys(logsByBlockNumber)
            .map((blockNumberStr) => parseInt(blockNumberStr, 10))
            .sort((a, b) => a - b)
            .map((blockNumber) => {
            const logs = logsByBlockNumber[blockNumber];
            let blockHash;
            if (blockNumber === fromBlock.number) {
                blockHash = fromBlock.hash;
            }
            else if (blockNumber === toBlock.number) {
                blockHash = toBlock.hash;
            }
            else
                // Interior blocks were only discovered via their logs, so the
                // hash comes from the first log of the block.
                blockHash = logs[0]?.blockHash;
            return {
                blockNumber: blockNumber,
                blockHash,
                proofs: logs.map((log) => createAnchorProof(chainId, log)),
            };
        });
    }));
}
/**
 * Load all anchor proofs for a block range and resolve with the array of
 * per-block proof sets.
 *
 * @param provider - Ethereum provider exposing `getLogs` / `getBlock`
 * @param chainId - CAIP-2 chain id forwarded to proof creation
 * @param blockRangeFilter - `{ fromBlock, toBlock }` inclusive range
 * @param retryConfig - rxjs retry configuration forwarded to the loader
 */
export async function loadBlockProofsForRange(provider, chainId, blockRangeFilter, retryConfig) {
    const proofs$ = createBlockProofsLoaderForRange(provider, chainId, blockRangeFilter, retryConfig);
    return firstValueFrom(proofs$);
}
/**
 * rxjs operator: for each emitted block, load the anchor proofs of exactly
 * that block (a single-block range) and emit the resulting proof set.
 *
 * @param provider - Ethereum provider exposing `getLogs` / `getBlock`
 * @param chainId - CAIP-2 chain id forwarded to proof creation
 * @param retryConfig - rxjs retry configuration (defaults to 3 attempts)
 * @throws if the single-block range does not yield exactly one proof set
 */
export function mapLoadBlockProofs(provider, chainId, retryConfig = { count: 3 }) {
    return pipe(concatMap(async (block) => {
        const singleBlockRange = { fromBlock: block.number, toBlock: block.number };
        const proofSets = await loadBlockProofsForRange(provider, chainId, singleBlockRange, retryConfig);
        if (proofSets.length === 1) {
            return proofSets[0];
        }
        throw Error(`Did not receive exactly one set of proofs for block ${block.number}. Received ${proofSets.length} sets`);
    }));
}
/**
 * rxjs operator: for each emitted `{ fromBlock, toBlock }` range, load the
 * anchor proofs of that range and emit each per-block proof set in order.
 *
 * @param provider - Ethereum provider exposing `getLogs` / `getBlock`
 * @param chainId - CAIP-2 chain id forwarded to proof creation
 * @param retryConfig - rxjs retry configuration (defaults to 3 attempts)
 */
export function mapLoadBlockProofsForRange(provider, chainId, retryConfig = { count: 3 }) {
    const loadRange = (blockRangeFilter) => loadBlockProofsForRange(provider, chainId, blockRangeFilter, retryConfig);
    return pipe(concatMap(loadRange));
}
/**
 * Create an Observable emitting the anchor proofs of every block in the
 * inclusive range `[fromBlock, toBlock]`, batching getLogs requests by
 * `blockLoadBuffer` blocks (default GET_LOGS_BATCH_SIZE).
 *
 * @param provider - Ethereum provider exposing `getLogs` / `getBlock`
 * @param chainId - CAIP-2 chain id forwarded to proof creation
 * @param fromBlock - first block number (inclusive)
 * @param toBlock - last block number (inclusive)
 * @param retryConfig - rxjs retry configuration (defaults to 3 attempts)
 * @param blockLoadBuffer - blocks per batched getLogs request
 */
export function createBlocksProofsLoader({ provider, chainId, fromBlock, toBlock, retryConfig, blockLoadBuffer, }) {
    // Named to avoid shadowing the rxjs `retry` operator imported above.
    const effectiveRetry = retryConfig ?? { count: 3 };
    const blockCount = toBlock - fromBlock + 1;
    return range(fromBlock, blockCount).pipe(
        bufferCount(blockLoadBuffer ?? GET_LOGS_BATCH_SIZE),
        // Collapse each batch of consecutive block numbers into a range filter.
        map((numbers) => ({
            fromBlock: numbers[0],
            toBlock: numbers[numbers.length - 1],
        })),
        mapLoadBlockProofsForRange(provider, chainId, effectiveRetry),
        // Flatten each range's array of per-block proof sets into the stream.
        concatMap((blockProofsForRange) => from(blockProofsForRange)));
}
/**
 * Create an Observable that walks the chain backwards from `initialBlock`
 * via `parentHash` until a block whose parent is `targetAncestorHash`, and
 * emits the anchor proofs of every visited block.
 *
 * @param provider - Ethereum provider exposing `getBlock` / `getLogs`
 * @param chainId - CAIP-2 chain id forwarded to proof creation
 * @param initialBlock - block number, hash, or tag to start from
 * @param targetAncestorHash - hash of the ancestor to stop in front of
 *   (the block with this hash is NOT itself emitted)
 * @param retryConfig - rxjs retry configuration (defaults to 3 attempts)
 * @param maxConcurrency - concurrency limit passed to rxjs `expand`
 */
export function createAncestorBlocksProofsLoader({ provider, chainId, initialBlock, targetAncestorHash, retryConfig, maxConcurrency, }) {
    // Named to avoid shadowing the rxjs `retry` operator imported above.
    const effectiveRetry = retryConfig ?? { count: 3 };
    // Recursively follow parentHash; stop once the target ancestor is next.
    const stepToParent = (block) => {
        if (block.parentHash === targetAncestorHash) {
            return EMPTY;
        }
        return createBlockLoader(provider, block.parentHash, effectiveRetry);
    };
    return createBlockLoader(provider, initialBlock, effectiveRetry).pipe(
        expand(stepToParent, maxConcurrency),
        mapLoadBlockProofs(provider, chainId, effectiveRetry));
}
//# sourceMappingURL=loader.js.map