lotus-sdk

Central repository for several classes of tools for integrating with, and building for, the Lotusia ecosystem

import { ChronikClient } from 'chronik-client';
import { Hash } from '../bitcore/crypto/hash.js';
import { Script } from '../bitcore/script.js';

/**
 * Verifies OP_RETURN burn outputs via a Chronik indexer and extracts any
 * LOKAD-style metadata (4-byte prefix, 1-byte version, payload) they carry.
 */
export class BurnVerifier {
    chronik;

    constructor(chronikUrl) {
        this.chronik = new ChronikClient(chronikUrl);
    }

    /**
     * Fetches a transaction, confirms it is mined, and validates that the
     * given output is an OP_RETURN burn. Returns the parsed burn details,
     * or null if the transaction/output is missing or not a data output.
     */
    async verifyBurnTransaction(txId, outputIndex, maturationPeriod = 100) {
        try {
            const tx = await this.chronik.tx(txId);
            if (!tx) {
                console.warn(`[BurnVerifier] Transaction not found: ${txId}`);
                return null;
            }
            if (!tx.block) {
                console.warn(`[BurnVerifier] Transaction not mined yet: ${txId}`);
                return null;
            }
            // Confirmation count relative to the current chain tip.
            const blockchainInfo = await this.chronik.blockchainInfo();
            const currentHeight = blockchainInfo.tipHeight;
            const confirmations = currentHeight - tx.block.height + 1;
            const isMatured = maturationPeriod === 0 || confirmations >= maturationPeriod;
            if (!isMatured && maturationPeriod > 0) {
                console.log(`[BurnVerifier] Burn not yet matured: ${confirmations} confirmations (need ${maturationPeriod}) for ${txId}`);
            }
            const output = tx.outputs[outputIndex];
            if (!output) {
                console.warn(`[BurnVerifier] Output ${outputIndex} not found in transaction ${txId}`);
                return null;
            }
            const script = Script.fromHex(output.outputScript);
            if (!script.isDataOut()) {
                console.warn(`[BurnVerifier] Output ${outputIndex} is not OP_RETURN (script: ${output.outputScript})`);
                return null;
            }
            const burnAmount = parseInt(output.value, 10);
            // Parse optional LOKAD metadata: chunk 1 = 4-byte prefix,
            // chunk 2 = 1-byte version, remaining chunks = payload.
            let lokadPrefix;
            let lokadVersion;
            let lokadPayload;
            const chunks = script.chunks;
            if (chunks.length >= 3 && chunks[1].buf && chunks[1].buf.length === 4) {
                lokadPrefix = chunks[1].buf;
                if (chunks[2].buf && chunks[2].buf.length === 1) {
                    lokadVersion = chunks[2].buf[0];
                }
                if (chunks.length > 3) {
                    const payloadChunks = [];
                    for (let i = 3; i < chunks.length; i++) {
                        const chunkBuf = chunks[i].buf;
                        if (chunkBuf) {
                            payloadChunks.push(chunkBuf);
                        }
                    }
                    if (payloadChunks.length > 0) {
                        lokadPayload = Buffer.concat(payloadChunks);
                    }
                }
            }
            return {
                txId,
                outputIndex,
                burnAmount,
                blockHeight: tx.block.height,
                confirmations,
                isMatured,
                script,
                lokadPrefix,
                lokadVersion,
                lokadPayload,
                scriptHex: output.outputScript,
            };
        }
        catch (error) {
            console.error('[BurnVerifier] Error verifying burn transaction:', error);
            return null;
        }
    }

    /** Identity ID = sha256(txid bytes || output index byte), hex-encoded. */
    deriveIdentityId(txId, outputIndex) {
        const data = Buffer.concat([
            Buffer.from(txId, 'hex'),
            Buffer.from([outputIndex]),
        ]);
        return Hash.sha256(data).toString('hex');
    }

    /** Checks that the script's second chunk is a 4-byte push matching the expected LOKAD prefix. */
    verifyLokadPrefix(script, expectedPrefix) {
        const chunks = script.chunks;
        if (chunks.length < 2)
            return false;
        const prefix = chunks[1].buf;
        if (!prefix || prefix.length !== 4)
            return false;
        return prefix.equals(expectedPrefix);
    }

    /** Extracts a 33-byte compressed public key (0x02/0x03 prefix) from the start of a LOKAD payload. */
    parsePublicKeyFromLokad(lokadPayload) {
        if (!lokadPayload || lokadPayload.length < 33) {
            return null;
        }
        const pubKeyBytes = lokadPayload.slice(0, 33);
        const prefix = pubKeyBytes[0];
        if (prefix !== 0x02 && prefix !== 0x03) {
            return null;
        }
        return pubKeyBytes;
    }
}

// 1 XPI = 1,000,000 satoshis.
export function satoshisToXPI(satoshis) {
    return satoshis / 1_000_000;
}

export function xpiToSatoshis(xpi) {
    return Math.floor(xpi * 1_000_000);
}

export function formatXPI(satoshis) {
    const xpi = satoshisToXPI(satoshis);
    return `${xpi.toFixed(6)} XPI`;
}

/**
 * Polls a Chronik indexer for transaction confirmation status and
 * broadcasts raw transactions.
 */
export class TransactionMonitor {
    chronik;
    monitoredTxs = new Map();

    constructor(chronikUrl) {
        this.chronik = new ChronikClient(chronikUrl);
    }

    /** Returns confirmation info for a transaction, or null if it cannot be fetched. */
    async checkConfirmations(txId, requiredConfirmations = 1) {
        let tx = null;
        try {
            tx = await this.chronik.tx(txId);
        }
        catch (error) {
            console.error(`[TxMonitor] Error checking confirmations for ${txId}:`, error);
            return null;
        }
        if (!tx) {
            return null;
        }
        if (!tx.block) {
            // Still in the mempool.
            return {
                txId,
                blockHeight: -1,
                confirmations: 0,
                isConfirmed: false,
            };
        }
        let blockchainInfo = null;
        try {
            blockchainInfo = await this.chronik.blockchainInfo();
        }
        catch (error) {
            console.error(`[TxMonitor] Error getting blockchain info:`, error);
            return null;
        }
        const currentHeight = blockchainInfo.tipHeight;
        const confirmations = currentHeight - tx.block.height + 1;
        return {
            txId,
            blockHeight: tx.block.height,
            confirmations,
            isConfirmed: confirmations >= requiredConfirmations,
        };
    }

    /** Polls until the transaction reaches the required confirmations or the timeout elapses. */
    async waitForConfirmations(txId, requiredConfirmations = 1, pollInterval = 5000, timeout = 600000) {
        const startTime = Date.now();
        while (Date.now() - startTime < timeout) {
            const info = await this.checkConfirmations(txId, requiredConfirmations);
            if (info && info.isConfirmed) {
                return info;
            }
            await new Promise(resolve => setTimeout(resolve, pollInterval));
        }
        console.warn(`[TxMonitor] Timeout waiting for ${txId} confirmations`);
        return null;
    }

    /** Broadcasts a raw transaction hex string; returns its txid or null on failure. */
    async broadcastTransaction(txHex) {
        try {
            const result = await this.chronik.broadcastTx(txHex);
            console.log(`[TxMonitor] Broadcast successful: ${result.txid}`);
            return result.txid;
        }
        catch (error) {
            console.error('[TxMonitor] Broadcast failed:', error);
            return null;
        }
    }

    async getTransaction(txId) {
        try {
            return await this.chronik.tx(txId);
        }
        catch (error) {
            console.error(`[TxMonitor] Error fetching transaction ${txId}:`, error);
            return null;
        }
    }

    /** Fetches the P2PKH UTXO set for an address via Chronik. */
    async getUtxos(address) {
        try {
            const script = Script.fromAddress(address);
            const scriptHex = script.toHex();
            const utxos = await this.chronik.script('p2pkh', scriptHex).utxos();
            return utxos[0]?.utxos || [];
        }
        catch (error) {
            console.error(`[TxMonitor] Error fetching UTXOs for ${address}:`, error);
            return [];
        }
    }

    /** Checks confirmations for many txids concurrently; returns a Map of txid -> info. */
    async batchCheckConfirmations(txIds, requiredConfirmations = 1) {
        const results = new Map();
        const promises = txIds.map(async (txId) => {
            const info = await this.checkConfirmations(txId, requiredConfirmations);
            results.set(txId, info);
        });
        await Promise.all(promises);
        return results;
    }
}
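
A minimal usage sketch of the classes above. It assumes BurnVerifier, TransactionMonitor, and formatXPI are re-exported from the package entry point; the Chronik URL and transaction ID are placeholders, not real values.

// Sketch only: import path assumes package-root re-exports of this module.
import { BurnVerifier, TransactionMonitor, formatXPI } from 'lotus-sdk';

async function verifyBurn() {
    const chronikUrl = 'https://chronik.example.org'; // placeholder indexer URL
    const verifier = new BurnVerifier(chronikUrl);
    const monitor = new TransactionMonitor(chronikUrl);

    const txId = 'replace-with-a-real-64-char-hex-txid'; // placeholder
    // Wait for the transaction to confirm, then verify the OP_RETURN burn at output index 0.
    const confirmation = await monitor.waitForConfirmations(txId, 1);
    if (!confirmation) return;

    const burn = await verifier.verifyBurnTransaction(txId, 0);
    if (burn) {
        console.log(`Burned ${formatXPI(burn.burnAmount)} at height ${burn.blockHeight}`);
        console.log(`Identity ID: ${verifier.deriveIdentityId(burn.txId, burn.outputIndex)}`);
    }
}

verifyBurn().catch(console.error);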