// @bsv/wallet-toolbox-client — Client-only Wallet Storage (compiled JavaScript output)
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.processAction = processAction;
exports.shareReqsWithWorld = shareReqsWithWorld;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const sdk_1 = require("@bsv/sdk");
const aggregateResults_1 = require("../../utility/aggregateResults");
const stampLog_1 = require("../../utility/stampLog");
const utilityHelpers_1 = require("../../utility/utilityHelpers");
const EntityProvenTxReq_1 = require("../schema/entities/EntityProvenTxReq");
const WERR_errors_1 = require("../../sdk/WERR_errors");
const parseTxScriptOffsets_1 = require("../../utility/parseTxScriptOffsets");
const utilityHelpers_noBuffer_1 = require("../../utility/utilityHelpers.noBuffer");
/**
 * Commits a newly created transaction (when `args.isNewTx`) and shares the
 * requested reqs with the network via `shareReqsWithWorld`.
 *
 * @param storage wallet storage provider
 * @param auth authenticated identity; `auth.userId` must verify
 * @param args processAction args: sendWith txids, isNewTx/isNoSend/isSendWith/isDelayed flags, optional logger
 * @returns results object with `sendWithResults` and `notDelayedResults`
 */
async function processAction(storage, auth, args) {
    const { logger } = args;
    logger?.group('storage processAction');
    const userId = (0, utilityHelpers_1.verifyId)(auth.userId);
    const results = {
        sendWithResults: undefined
    };
    // Start from the caller's explicit sendWith list; a newly committed tx may be appended below.
    const txidsToShare = [...args.sendWith];
    let req;
    if (args.isNewTx) {
        const vargs = await validateCommitNewTxToStorageArgs(storage, userId, args);
        logger?.log(`validated new tx updates to storage`);
        ({ req } = await commitNewTxToStorage(storage, userId, vargs));
        logger?.log(`committed new tx updates to storage `);
        if (!req)
            throw new WERR_errors_1.WERR_INTERNAL();
        // Add the new txid to sendWith unless there are no others to send and the noSend option is set.
        if (args.isNoSend && !args.isSendWith) {
            logger?.log(`noSend txid ${req.txid}`);
        }
        else {
            txidsToShare.push(req.txid);
            logger?.log(`sending txid ${req.txid}`);
        }
    }
    const { swr, ndr } = await shareReqsWithWorld(storage, userId, txidsToShare, args.isDelayed, undefined, logger);
    results.sendWithResults = swr;
    results.notDelayedResults = ndr;
    logger?.groupEnd();
    return results;
}
/**
 * Verifies that all the txids are known reqs with ready-to-share status.
 * Assigns a batch identifier and updates all the provenTxReqs.
 * If not isDelayed, triggers an initial attempt to broadcast the batch and returns the results.
 *
 * @param storage wallet storage provider
 * @param userId owning storage user id
 * @param txids txids of reqs to share; ignored when `r` is supplied
 * @param isDelayed when true, reqs are only queued for background sending
 * @param r Optional. Ignores txids and allows ProvenTxReqs and merged beef to be passed in.
 * @param logger optional console-like logger
 * @returns `swr` per-txid send results; `ndr` non-delayed aggregate results (undefined when delayed)
 */
async function shareReqsWithWorld(storage, userId, txids, isDelayed, r, logger) {
    const swr = [];
    let ndr = undefined;
    // Nothing to do: no caller-supplied reqs and no txids requested.
    if (!r && txids.length < 1)
        return { swr, ndr };
    // Collect what we know about these sendWith transaction txids from storage.
    if (!r)
        r = await storage.getReqsAndBeefToShareWithWorld(txids, []);
    // Classify each req: already sent -> 'unproven', ready -> 'sending' (and queued), anything else -> 'failed'.
    const readyToSendReqs = [];
    for (const detail of r.details) {
        let status;
        switch (detail.status) {
            case 'alreadySent':
                status = 'unproven';
                break;
            case 'readyToSend':
                status = 'sending';
                readyToSendReqs.push(new EntityProvenTxReq_1.EntityProvenTxReq(detail.req));
                break;
            default:
                status = 'failed';
        }
        swr.push({
            txid: detail.txid,
            status
        });
    }
    // Filter original txids down to reqIds that are available and need sending
    const readyToSendReqIds = readyToSendReqs.map(req => req.id);
    const transactionIds = readyToSendReqs.flatMap(req => req.notify.transactionIds || []);
    // If there are reqs to send, verify that we have a valid aggregate beef for them.
    // If isDelayed, this (or a different beef) will have to be rebuilt at the time of sending.
    if (readyToSendReqs.length > 0) {
        const chainTracker = await storage.getServices().getChainTracker();
        const beefIsValid = await r.beef.verify(chainTracker);
        if (!beefIsValid) {
            logger?.error(`VERIFY FALSE BEEF: ${r.beef.toLogString()}`);
            throw new WERR_errors_1.WERR_INTERNAL(`merged Beef failed validation.`);
        }
        logger?.log(`beef is valid`);
    }
    // Multiple txids share a batch identifier so they stay grouped through sending.
    const batch = txids.length > 1 ? (0, utilityHelpers_1.randomBytesBase64)(16) : undefined;
    if (isDelayed) {
        // Just bump the req status to 'unsent' to enable background sending...
        if (readyToSendReqIds.length > 0) {
            await storage.transaction(async (trx) => {
                await storage.updateProvenTxReq(readyToSendReqIds, { status: 'unsent', batch }, trx);
                await storage.updateTransaction(transactionIds, { status: 'sending' }, trx);
            });
        }
        return { swr, ndr };
    }
    if (readyToSendReqIds.length < 1)
        return { swr, ndr };
    if (batch) {
        // Keep the in-memory entities and the storage rows' batch values in sync...
        for (const req of readyToSendReqs)
            req.batch = batch;
        await storage.updateProvenTxReq(readyToSendReqIds, { batch });
    }
    //
    // Handle the NON-DELAYED-SEND-NOW case
    //
    const prtn = await storage.attemptToPostReqsToNetwork(readyToSendReqs, undefined, logger);
    const { swr: aggregatedSwr, rar } = await (0, aggregateResults_1.aggregateActionResults)(storage, swr, prtn);
    return { swr: aggregatedSwr, ndr: rar };
}
/**
 * Validates `processAction` params for a new transaction and resolves all the
 * storage state needed to commit it: the parsed transaction, script offsets,
 * the user's transaction record, input/output records, optional service
 * commission, input beef, and the new ProvenTxReq entity.
 *
 * Also derives the pre/post status values for the req and transaction records
 * and the per-output table updates to apply at commit time.
 *
 * @param storage wallet storage provider
 * @param userId verified storage user id owning the transaction
 * @param params must include `reference`, `txid` and `rawTx`
 * @returns fully validated vargs object consumed by `commitNewTxToStorage`
 * @throws WERR_INVALID_OPERATION on any validation failure
 * @throws WERR_INTERNAL when a required commission record is missing or flags are inconsistent
 */
async function validateCommitNewTxToStorageArgs(storage, userId, params) {
    if (!params.reference || !params.txid || !params.rawTx)
        throw new WERR_errors_1.WERR_INVALID_OPERATION('One or more expected params are undefined.');
    let tx;
    try {
        tx = sdk_1.Transaction.fromBinary(params.rawTx);
    }
    catch (e) {
        throw new WERR_errors_1.WERR_INVALID_OPERATION('Parsing serialized transaction failed.');
    }
    if (params.txid !== tx.id('hex'))
        throw new WERR_errors_1.WERR_INVALID_OPERATION(`Hash of serialized transaction doesn't match expected txid`);
    // BUG FIX: nLockTimeIsFinal returns a Promise (services methods are async,
    // cf. the awaited getChainTracker() elsewhere in this file). The previous
    // code negated the promise itself — a promise is always truthy, so the
    // finality check could never fail. Await the resolved boolean first.
    const services = await storage.getServices();
    if (!(await services.nLockTimeIsFinal(tx))) {
        throw new WERR_errors_1.WERR_INVALID_OPERATION(`This transaction is not final.
Ensure that the transaction meets the rules for being a finalized
which can be found at https://wiki.bitcoinsv.io/index.php/NLocktime_and_nSequence`);
    }
    const txScriptOffsets = (0, parseTxScriptOffsets_1.parseTxScriptOffsets)(params.rawTx);
    // Exactly one of this user's transactions must match the reference.
    const transaction = (0, utilityHelpers_1.verifyOne)(await storage.findTransactions({
        partial: { userId, reference: params.reference }
    }));
    if (!transaction.isOutgoing)
        throw new WERR_errors_1.WERR_INVALID_OPERATION('isOutgoing is not true');
    if (!transaction.inputBEEF)
        throw new WERR_errors_1.WERR_INVALID_OPERATION();
    const beef = sdk_1.Beef.fromBinary((0, utilityHelpers_noBuffer_1.asArray)(transaction.inputBEEF));
    // TODO: Could check beef validates transaction inputs...
    // Transaction must have unsigned or unprocessed status
    if (transaction.status !== 'unsigned' && transaction.status !== 'unprocessed')
        throw new WERR_errors_1.WERR_INVALID_OPERATION(`invalid transaction status ${transaction.status}`);
    const transactionId = (0, utilityHelpers_1.verifyId)(transaction.transactionId);
    // Outputs created by this transaction and outputs it spends.
    const outputOutputs = await storage.findOutputs({
        partial: { userId, transactionId }
    });
    const inputOutputs = await storage.findOutputs({
        partial: { userId, spentBy: transactionId }
    });
    const commission = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findCommissions({ partial: { transactionId, userId } }));
    if (storage.commissionSatoshis > 0) {
        // A commission is required: a recorded commission row must exist and
        // the transaction must actually contain the matching fee output.
        if (!commission)
            throw new WERR_errors_1.WERR_INTERNAL();
        const commissionValid = tx.outputs.some(x => x.satoshis === commission.satoshis && x.lockingScript.toHex() === (0, utilityHelpers_noBuffer_1.asString)(commission.lockingScript));
        if (!commissionValid)
            throw new WERR_errors_1.WERR_INVALID_OPERATION('Transaction did not include an output to cover service fee.');
    }
    const req = EntityProvenTxReq_1.EntityProvenTxReq.fromTxid(params.txid, params.rawTx, transaction.inputBEEF);
    req.addNotifyTransactionId(transactionId);
    // "Processing" a transaction is the final step of creating a new one.
    // If it is to be sent to the network directly (prior to return from processAction),
    // then there is status pre-send and post-send.
    // Otherwise there is no post-send status.
    // Note that isSendWith trumps isNoSend, e.g. isNoSend && !isSendWith
    //
    // Determine what status the req and transaction should have pre- at the end of processing.
    //                          Pre-Status (to newReq/newTx)   Post-Status (to all sent reqs/txs)
    //                          req          tx                req             tx
    // isNoSend                 noSend       noSend
    // !isNoSend && isDelayed   unsent       unprocessed
    // !isNoSend && !isDelayed  unprocessed  unprocessed       sending/unmined sending/unproven   This is the only case that sends immediately.
    let postStatus = undefined;
    let status;
    if (params.isNoSend && !params.isSendWith)
        status = { req: 'nosend', tx: 'nosend' };
    else if (!params.isNoSend && params.isDelayed)
        status = { req: 'unsent', tx: 'unprocessed' };
    else if (!params.isNoSend && !params.isDelayed) {
        status = { req: 'unprocessed', tx: 'unprocessed' };
        postStatus = { req: 'unmined', tx: 'unproven' };
    }
    else
        throw new WERR_errors_1.WERR_INTERNAL('logic error');
    req.status = status.req;
    const vargs = {
        reference: params.reference,
        txid: params.txid,
        rawTx: params.rawTx,
        isSendWith: !!params.sendWith && params.sendWith.length > 0,
        isDelayed: params.isDelayed,
        isNoSend: params.isNoSend,
        // Properties with values added during validation.
        tx,
        txScriptOffsets,
        transactionId,
        transaction,
        inputOutputs,
        outputOutputs,
        commission,
        beef,
        req,
        outputUpdates: [],
        // update txid, status in transactions table and drop rawTransaction value
        transactionUpdate: {
            txid: params.txid,
            rawTx: undefined,
            inputBEEF: undefined,
            status: status.tx
        },
        postStatus
    };
    // update outputs with txid, script offsets and lengths, drop long output scripts from outputs table
    // outputs spendable will be updated for change to true and all others to !!o.tracked when tx has been broadcast
    // MAX_OUTPUTSCRIPT_LENGTH is limit for scripts left in outputs table
    for (const o of vargs.outputOutputs) {
        const vout = (0, utilityHelpers_1.verifyInteger)(o.vout);
        const offset = vargs.txScriptOffsets.outputs[vout];
        const rawTxScript = (0, utilityHelpers_noBuffer_1.asString)(vargs.rawTx.slice(offset.offset, offset.offset + offset.length));
        // Cross-check the stored output script against both the raw bytes and the parsed transaction.
        if (o.lockingScript && rawTxScript !== (0, utilityHelpers_noBuffer_1.asString)(o.lockingScript))
            throw new WERR_errors_1.WERR_INVALID_OPERATION(`rawTx output locking script for vout ${vout} not equal to expected output script.`);
        if (tx.outputs[vout].lockingScript.toHex() !== rawTxScript)
            throw new WERR_errors_1.WERR_INVALID_OPERATION(`parsed transaction output locking script for vout ${vout} not equal to expected output script.`);
        const update = {
            txid: vargs.txid,
            spendable: true, // spendability is gated by transaction status. Remains true until the output is spent.
            scriptLength: offset.length,
            scriptOffset: offset.offset
        };
        if (offset.length > (await storage.getSettings()).maxOutputScript)
            // Remove long lockingScript data from outputs table, will be read from rawTx in proven_tx or proven_tx_reqs tables.
            update.lockingScript = undefined;
        vargs.outputUpdates.push({ id: o.outputId, update });
    }
    return vargs;
}
/**
 * Atomically writes a validated new transaction to storage: inserts (or
 * merges) the ProvenTxReq, applies the per-output updates, and updates the
 * transactions table row — all within a single storage transaction.
 *
 * @param storage wallet storage provider
 * @param userId owning storage user id (unused directly; kept for signature parity)
 * @param vargs validated args produced by `validateCommitNewTxToStorageArgs`
 * @returns `{ req, log }` — the inserted/merged req (verified truthy) and the stamp log
 */
async function commitNewTxToStorage(storage, userId, vargs) {
    let log = (0, stampLog_1.stampLog)(vargs.log, `start storage commitNewTxToStorage`);
    let req;
    await storage.transaction(async (trx) => {
        log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction start`);
        // Create initial 'nosend' proven_tx_req record to store signed, valid rawTx and input beef
        req = await vargs.req.insertOrMerge(storage, trx);
        log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage req inserted`);
        for (const { id, update } of vargs.outputUpdates) {
            await storage.updateOutput(id, update, trx);
        }
        log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage outputs updated`);
        await storage.updateTransaction(vargs.transactionId, vargs.transactionUpdate, trx);
        log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction end`);
    });
    log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction await done`);
    // Note: the final stamp below is applied after `result.log` is captured,
    // matching the original ordering (the returned log omits the "end" stamp).
    const result = {
        req: (0, utilityHelpers_1.verifyTruthy)(req),
        log
    };
    log = (0, stampLog_1.stampLog)(log, `end storage commitNewTxToStorage`);
    return result;
}
//# sourceMappingURL=processAction.js.map