// wallet-storage-client — Client only Wallet Storage (compiled JavaScript)
// Version: unspecified
// 260 lines • 13.2 kB
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.processAction = processAction;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const sdk_1 = require("@bsv/sdk");
const index_client_1 = require("../../index.client");
/**
 * Top-level storage entry point for processing a wallet action:
 * optionally commits a newly signed transaction to storage, then
 * shares the requested txids (sendWith plus any new tx) with the network.
 *
 * @param storage storage provider (validation, commit, broadcast helpers)
 * @param auth authenticated context; `auth.userId` is verified and used for all lookups
 * @param args action arguments: `log`, `sendWith`, `isNewTx`, `isNoSend`, `isSendWith`, `isDelayed`, ...
 * @returns `{ sendWithResults }` — populated only when `args.isSendWith` is set
 */
async function processAction(storage, auth, args) {
    (0, index_client_1.stampLog)(args.log, `start storage processActionSdk`);
    const userId = (0, index_client_1.verifyId)(auth.userId);
    // txids to broadcast: caller-specified sendWith txids, plus the new tx when applicable.
    const txidsToShare = [...args.sendWith];
    const result = { sendWithResults: undefined };
    let req;
    if (args.isNewTx) {
        // Validate and persist the new transaction before any broadcast attempt.
        const vargs = await validateCommitNewTxToStorageArgs(storage, userId, args);
        ({ req, log: args.log } = await commitNewTxToStorage(storage, userId, vargs));
        if (!req)
            throw new index_client_1.sdk.WERR_INTERNAL();
        // Add the new txid to sendWith unless there are no others to send and the noSend option is set.
        if (!args.isNoSend || args.isSendWith) {
            txidsToShare.push(req.txid);
            (0, index_client_1.stampLog)(args.log, `... storage processActionSdk newTx committed sendWith ${req.txid}`);
        }
        else {
            (0, index_client_1.stampLog)(args.log, `... storage processActionSdk newTx committed noSend`);
        }
    }
    const shareResults = await shareReqsWithWorld(storage, userId, txidsToShare, args.isDelayed);
    if (args.isSendWith)
        result.sendWithResults = shareResults;
    (0, index_client_1.stampLog)(args.log, `end storage processActionSdk`);
    return result;
}
/**
* Verifies that all the txids are known reqs with ready-to-share status.
* Assigns a batch identifier and updates all the provenTxReqs.
* If not isDelayed, triggers an initial attempt to broadcast the batch and returns the results.
*
* @param storage
* @param userId
* @param txids
* @param isDelayed
*/
async function shareReqsWithWorld(storage, userId, txids, isDelayed) {
    if (txids.length < 1)
        return [];
    // Collect what we know about these sendWith transaction txids from storage.
    const shared = await storage.getReqsAndBeefToShareWithWorld(txids, []);
    // Initialize aggregate results for each txid
    const aggregates = shared.details.map(getReq => ({ txid: getReq.txid, getReq }));
    // Builds the per-txid result list from the aggregate state accumulated so far.
    const createSendWithResults = () => {
        const results = [];
        for (const agg of aggregates) {
            let status = 'failed';
            if (agg.getReq.status === 'alreadySent')
                status = 'unproven';
            else if (agg.getReq.status === 'readyToSend' && (isDelayed || agg.postBeef?.status === 'success'))
                status = 'sending';
            results.push({ txid: agg.txid, status });
        }
        return results;
    };
    // Filter original txids down to reqIds that are available and need sending
    const readyReqs = shared.details
        .filter(d => d.status === 'readyToSend')
        .map(d => new index_client_1.entity.ProvenTxReq(d.req));
    const readyReqIds = readyReqs.map(req => req.id);
    const transactionIds = readyReqs.flatMap(req => req.notify.transactionIds || []);
    // If there are reqs to send, verify that we have a valid aggregate beef for them.
    // If isDelayed, this (or a different beef) will have to be rebuilt at the time of sending.
    if (readyReqs.length > 0) {
        const chainTracker = await storage.getServices().getChainTracker();
        const beefIsValid = await shared.beef.verify(chainTracker);
        if (!beefIsValid)
            throw new index_client_1.sdk.WERR_INTERNAL(`merged Beef failed validation.`);
    }
    // Set req batch property for the reqs being sent
    // If delayed, also bump status to 'unsent' and we're done here
    const batch = txids.length > 1 ? (0, index_client_1.randomBytesBase64)(16) : undefined;
    if (isDelayed) {
        // Just bump the req status to 'unsent' to enable background sending...
        if (readyReqIds.length > 0) {
            await storage.transaction(async (trx) => {
                await storage.updateProvenTxReq(readyReqIds, { status: 'unsent', batch }, trx);
                await storage.updateTransaction(transactionIds, { status: 'sending' }, trx);
            });
        }
        return createSendWithResults();
    }
    if (readyReqIds.length < 1)
        return createSendWithResults();
    if (batch) {
        // Keep batch values in sync...
        for (const req of readyReqs)
            req.batch = batch;
        await storage.updateProvenTxReq(readyReqIds, { batch });
    }
    //
    // Handle the NON-DELAYED-SEND-NOW case
    //
    const postResults = await storage.attemptToPostReqsToNetwork(readyReqs);
    // merge the individual PostBeefResultForTxid results to postBeef in aggregate results.
    for (const agg of aggregates) {
        const detail = postResults.details.find(d => d.txid === agg.txid);
        if (detail)
            agg.postBeef = detail.pbrft;
    }
    return createSendWithResults();
}
/**
 * Validates the parameters for committing a newly signed transaction to storage
 * and assembles the verified argument object consumed by `commitNewTxToStorage`.
 *
 * Checks performed:
 * - `reference`, `txid`, `rawTx` are present; `rawTx` parses and hashes to `txid`.
 * - nLockTime finality, outgoing status, presence of inputBEEF,
 *   transaction status is 'unsigned' or 'unprocessed'.
 * - When the storage charges a commission, an output covering it must exist.
 * - Each stored output's locking script matches the rawTx and parsed tx.
 *
 * @param storage storage provider
 * @param userId verified owner of the transaction
 * @param params raw processAction args (reference, txid, rawTx, send flags, ...)
 * @returns validated vargs including parsed `tx`, `req` entity, pre/post statuses and pending output/transaction updates
 * @throws WERR_INVALID_OPERATION / WERR_INTERNAL on any failed check
 */
async function validateCommitNewTxToStorageArgs(storage, userId, params) {
    if (!params.reference || !params.txid || !params.rawTx)
        throw new index_client_1.sdk.WERR_INVALID_OPERATION('One or more expected params are undefined.');
    let tx;
    try {
        tx = sdk_1.Transaction.fromBinary(params.rawTx);
    }
    catch (e) {
        throw new index_client_1.sdk.WERR_INVALID_OPERATION('Parsing serialized transaction failed.');
    }
    if (params.txid !== tx.id('hex'))
        throw new index_client_1.sdk.WERR_INVALID_OPERATION(`Hash of serialized transaction doesn't match expected txid`);
    // FIX: the result of nLockTimeIsFinal was not awaited. If it returns a Promise,
    // `!promise` is always false and non-final transactions would silently pass this
    // guard. Awaiting the result is harmless when the implementation is synchronous.
    if (!(await (await storage.getServices()).nLockTimeIsFinal(tx))) {
        throw new index_client_1.sdk.WERR_INVALID_OPERATION(`This transaction is not final.
Ensure that the transaction meets the rules for being a finalized
which can be found at https://wiki.bitcoinsv.io/index.php/NLocktime_and_nSequence`);
    }
    const txScriptOffsets = (0, index_client_1.parseTxScriptOffsets)(params.rawTx);
    const transaction = (0, index_client_1.verifyOne)(await storage.findTransactions({ partial: { userId, reference: params.reference } }));
    if (!transaction.isOutgoing)
        throw new index_client_1.sdk.WERR_INVALID_OPERATION('isOutgoing is not true');
    if (!transaction.inputBEEF)
        throw new index_client_1.sdk.WERR_INVALID_OPERATION();
    const beef = sdk_1.Beef.fromBinary((0, index_client_1.asArray)(transaction.inputBEEF));
    // TODO: Could check beef validates transaction inputs...
    // Transaction must have unsigned or unprocessed status
    if (transaction.status !== 'unsigned' && transaction.status !== 'unprocessed')
        throw new index_client_1.sdk.WERR_INVALID_OPERATION(`invalid transaction status ${transaction.status}`);
    const transactionId = (0, index_client_1.verifyId)(transaction.transactionId);
    const outputOutputs = await storage.findOutputs({ partial: { userId, transactionId } });
    const inputOutputs = await storage.findOutputs({ partial: { userId, spentBy: transactionId } });
    const commission = (0, index_client_1.verifyOneOrNone)(await storage.findCommissions({ partial: { transactionId, userId } }));
    if (storage.commissionSatoshis > 0) {
        // A commission is required...
        if (!commission)
            throw new index_client_1.sdk.WERR_INTERNAL();
        const commissionValid = tx.outputs
            .some(x => x.satoshis === commission.satoshis && x.lockingScript.toHex() === (0, index_client_1.asString)(commission.lockingScript));
        if (!commissionValid)
            throw new index_client_1.sdk.WERR_INVALID_OPERATION('Transaction did not include an output to cover service fee.');
    }
    const req = index_client_1.entity.ProvenTxReq.fromTxid(params.txid, params.rawTx, transaction.inputBEEF);
    req.addNotifyTransactionId(transactionId);
    // "Processing" a transaction is the final step of creating a new one.
    // If it is to be sent to the network directly (prior to return from processAction),
    // then there is status pre-send and post-send.
    // Otherwise there is no post-send status.
    // Note that isSendWith trumps isNoSend, e.g. isNoSend && !isSendWith
    //
    // Determine what status the req and transaction should have pre- at the end of processing.
    //                           Pre-Status (to newReq/newTx)     Post-Status (to all sent reqs/txs)
    //                           req         tx                   req             tx
    // isNoSend                  noSend      noSend
    // !isNoSend && isDelayed    unsent      unprocessed
    // !isNoSend && !isDelayed   unprocessed unprocessed          sending/unmined sending/unproven  This is the only case that sends immediately.
    let postStatus = undefined;
    let status;
    if (params.isNoSend && !params.isSendWith)
        status = { req: 'nosend', tx: 'nosend' };
    else if (!params.isNoSend && params.isDelayed)
        status = { req: 'unsent', tx: 'unprocessed' };
    else if (!params.isNoSend && !params.isDelayed) {
        status = { req: 'unprocessed', tx: 'unprocessed' };
        postStatus = { req: 'unmined', tx: 'unproven' };
    }
    else
        throw new index_client_1.sdk.WERR_INTERNAL('logic error');
    req.status = status.req;
    const vargs = {
        reference: params.reference,
        txid: params.txid,
        rawTx: params.rawTx,
        isSendWith: !!params.sendWith && params.sendWith.length > 0,
        isDelayed: params.isDelayed,
        isNoSend: params.isNoSend,
        // Properties with values added during validation.
        tx,
        txScriptOffsets,
        transactionId,
        transaction,
        inputOutputs,
        outputOutputs,
        commission,
        beef,
        req,
        outputUpdates: [],
        // update txid, status in transactions table and drop rawTransaction value
        transactionUpdate: {
            txid: params.txid,
            rawTx: undefined,
            inputBEEF: undefined,
            status: status.tx
        },
        postStatus
    };
    // update outputs with txid, script offsets and lengths, drop long output scripts from outputs table
    // outputs spendable will be updated for change to true and all others to !!o.tracked when tx has been broadcast
    // MAX_OUTPUTSCRIPT_LENGTH is limit for scripts left in outputs table
    // Hoist the loop-invariant settings lookup out of the per-output loop.
    const maxOutputScript = (await storage.getSettings()).maxOutputScript;
    for (const o of vargs.outputOutputs) {
        const vout = (0, index_client_1.verifyTruthy)(o.vout);
        const offset = vargs.txScriptOffsets.outputs[vout];
        const rawTxScript = (0, index_client_1.asString)(vargs.rawTx.slice(offset.offset, offset.offset + offset.length));
        if (o.lockingScript && rawTxScript !== (0, index_client_1.asString)(o.lockingScript))
            throw new index_client_1.sdk.WERR_INVALID_OPERATION(`rawTx output locking script for vout ${vout} not equal to expected output script.`);
        if (tx.outputs[vout].lockingScript.toHex() !== rawTxScript)
            throw new index_client_1.sdk.WERR_INVALID_OPERATION(`parsed transaction output locking script for vout ${vout} not equal to expected output script.`);
        const update = {
            txid: vargs.txid,
            spendable: true, // spendability is gated by transaction status. Remains true until the output is spent.
            scriptLength: offset.length,
            scriptOffset: offset.offset,
        };
        if (offset.length > maxOutputScript)
            // Remove long lockingScript data from outputs table, will be read from rawTx in proven_tx or proven_tx_reqs tables.
            update.lockingScript = undefined;
        vargs.outputUpdates.push({ id: o.outputId, update });
    }
    return vargs;
}
/**
 * Persists a validated new transaction inside a single storage transaction:
 * inserts (or merges) the proven_tx_req record, applies pending output updates,
 * and updates the transactions table row.
 *
 * @param storage storage provider
 * @param userId verified owner (unused here beyond signature symmetry)
 * @param vargs validated args produced by `validateCommitNewTxToStorageArgs`
 * @returns `{ req, log }` — the inserted/merged req entity plus the stamped log
 */
async function commitNewTxToStorage(storage, userId, vargs) {
    let log = (0, index_client_1.stampLog)(vargs.log, `start storage commitNewTxToStorage`);
    let insertedReq;
    await storage.transaction(async (trx) => {
        log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction start`);
        // Create initial 'nosend' proven_tx_req record to store signed, valid rawTx and input beef
        insertedReq = await vargs.req.insertOrMerge(storage, trx);
        log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage req inserted`);
        for (const { id, update } of vargs.outputUpdates)
            await storage.updateOutput(id, update, trx);
        log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage outputs updated`);
        await storage.updateTransaction(vargs.transactionId, vargs.transactionUpdate, trx);
        log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction end`);
    });
    log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction await done`);
    // NOTE: the final stamp below is applied after `result.log` is captured,
    // so it is not part of the returned log — preserved original behavior.
    const result = {
        req: (0, index_client_1.verifyTruthy)(insertedReq),
        log
    };
    log = (0, index_client_1.stampLog)(log, `end storage commitNewTxToStorage`);
    return result;
}
//# sourceMappingURL=processAction.js.map