/**
 * wallet-storage-client — Client only Wallet Storage.
 * Version: (unspecified)
 * 494 lines • 23.2 kB • JavaScript
 */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StorageProvider = void 0;
exports.validateStorageFeeModel = validateStorageFeeModel;
const sdk_1 = require("@bsv/sdk");
const index_client_1 = require("../index.client");
const getBeefForTransaction_1 = require("./methods/getBeefForTransaction");
const processAction_1 = require("./methods/processAction");
const attemptToPostReqsToNetwork_1 = require("./methods/attemptToPostReqsToNetwork");
const listCertificates_1 = require("./methods/listCertificates");
const createAction_1 = require("./methods/createAction");
const internalizeAction_1 = require("./methods/internalizeAction");
const StorageReaderWriter_1 = require("./StorageReaderWriter");
const entities_1 = require("./schema/entities");
/**
 * Client-side wallet storage provider.
 *
 * Extends `StorageReaderWriter` with the action lifecycle (create, process,
 * abort, internalize), BEEF assembly for sharing transactions externally,
 * proven-transaction bookkeeping, and maintenance/validation utilities.
 *
 * Constructor options include `feeModel`, `commissionSatoshis` and
 * `commissionPubKeyHex` (see `defaultOptions`), plus whatever the
 * `StorageReaderWriter` base requires (e.g. `chain`).
 */
class StorageProvider extends StorageReaderWriter_1.StorageReaderWriter {
    /**
     * Default provider options: 1 sat/kb fee model and no storage commission.
     */
    static defaultOptions() {
        return {
            feeModel: { model: 'sat/kb', value: 1 },
            commissionSatoshis: 0,
            commissionPubKeyHex: undefined
        };
    }
    /**
     * Builds a base options object for the given `chain`, layered over
     * `defaultOptions()`.
     */
    static createStorageBaseOptions(chain) {
        const options = {
            ...StorageProvider.defaultOptions(),
            chain,
        };
        return options;
    }
    constructor(options) {
        super(options);
        // Tracks whether in-memory state has diverged from persisted state.
        this.isDirty = false;
        this.feeModel = options.feeModel;
        this.commissionPubKeyHex = options.commissionPubKeyHex;
        this.commissionSatoshis = options.commissionSatoshis;
    }
    // Marker used by callers to distinguish full providers from readers.
    isStorageProvider() { return true; }
    setServices(v) { this._services = v; }
    /**
     * Returns the services previously supplied via `setServices`.
     * @throws WERR_INVALID_OPERATION if `setServices` was never called.
     */
    getServices() {
        if (!this._services)
            throw new index_client_1.sdk.WERR_INVALID_OPERATION('Must setServices first.');
        return this._services;
    }
    /**
     * Aborts an in-process, outgoing action within a single storage transaction:
     * sets the matched transaction's status to 'failed' and, if it has a txid,
     * marks its ProvenTxReq 'invalid' with an 'aborted' history note.
     *
     * @throws WERR_INVALID_PARAMETER unless the matched transaction is outgoing
     * and its status is NOT one of 'completed', 'failed', 'sending', 'unproven'.
     */
    async abortAction(auth, args) {
        const r = await this.transaction(async (trx) => {
            const tx = (0, index_client_1.verifyOneOrNone)(await this.findTransactions({ partial: args, noRawTx: true, trx }));
            const unAbortableStatus = ['completed', 'failed', 'sending', 'unproven'];
            // `-1 < findIndex(...)` is true when tx.status is in unAbortableStatus.
            if (!tx || !tx.isOutgoing || -1 < unAbortableStatus.findIndex(s => s === tx.status))
                throw new index_client_1.sdk.WERR_INVALID_PARAMETER('reference', 'an inprocess, outgoing action that has not been signed and shared to the network.');
            await this.updateTransactionStatus('failed', tx.transactionId, undefined, args.reference, trx);
            if (tx.txid) {
                const req = await index_client_1.entity.ProvenTxReq.fromStorageTxid(this, tx.txid, trx);
                if (req) {
                    req.addHistoryNote({ what: 'aborted' });
                    req.status = 'invalid';
                    await req.updateStorageDynamicProperties(this, trx);
                }
            }
            const r = {
                aborted: true
            };
            return r;
        });
        return r;
    }
    // Delegates to the internalizeAction method module.
    async internalizeAction(auth, args) {
        return await (0, internalizeAction_1.internalizeAction)(this, auth, args);
    }
    /**
     * Given an array of transaction txids with current ProvenTxReq ready-to-share status,
     * lookup their DojoProvenTxReqApi req records.
     * For the txids with reqs and status still ready to send construct a single merged beef.
     *
     * Each result detail carries one of the statuses: 'alreadySent' (proven or
     * already broadcast), 'readyToSend' (merged into the shared beef), or
     * 'error' (unknown txid, invalid req status, or missing rawTx/inputBEEF).
     *
     * @param txids
     * @param knownTxids txids the recipient already knows; merged as txid-only.
     * @param trx optional storage transaction scope.
     */
    async getReqsAndBeefToShareWithWorld(txids, knownTxids, trx) {
        const r = {
            beef: new sdk_1.Beef(),
            details: []
        };
        for (const txid of txids) {
            const d = {
                txid,
                status: 'unknown'
            };
            r.details.push(d);
            try {
                d.proven = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid }, trx }));
                if (d.proven)
                    d.status = 'alreadySent';
                else {
                    // Partition of ProvenTxReq statuses into outcome buckets.
                    const alreadySentStatus = ['unmined', 'callback', 'unconfirmed', 'completed'];
                    const readyToSendStatus = ['sending', 'unsent', 'nosend', 'unprocessed'];
                    const errorStatus = ['unknown', 'nonfinal', 'invalid', 'doubleSpend'];
                    d.req = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid }, trx }));
                    if (!d.req) {
                        d.status = 'error';
                        d.error = `ERR_UNKNOWN_TXID: ${txid} was not found.`;
                    }
                    else if (errorStatus.indexOf(d.req.status) > -1) {
                        d.status = 'error';
                        d.error = `ERR_INVALID_PARAMETER: ${txid} is not ready to send.`;
                    }
                    else if (alreadySentStatus.indexOf(d.req.status) > -1) {
                        d.status = 'alreadySent';
                    }
                    else if (readyToSendStatus.indexOf(d.req.status) > -1) {
                        if (!d.req.rawTx || !d.req.inputBEEF) {
                            d.status = 'error';
                            d.error = `ERR_INTERNAL: ${txid} req is missing rawTx or beef.`;
                        }
                        else
                            d.status = 'readyToSend';
                    }
                    else {
                        d.status = 'error';
                        d.error = `ERR_INTERNAL: ${txid} has unexpected req status ${d.req.status}`;
                    }
                    if (d.status === 'readyToSend') {
                        await this.mergeReqToBeefToShareExternally(d.req, r.beef, knownTxids, trx);
                    }
                }
            }
            catch (eu) {
                // Record per-txid failures without aborting the whole batch.
                const e = index_client_1.sdk.WalletError.fromUnknown(eu);
                d.error = `${e.name}: ${e.message}`;
            }
        }
        return r;
    }
    /**
     * Merges a req's rawTx and inputBEEF into `mergeToBeef`, then ensures every
     * input's source transaction is represented: known txids are merged
     * txid-only, others are resolved recursively from storage.
     *
     * @throws WERR_INTERNAL if the req lacks rawTx/inputBEEF or any input lacks
     * a sourceTXID.
     */
    async mergeReqToBeefToShareExternally(req, mergeToBeef, knownTxids, trx) {
        const { rawTx, inputBEEF: beef } = req;
        if (!rawTx || !beef)
            throw new index_client_1.sdk.WERR_INTERNAL(`req rawTx and beef must be valid.`);
        mergeToBeef.mergeRawTx((0, index_client_1.asArray)(rawTx));
        mergeToBeef.mergeBeef((0, index_client_1.asArray)(beef));
        const tx = sdk_1.Transaction.fromBinary((0, index_client_1.asArray)(rawTx));
        for (const input of tx.inputs) {
            if (!input.sourceTXID)
                throw new index_client_1.sdk.WERR_INTERNAL(`req all transaction inputs must have valid sourceTXID`);
            const txid = input.sourceTXID;
            const btx = mergeToBeef.findTxid(txid);
            if (!btx) {
                if (knownTxids && knownTxids.indexOf(txid) > -1)
                    mergeToBeef.mergeTxidOnly(txid);
                else
                    await this.getValidBeefForKnownTxid(txid, mergeToBeef, undefined, knownTxids, trx);
            }
        }
    }
    /**
     * Checks if txid is a known valid ProvenTx and returns it if found.
     * Next checks if txid is a current ProvenTxReq and returns that if found.
     * If `newReq` is provided and an existing ProvenTxReq isn't found,
     * use `newReq` to create a new ProvenTxReq.
     *
     * This is safe "findOrInsert" operation using retry if unique index constraint
     * is violated by a race condition insert.
     *
     * @param txid
     * @param newReq optional req to insert/merge; its txid must equal `txid`.
     * @param trx
     * @returns object with optional `proven` and `req` properties.
     */
    async getProvenOrReq(txid, newReq, trx) {
        if (newReq && txid !== newReq.txid)
            throw new index_client_1.sdk.WERR_INVALID_PARAMETER('newReq', `same txid`);
        const r = { proven: undefined, req: undefined };
        r.proven = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid }, trx }));
        if (r.proven)
            return r;
        for (let retry = 0;; retry++) {
            try {
                r.req = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid }, trx }));
                if (!r.req && !newReq)
                    break;
                if (!r.req && newReq) {
                    await this.insertProvenTxReq(newReq, trx);
                }
                if (r.req && newReq) {
                    // Merge history and notify into existing
                    const req1 = new entities_1.ProvenTxReq(r.req);
                    req1.mergeHistory(newReq, undefined, true);
                    req1.mergeNotifyTransactionIds(newReq);
                    await req1.updateStorageDynamicProperties(this, trx);
                }
                break;
            }
            catch (eu) {
                // First failure is swallowed and retried (insert race on the unique
                // txid index — see JSDoc); a second failure propagates.
                if (retry > 0)
                    throw eu;
            }
        }
        return r;
    }
    /**
     * Applies `status` to each transaction id, sequentially, inside one storage
     * transaction (see `updateTransactionStatus` for per-transaction rules).
     */
    async updateTransactionsStatus(transactionIds, status) {
        await this.transaction(async (trx) => {
            for (const id of transactionIds) {
                await this.updateTransactionStatus(status, id, undefined, undefined, trx);
            }
        });
    }
    /**
     * For all `status` values besides 'failed', just updates the transaction records status property.
     *
     * For 'status' of 'failed', attempts to make outputs previously allocated as inputs to this transaction usable again.
     *
     * @throws ERR_DOJO_COMPLETED_TX if current status is 'completed' and new status is not 'completed.
     * @throws ERR_DOJO_PROVEN_TX if transaction has proof or provenTxId and new status is not 'completed'.
     *
     * @param status one of 'failed', 'nosend', 'unsigned', 'unprocessed', 'sending', 'unproven', 'completed'.
     * @param transactionId
     * @param userId
     * @param reference
     * @param trx
     */
    async updateTransactionStatus(status, transactionId, userId, reference, trx) {
        // Must identify the transaction either by id or by (userId, reference).
        if (!transactionId && !(userId && reference))
            throw new index_client_1.sdk.WERR_MISSING_PARAMETER('either transactionId or userId and reference');
        await this.transaction(async (trx) => {
            const where = {};
            if (transactionId)
                where.transactionId = transactionId;
            if (userId)
                where.userId = userId;
            if (reference)
                where.reference = reference;
            const tx = (0, index_client_1.verifyOne)(await this.findTransactions({ partial: where, noRawTx: true, trx }));
            //if (tx.status === status)
            // no change required. Assume inputs and outputs spendable and spentBy are valid for status.
            //return
            // Once completed, this method cannot be used to "uncomplete" transaction.
            // NOTE(review): `&&` binds tighter than `||`, so this reads as
            // `(status !== 'completed' && tx.status === 'completed') || tx.provenTxId`
            // — any provenTxId throws even when the new status IS 'completed',
            // which conflicts with the @throws doc above. TODO confirm intended.
            if (status !== 'completed' && tx.status === 'completed' || tx.provenTxId)
                throw new index_client_1.sdk.WERR_INVALID_OPERATION('The status of a "completed" transaction cannot be changed.');
            // It is not possible to un-fail a transaction. Information is lost and not recoverable.
            if (status !== 'failed' && tx.status === 'failed')
                throw new index_client_1.sdk.WERR_INVALID_OPERATION(`A "failed" transaction may not be un-failed by this method.`);
            switch (status) {
                case 'failed':
                    {
                        // Attempt to make outputs previously allocated as inputs to this transaction usable again.
                        // Only clear input's spentBy and reset spendable = true if it references this transaction
                        const t = new index_client_1.entity.Transaction(tx);
                        const inputs = await t.getInputs(this, trx);
                        for (const input of inputs) {
                            // input is a prior output belonging to userId that reference this transaction either by `spentBy`
                            // or by txid and vout.
                            await this.updateOutput((0, index_client_1.verifyId)(input.outputId), { spendable: true, spentBy: undefined }, trx);
                        }
                    }
                    break;
                case 'nosend':
                case 'unsigned':
                case 'unprocessed':
                case 'sending':
                case 'unproven':
                case 'completed':
                    break;
                default:
                    throw new index_client_1.sdk.WERR_INVALID_PARAMETER('status', `not be ${status}`);
            }
            await this.updateTransaction(tx.transactionId, { status }, trx);
        }, trx);
    }
    /**
     * Delegates to the createAction method module.
     * @throws WERR_UNAUTHORIZED if `auth.userId` is missing.
     */
    async createAction(auth, args) {
        if (!auth.userId)
            throw new index_client_1.sdk.WERR_UNAUTHORIZED();
        return await (0, createAction_1.createAction)(this, auth, args);
    }
    /**
     * Delegates to the processAction method module.
     * @throws WERR_UNAUTHORIZED if `auth.userId` is missing.
     */
    async processAction(auth, args) {
        if (!auth.userId)
            throw new index_client_1.sdk.WERR_UNAUTHORIZED();
        return await (0, processAction_1.processAction)(this, auth, args);
    }
    // Delegates to the attemptToPostReqsToNetwork method module.
    async attemptToPostReqsToNetwork(reqs, trx) {
        return await (0, attemptToPostReqsToNetwork_1.attemptToPostReqsToNetwork)(this, reqs, trx);
    }
    // Delegates to the listCertificates method module.
    async listCertificates(auth, args) {
        return await (0, listCertificates_1.listCertificates)(this, auth, args);
    }
    /**
     * True when storage holds either a ProvenTx or a rawTx for `txid`.
     */
    async verifyKnownValidTransaction(txid, trx) {
        const { proven, rawTx } = await this.getProvenOrRawTx(txid, trx);
        return proven != undefined || rawTx != undefined;
    }
    /**
     * Like `getValidBeefForTxid`, but throws instead of returning undefined
     * when storage has no data for `txid`.
     */
    async getValidBeefForKnownTxid(txid, mergeToBeef, trustSelf, knownTxids, trx) {
        const beef = await this.getValidBeefForTxid(txid, mergeToBeef, trustSelf, knownTxids, trx);
        if (!beef)
            throw new index_client_1.sdk.WERR_INVALID_PARAMETER('txid', `${txid} is not known to storage.`);
        return beef;
    }
    /**
     * Builds (or extends `mergeToBeef` with) a Beef proving `txid` from storage:
     * from a ProvenTx (rawTx + merkle path) when available, otherwise from a
     * req's rawTx + inputBEEF, recursing into inputs not already in the beef.
     * With `trustSelf === 'known'`, merges txid-only instead of full proof data.
     *
     * @returns the beef, or undefined if storage has neither proven nor raw data.
     */
    async getValidBeefForTxid(txid, mergeToBeef, trustSelf, knownTxids, trx) {
        const beef = mergeToBeef || new sdk_1.Beef();
        const r = await this.getProvenOrRawTx(txid, trx);
        if (r.proven) {
            if (trustSelf === 'known')
                beef.mergeTxidOnly(txid);
            else {
                beef.mergeRawTx(r.proven.rawTx);
                const mp = new index_client_1.entity.ProvenTx(r.proven).getMerklePath();
                beef.mergeBump(mp);
                return beef;
            }
        }
        if (r.rawTx && r.inputBEEF) {
            if (trustSelf === 'known')
                beef.mergeTxidOnly(txid);
            else {
                beef.mergeRawTx(r.rawTx);
                beef.mergeBeef(r.inputBEEF);
                const tx = sdk_1.Transaction.fromBinary(r.rawTx);
                for (const input of tx.inputs) {
                    const btx = beef.findTxid(input.sourceTXID);
                    if (!btx) {
                        if (knownTxids && knownTxids.indexOf(input.sourceTXID) > -1)
                            beef.mergeTxidOnly(input.sourceTXID);
                        else
                            await this.getValidBeefForKnownTxid(input.sourceTXID, beef, trustSelf, knownTxids, trx);
                    }
                }
                return beef;
            }
        }
        return undefined;
    }
    // Delegates to the getBeefForTransaction method module.
    async getBeefForTransaction(txid, options) {
        return await (0, getBeefForTransaction_1.getBeefForTransaction)(this, txid, options);
    }
    // Returns the monitor event with the given id, or undefined.
    async findMonitorEventById(id, trx) {
        return (0, index_client_1.verifyOneOrNone)(await this.findMonitorEvents({ partial: { id }, trx }));
    }
    /**
     * Soft-deletes the certificate matching the validated
     * certifier/serialNumber/type by setting `isDeleted: true`.
     */
    async relinquishCertificate(auth, args) {
        const vargs = index_client_1.sdk.validateRelinquishCertificateArgs(args);
        const cert = (0, index_client_1.verifyOne)(await this.findCertificates({ partial: { certifier: vargs.certifier, serialNumber: vargs.serialNumber, type: vargs.type } }));
        return await this.updateCertificate(cert.certificateId, { isDeleted: true });
    }
    /**
     * Removes the output identified by the validated outpoint from its basket
     * by clearing `basketId`.
     */
    async relinquishOutput(auth, args) {
        const vargs = index_client_1.sdk.validateRelinquishOutputArgs(args);
        const { txid, vout } = index_client_1.sdk.parseWalletOutpoint(vargs.output);
        const output = (0, index_client_1.verifyOne)(await this.findOutputs({ partial: { txid, vout } }));
        return await this.updateOutput(output.outputId, { basketId: undefined });
    }
    /**
     * Applies a sync chunk for the user identified by `args.identityKey` via the
     * matching SyncState entity.
     */
    async processSyncChunk(args, chunk) {
        const user = (0, index_client_1.verifyTruthy)(await this.findUserByIdentityKey(args.identityKey));
        const ss = new index_client_1.entity.SyncState((0, index_client_1.verifyOne)(await this.findSyncStates({ partial: { storageIdentityKey: args.fromStorageIdentityKey, userId: user.userId } })));
        const r = await ss.processSyncChunk(this, args, chunk);
        return r;
    }
    /**
     * Handles storage changes when a valid MerklePath and mined block header are found for a ProvenTxReq txid.
     *
     * Performs the following storage updates (typically):
     * 1. Lookup the exising `ProvenTxReq` record for its rawTx
     * 2. Insert a new ProvenTx record using properties from `args` and rawTx, yielding a new provenTxId
     * 3. Update ProvenTxReq record with status 'completed' and new provenTxId value (and history of status changed)
     * 4. Unpack notify transactionIds from req and update each transaction's status to 'completed', provenTxId value.
     * 5. Update ProvenTxReq history again to record that transactions have been notified.
     * 6. Return results...
     *
     * Alterations of "typically" to handle:
     * - req already has a provenTxId (another process won the race): reuse it.
     * - findOrInsertProvenTx reports the record was not new: skip notifications.
     */
    async updateProvenTxReqWithNewProvenTx(args) {
        const req = await index_client_1.entity.ProvenTxReq.fromStorageId(this, args.provenTxReqId);
        let proven;
        if (req.provenTxId) {
            // Someone beat us to it, grab what we need for results...
            proven = new index_client_1.entity.ProvenTx((0, index_client_1.verifyOne)(await this.findProvenTxs({ partial: { txid: args.txid } })));
        }
        else {
            let isNew;
            ({ proven, isNew } = await this.transaction(async (trx) => {
                const { proven: api, isNew } = await this.findOrInsertProvenTx({
                    created_at: new Date(),
                    updated_at: new Date(),
                    provenTxId: 0,
                    txid: args.txid,
                    height: args.height,
                    index: args.index,
                    merklePath: args.merklePath,
                    rawTx: req.rawTx,
                    blockHash: args.blockHash,
                    merkleRoot: args.merkleRoot
                }, trx);
                proven = new index_client_1.entity.ProvenTx(api);
                if (isNew) {
                    req.status = 'completed';
                    req.provenTxId = proven.provenTxId;
                    await req.updateStorageDynamicProperties(this, trx);
                    // update the transaction notifications outside of storage transaction....
                }
                return { proven, isNew };
            }));
            if (isNew) {
                const ids = req.notify.transactionIds || [];
                if (ids.length > 0) {
                    for (const id of ids) {
                        try {
                            await this.updateTransaction(id, { provenTxId: proven.provenTxId, status: 'completed' });
                            req.addHistoryNote(`transaction ${id} notified of ProvenTx`);
                        }
                        catch (eu) {
                            // Record per-transaction notification failures in req history
                            // rather than failing the whole update.
                            const e = index_client_1.sdk.WalletError.fromUnknown(eu);
                            req.addHistoryNote({ what: 'transactionNotificationFailure', error: `${e.code}: ${e.description}` });
                        }
                    }
                    await req.updateStorageDynamicProperties(this);
                }
            }
        }
        const r = {
            status: req.status,
            history: req.apiHistory,
            provenTxId: proven.provenTxId
        };
        return r;
    }
    /**
     * For each spendable output in the 'default' basket of the authenticated user,
     * verify that the output script, satoshis, vout and txid match that of an output
     * still in the mempool of at least one service provider.
     *
     * Iterates over ALL users found in storage, checking each one's 'default'
     * basket via `getServices().getUtxoStatus` keyed by locking script.
     *
     * @returns object with invalidSpendableOutputs array. A good result is an empty array.
     */
    async confirmSpendableOutputs() {
        var _a;
        const invalidSpendableOutputs = [];
        const users = await this.findUsers({ partial: {} });
        for (const { userId } of users) {
            const defaultBasket = (0, index_client_1.verifyOne)(await this.findOutputBaskets({ partial: { userId, name: 'default' } }));
            const where = {
                userId,
                basketId: defaultBasket.basketId,
                spendable: true,
            };
            const outputs = await this.findOutputs({ partial: where });
            // Iterate backwards; order of the results array is not significant.
            for (let i = outputs.length - 1; i >= 0; i--) {
                const o = outputs[i];
                const oid = (0, index_client_1.verifyId)(o.outputId);
                if (o.spendable) {
                    let ok = false;
                    if (o.lockingScript && o.lockingScript.length > 0) {
                        const r = await this.getServices().getUtxoStatus((0, index_client_1.asString)(o.lockingScript), 'script');
                        // Confirmed valid only when a service reports a live UTXO whose
                        // txid, satoshis and vout match this output's transaction.
                        if (r.status === 'success' && r.isUtxo && ((_a = r.details) === null || _a === void 0 ? void 0 : _a.length) > 0) {
                            const tx = await this.findTransactionById(o.transactionId);
                            if (tx && tx.txid && r.details.some(d => d.txid === tx.txid && d.satoshis === o.satoshis && d.index === o.vout)) {
                                ok = true;
                            }
                        }
                    }
                    if (!ok)
                        invalidSpendableOutputs.push(o);
                }
            }
        }
        return { invalidSpendableOutputs };
    }
    /**
     * Copies only the whitelisted "dynamic" ProvenTxReq fields from `update`
     * into a partial and applies it via `updateProvenTxReq`.
     *
     * NOTE(review): truthy checks mean falsy values (e.g. attempts: 0,
     * notified: false, batch: '') are silently skipped — confirm intended.
     */
    async updateProvenTxReqDynamics(id, update, trx) {
        const partial = {};
        if (update['updated_at'])
            partial['updated_at'] = update['updated_at'];
        if (update['provenTxId'])
            partial['provenTxId'] = update['provenTxId'];
        if (update['status'])
            partial['status'] = update['status'];
        if (update['attempts'])
            partial['attempts'] = update['attempts'];
        if (update['notified'])
            partial['notified'] = update['notified'];
        if (update['batch'])
            partial['batch'] = update['batch'];
        if (update['history'])
            partial['history'] = update['history'];
        if (update['notify'])
            partial['notify'] = update['notify'];
        return await this.updateProvenTxReq(id, partial, trx);
    }
}
exports.StorageProvider = StorageProvider;
/**
 * Normalizes a (possibly partial or absent) fee model into a concrete
 * StorageFeeModel, defaulting to `{ model: 'sat/kb', value: 1 }`.
 *
 * @param {unknown} v - Candidate fee model: `undefined`, `null`, or an object
 *   with optional `model` and `value` properties.
 * @returns {{ model: 'sat/kb', value: number }} validated fee model; `value`
 *   is taken from `v.value` only when it is a number.
 * @throws WERR_INVALID_PARAMETER when `v` is an object whose `model` property
 *   is not exactly 'sat/kb' (including when `model` is absent).
 */
function validateStorageFeeModel(v) {
    const r = {
        model: 'sat/kb',
        value: 1
    };
    // `typeof null === 'object'`, so guard explicitly: previously a null
    // argument threw a raw TypeError on `v.model`; now it falls through
    // to the defaults like `undefined` does.
    if (typeof v === 'object' && v !== null) {
        if (v.model !== 'sat/kb')
            throw new index_client_1.sdk.WERR_INVALID_PARAMETER('StorageFeeModel.model', `"sat/kb"`);
        if (typeof v.value === 'number') {
            r.value = v.value;
        }
    }
    return r;
}
//# sourceMappingURL=StorageProvider.js.map