// NOTE(review): the lines below are package-registry page metadata that was
// pasted into the source by mistake; preserved as a comment so the file
// remains valid TypeScript:
// ecash-wallet — An ecash wallet class. Manage keys, build and broadcast txs.
// Includes support for tokens and agora.
// (registry listing: 1,321 lines (1,173 loc) • 100 kB • text/typescript)
// Copyright (c) 2025 The Bitcoin developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
/**
* transactions.test.ts
*
* Build and broadcast eCash txs using the ecash-wallet lib and regtest
* Confirm validity of txs using chronik and chronik-client
*/
import { expect, use } from 'chai';
import chaiAsPromised from 'chai-as-promised';
import { ChronikClient } from 'chronik-client';
import {
Address,
DEFAULT_DUST_SATS,
Script,
toHex,
fromHex,
TxOutput,
SLP_MAX_SEND_OUTPUTS,
ALP_POLICY_MAX_OUTPUTS,
OP_RETURN_MAX_BYTES,
payment,
SLP_TOKEN_TYPE_FUNGIBLE,
ALP_TOKEN_TYPE_STANDARD,
SLP_TOKEN_TYPE_MINT_VAULT,
SLP_TOKEN_TYPE_NFT1_GROUP,
SLP_TOKEN_TYPE_NFT1_CHILD,
ALL_BIP143,
MAX_TX_SERSIZE,
OP_RETURN,
} from 'ecash-lib';
import { TestRunner } from 'ecash-lib/dist/test/testRunner.js';
import { Wallet } from '../src/wallet';
import { GENESIS_TOKEN_ID_PLACEHOLDER } from 'ecash-lib/dist/payment';
use(chaiAsPromised);
// Configure available satoshis for the regtest funding wallet
const NUM_COINS = 500;
const COIN_VALUE = 1100000000n;
// Throwaway p2pkh destination (hash160 = 'deadbeef' repeated to 20 bytes),
// used wherever the receiver of an output does not matter for the test
const MOCK_DESTINATION_ADDRESS = Address.p2pkh('deadbeef'.repeat(5)).toString();
const MOCK_DESTINATION_SCRIPT = Script.fromAddress(MOCK_DESTINATION_ADDRESS);
// Max p2pkh dust outputs that fit a single broadcastable tx with one p2pkh
// input and no OP_RETURN. Discovered through empirical testing
const MAX_P2PKH_OUTPUTS_FOR_SINGLE_TX_SIZE_BROADCAST_LIMIT_AND_ONE_INPUT = 2935;
// Helper function
const getDustOutputs = (count: number) => {
const outputs: TxOutput[] = [];
for (let i = 0; i < count; i++) {
// Convert integer to hex string (without '0x' prefix)
let hex = i.toString(16);
// Pad with leading zeros to 40 characters (20 bytes)
// NB we assume the number is less than 40 bytes, i.e. < 0xffffffffffffffffffffffffffffffffffffffff
hex = hex.padStart(40, '0');
const script = Script.p2pkh(fromHex(hex));
outputs.push({ script, sats: DEFAULT_DUST_SATS });
}
return outputs;
};
describe('Wallet can build and broadcast on regtest', () => {
// Regtest harness and chronik indexer client shared by every test below
let runner: TestRunner;
let chronik: ChronikClient;
before(async () => {
    // Setup using ecash-agora_base so we have agora plugin available
    // ecash-wallet will support agora txs
    runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
    chronik = runner.chronik;
    // Fund the runner with NUM_COINS coins of COIN_VALUE sats each
    await runner.setupCoins(NUM_COINS, COIN_VALUE);
});
after(() => {
    // Tear down the regtest node
    runner.stop();
});
it('We can send an eCash tx to a single recipient or multiple recipients', async () => {
    // Init the wallet
    const testWallet = Wallet.fromSk(fromHex('12'.repeat(32)), chronik);
    // Send 1M XEC (100,000,000 sats) to the wallet
    const inputSats = 1_000_000_00n;
    await runner.sendToScript(inputSats, testWallet.script);
    // Sync the wallet
    await testWallet.sync();
    // We can send a tx to a single recipient
    const sendSats = 546n;
    const resp = await testWallet
        .action({
            outputs: [
                {
                    script: MOCK_DESTINATION_SCRIPT,
                    sats: sendSats,
                },
            ],
        })
        .build()
        .broadcast();
    const firstTxid = resp.broadcasted[0];
    // NB hard-coded txids are deterministic because the regtest funding and
    // the wallet sk are deterministic; any change to tx construction order
    // in this suite will change them
    expect(firstTxid).to.equal(
        'c979983f62cb14ea449dec2982fd34ab2cac522634f38ce4b0cd32601ef05047',
    );
    // We can check chronik and see that the tx had change added automatically
    const firstTx = await chronik.tx(firstTxid);
    expect(firstTx.size).to.equal(219);
    // The tx was sent with default params so fee expected to be 1000 sats per kb
    // inputSats = outputSats + fee
    // inputSats = (sendSats + changeSats) + fee
    // changeSats = inputSats - sendSats - fee
    // NB at 1000 sats/kb the fee in sats equals the size in bytes
    const expectedChange = inputSats - sendSats - BigInt(firstTx.size);
    expect(firstTx.outputs[1].sats).to.equal(expectedChange);
    // We can send to multiple recipients
    const OUTPUTS_TO_TEST = 12;
    const dozenOutputs = getDustOutputs(OUTPUTS_TO_TEST);
    const respTwo = await testWallet
        .action({
            outputs: dozenOutputs,
        })
        .build()
        .broadcast();
    const secondTxid = respTwo.broadcasted[0];
    expect(secondTxid).to.equal(
        'a62b0564fcefa5e3fc857e8ad90a408c00421608d7fdc3d13f335a52f29b3fc8',
    );
    // We can go on to broadcast XEC in a chained tx to so many outputs that it would exceed the
    // broadcast size limit of a single tx
    // Send more sats to the wallet so we can afford this
    await runner.sendToScript(10_000_000_00n, testWallet.script);
    // We got 2,936 by iteratively testing what number of outputs will be "just over" the max size in this case
    const willNeedChainedTxOutputCount = getDustOutputs(
        MAX_P2PKH_OUTPUTS_FOR_SINGLE_TX_SIZE_BROADCAST_LIMIT_AND_ONE_INPUT +
            1,
    );
    // We DO NOT get a size error if we build and do not broadcast
    await testWallet.sync();
    const builtOversized = testWallet
        .action({
            outputs: willNeedChainedTxOutputCount,
        })
        .build();
    // We get 2 txs in this chain
    expect(builtOversized.builtTxs).to.have.length(2);
    // Both are under the max size
    expect(builtOversized.builtTxs[0].size()).to.equal(99977);
    expect(builtOversized.builtTxs[1].size()).to.equal(219);
    // We can broadcast the txs
    const chainedTxBroadcastResult = await builtOversized.broadcast();
    expect(chainedTxBroadcastResult.success).to.equal(true);
    expect(chainedTxBroadcastResult.broadcasted).to.have.length(2);
});
it('We can send a chained eCash tx to cover outputs that would create create a tx exceeding the broadcast limit', async () => {
    // Init the wallet
    const testWallet = Wallet.fromSk(fromHex('19'.repeat(32)), chronik);
    // Send 8,000,000 sats (80k XEC) to the wallet
    await runner.sendToScript(8_000_000n, testWallet.script);
    const willNeedChainedTxOutputCount = getDustOutputs(
        MAX_P2PKH_OUTPUTS_FOR_SINGLE_TX_SIZE_BROADCAST_LIMIT_AND_ONE_INPUT +
            1,
    );
    // If we do not have the utxos to even cover one tx, we throw the usual msg
    // Build without syncing to check (the wallet sees no utxos before sync)
    expect(() =>
        testWallet
            .action({
                outputs: willNeedChainedTxOutputCount,
            })
            .build(),
    ).to.throw(
        Error,
        `Insufficient sats to complete tx. Need 1603056 additional satoshis to complete this Action.`,
    );
    // Sync this time
    await testWallet.sync();
    const builtOversized = testWallet
        .action({
            outputs: willNeedChainedTxOutputCount,
        })
        .build();
    // We get 2 txs in this chain
    expect(builtOversized.builtTxs).to.have.length(2);
    // Both are under the max size
    expect(builtOversized.builtTxs[0].size()).to.equal(99977);
    expect(builtOversized.builtTxs[1].size()).to.equal(219);
    // We can broadcast the txs
    const chainedTxBroadcastResult = await builtOversized.broadcast();
    expect(chainedTxBroadcastResult.success).to.equal(true);
    expect(chainedTxBroadcastResult.broadcasted).to.have.length(2);
    // The chained tx covers all outputs from the initial action, though they are not at the same outIdx after the chained outputs
    const chainTxAlpha = await chronik.tx(
        chainedTxBroadcastResult.broadcasted[0],
    );
    const chainTxBeta = await chronik.tx(
        chainedTxBroadcastResult.broadcasted[1],
    );
    const allOutputsInChain = [
        ...chainTxAlpha.outputs,
        ...chainTxBeta.outputs,
    ];
    // All of the action-requested outputs are in the chained txs that executed the action
    for (const requestedOutput of willNeedChainedTxOutputCount) {
        const outputInChain = allOutputsInChain.find(
            o => o.outputScript === requestedOutput.script.toHex(),
        );
        expect(outputInChain).to.not.equal(undefined);
        expect(outputInChain!.sats).to.equal(requestedOutput.sats);
    }
    // The chained tx had all requested outputs, plus
    // +1 for user change
    // +1 for the input of the 2nd required tx
    expect(allOutputsInChain).to.have.length(
        willNeedChainedTxOutputCount.length + 2,
    );
});
it('We throw expected error if we can cover the outputs of an action that must be sent with chainedTxs but not the fee of such a tx', async () => {
    // Init the wallet
    const testWallet = Wallet.fromSk(fromHex('20'.repeat(32)), chronik);
    // We choose the exact amount where we could cover a theoretical tx with too many outputs in one tx,
    // but are unable to afford the marginal cost of chaining the action, i.e. the additional tx fees required
    // for multiple txs
    // Got this number through iteratively testing
    await runner.sendToScript(8701019n, testWallet.script);
    // Use many inputs so that the marginal fee of the chained tx is greater than dust, in this case it is 833 sats
    const willNeedChainedTxOutputCount = getDustOutputs(15000);
    // If we do not have the utxos to even cover one tx, we throw the usual msg
    // Build without syncing to check
    expect(() =>
        testWallet
            .action({
                outputs: willNeedChainedTxOutputCount,
            })
            .build(),
    ).to.throw(
        Error,
        `Insufficient sats to complete tx. Need 8190000 additional satoshis to complete this Action.`,
    );
    // If we have enough utxos to cover the standard tx but not the chained tx, we throw
    await testWallet.sync();
    expect(() =>
        testWallet
            .action({
                outputs: willNeedChainedTxOutputCount,
            })
            .build(),
    ).to.throw(
        Error,
        `Insufficient input sats (8701019) to complete required chained tx output sats`,
    );
});
it('We can send a large chained tx with more than 3 txs', async () => {
// Init the wallet
const testWallet = Wallet.fromSk(fromHex('20'.repeat(32)), chronik);
// 1M XEC
await runner.sendToScript(100000000n, testWallet.script);
// Enough inputs so we need > 3 txs
const willNeedChainedTxOutputCount = getDustOutputs(15000);
// Send it
await testWallet.sync();
const result = await testWallet
.action({
outputs: willNeedChainedTxOutputCount,
})
.build()
.broadcast();
expect(result.success).to.equal(true);
});
it('We can broadcast a tx that is exactly MAX_TX_SERSIZE', async () => {
    // Init the wallet
    const testWallet = Wallet.fromSk(fromHex('21'.repeat(32)), chronik);
    // Send 10.8M XEC to the wallet
    await runner.sendToScript(10_800_000_00n, testWallet.script);
    // Create the max outputs for a p2pkh one-input tx, XEC only, under broadcast size limit
    const maxOutputsOnSingleTxUnderBroadcastSizeLimit = getDustOutputs(
        MAX_P2PKH_OUTPUTS_FOR_SINGLE_TX_SIZE_BROADCAST_LIMIT_AND_ONE_INPUT,
    );
    // Confirm this tx will be broadcast with a single tx
    await testWallet.sync();
    const builtSingleTx = testWallet
        .action({
            outputs: maxOutputsOnSingleTxUnderBroadcastSizeLimit,
        })
        .build();
    expect(builtSingleTx.builtTxs).to.have.length(1);
    expect(builtSingleTx.builtTxs[0].size()).to.equal(99977);
    // Well let's add an OP_RETURN that will make this tx the EXACT size of the broadcast limit
    const EXTRA_MARGINAL_BYTES_FOR_OP_RETURN_AND_OUTPUT = 10; // guess and check
    const opReturnOutput = {
        sats: 0n,
        script: new Script(
            new Uint8Array([
                OP_RETURN,
                // Zero-fill the script so total tx size lands exactly on
                // MAX_TX_SERSIZE
                ...Array(
                    MAX_TX_SERSIZE -
                        99977 -
                        EXTRA_MARGINAL_BYTES_FOR_OP_RETURN_AND_OUTPUT,
                ).fill(0),
            ]),
        ),
    };
    // OP_RETURN is 14 bytes, its impact on the tx is +23 bytes
    expect(opReturnOutput.script.toHex()).to.equal(
        '6a00000000000000000000000000',
    );
    // Re-sync since the earlier build may have reserved utxos in memory
    // NOTE(review): presumably; confirm against Wallet.sync/build semantics
    await testWallet.sync();
    const exactLimitBroadcasted = await testWallet
        .action({
            outputs: [
                opReturnOutput,
                ...maxOutputsOnSingleTxUnderBroadcastSizeLimit,
            ],
        })
        .build()
        .broadcast();
    // We have a single tx here that is exactly MAX_TX_SERSIZE
    expect(exactLimitBroadcasted.broadcasted).to.have.length(1);
    // We have the OP_RETURN
    const tx = await chronik.tx(exactLimitBroadcasted.broadcasted[0]);
    expect(tx.outputs[0].outputScript).to.equal(
        opReturnOutput.script.toHex(),
    );
});
it('If an OP_RETURN field pushes a tx over MAX_TX_SERSIZE, we can handle with a chained tx', async () => {
    // Init the wallet
    const testWallet = Wallet.fromSk(fromHex('22'.repeat(32)), chronik);
    // Send 10.8M XEC to the wallet
    await runner.sendToScript(10_800_000_00n, testWallet.script);
    // Create the max outputs for a p2pkh one-input tx, XEC only, under broadcast size limit
    const maxOutputsOnSingleTxUnderBroadcastSizeLimit = getDustOutputs(
        MAX_P2PKH_OUTPUTS_FOR_SINGLE_TX_SIZE_BROADCAST_LIMIT_AND_ONE_INPUT,
    );
    // Confirm this tx will be broadcast with a single tx
    await testWallet.sync();
    const builtSingleTx = testWallet
        .action({
            outputs: maxOutputsOnSingleTxUnderBroadcastSizeLimit,
        })
        .build();
    expect(builtSingleTx.builtTxs).to.have.length(1);
    expect(builtSingleTx.builtTxs[0].size()).to.equal(99977);
    // Well let's add an OP_RETURN that will make this tx the EXACT size of the broadcast limit
    const EXTRA_MARGINAL_BYTES_FOR_OP_RETURN_AND_OUTPUT = 10; // guess and check
    const opReturnOutput = {
        sats: 0n,
        script: new Script(
            new Uint8Array([
                OP_RETURN,
                ...Array(
                    MAX_TX_SERSIZE -
                        99977 -
                        EXTRA_MARGINAL_BYTES_FOR_OP_RETURN_AND_OUTPUT,
                ).fill(0),
            ]),
        ),
    };
    // OP_RETURN is 14 bytes, its impact on the tx is +23 bytes
    expect(opReturnOutput.script.toHex()).to.equal(
        '6a00000000000000000000000000',
    );
    await testWallet.sync();
    const exactLimitBuiltTx = testWallet
        .action({
            outputs: [
                opReturnOutput,
                ...maxOutputsOnSingleTxUnderBroadcastSizeLimit,
            ],
        })
        .build();
    // We have a single tx here that is exactly MAX_TX_SERSIZE
    expect(exactLimitBuiltTx.builtTxs).to.have.length(1);
    expect(exactLimitBuiltTx.builtTxs[0].size()).to.equal(MAX_TX_SERSIZE);
    // OK well let's make the OP_RETURN a single byte longer
    const opReturnOutputTooLong = {
        sats: 0n,
        script: new Script(
            new Uint8Array([
                OP_RETURN,
                ...Array(
                    MAX_TX_SERSIZE -
                        99977 -
                        EXTRA_MARGINAL_BYTES_FOR_OP_RETURN_AND_OUTPUT +
                        1,
                ).fill(0),
            ]),
        ),
    };
    expect(opReturnOutputTooLong.script.toHex()).to.equal(
        '6a0000000000000000000000000000',
    );
    await testWallet.sync();
    const oneByteTooLargeBroadcasted = await testWallet
        .action({
            outputs: [
                opReturnOutputTooLong,
                ...maxOutputsOnSingleTxUnderBroadcastSizeLimit,
            ],
        })
        .build()
        .broadcast();
    // One byte over the limit forces a chained pair of txs
    expect(oneByteTooLargeBroadcasted.broadcasted).to.have.length(2);
    // We have the OP_RETURN in the first tx
    const opReturnTx = await chronik.tx(
        oneByteTooLargeBroadcasted.broadcasted[0],
    );
    expect(opReturnTx.outputs[0].outputScript).to.equal(
        opReturnOutputTooLong.script.toHex(),
    );
    // But not the second; its first output is one of the dust p2pkh outputs
    const chainTxOmegaTx = await chronik.tx(
        oneByteTooLargeBroadcasted.broadcasted[1],
    );
    expect(chainTxOmegaTx.outputs[0].outputScript).to.equal(
        '76a9140000000000000000000000000000000000000b7588ac',
    );
});
it('We can handle a chained tx with exactly enough outputs that a 3rd tx is required', async () => {
    // Init the wallet
    const testWallet = Wallet.fromSk(fromHex('23'.repeat(32)), chronik);
    // Send 10M XEC
    await runner.sendToScript(10_000_000_00n, testWallet.script);
    // Iteratively discovered: 5870 (= 2 x 2935) outputs exactly fill TWO
    // max-size txs that each have 1 p2pkh input, all outputs p2pkh, no OP_RETURN
    // NOTE(review): the name says "Three" but this count fills two max-size
    // txs; one more output (added below) is what forces a third tx
    const exactlyEnoughOutputsForThreeMaxSizeTxs = 5870;
    const exactlyEnoughOutputsForTwoMaxSizeTxs = getDustOutputs(
        exactlyEnoughOutputsForThreeMaxSizeTxs,
    );
    // Sync this time
    await testWallet.sync();
    // NB build on a clone — presumably so this build does not consume the
    // wallet's in-memory utxo set before the real build below; confirm
    const twoFullTxsBuilt = testWallet
        .clone()
        .action({
            outputs: exactlyEnoughOutputsForTwoMaxSizeTxs,
        })
        .build();
    // We get 2 txs in this chain
    expect(twoFullTxsBuilt.builtTxs).to.have.length(2);
    // Both are JUST under the max size
    expect(twoFullTxsBuilt.builtTxs[0].size()).to.equal(99977);
    expect(twoFullTxsBuilt.builtTxs[1].size()).to.equal(99977);
    const exactlyEnoughOutputsForAThirdTx = getDustOutputs(
        exactlyEnoughOutputsForThreeMaxSizeTxs + 1,
    );
    // Let's add just one more output
    await testWallet.sync();
    const threeTxsBuilt = testWallet
        .action({
            outputs: exactlyEnoughOutputsForAThirdTx,
        })
        .build();
    // We get 3 txs in this chain
    expect(threeTxsBuilt.builtTxs).to.have.length(3);
    // The first two are JUST under the max size; the third is small
    expect(threeTxsBuilt.builtTxs[0].size()).to.equal(99977);
    expect(threeTxsBuilt.builtTxs[1].size()).to.equal(99977);
    expect(threeTxsBuilt.builtTxs[2].size()).to.equal(219);
    // We can broadcast the txs
    const chainedTxBroadcastResult = await threeTxsBuilt.broadcast();
    expect(chainedTxBroadcastResult.success).to.equal(true);
    expect(chainedTxBroadcastResult.broadcasted).to.have.length(3);
    // The last tx has 2 outputs, because when we added one more output to total actions, we needed another chain tx, and we had
    // to make room for that by using a change output in the second tx which was no longer chainTxOmega
    const chainTxOmega = await chronik.tx(
        chainedTxBroadcastResult.broadcasted[2],
    );
    expect(chainTxOmega.inputs.length).to.equal(1);
    expect(chainTxOmega.outputs.length).to.equal(2);
});
it('We can handle SLP SLP_TOKEN_TYPE_FUNGIBLE token actions', async () => {
    // Init the wallet
    const slpWallet = Wallet.fromSk(fromHex('13'.repeat(32)), chronik);
    // Send 1M XEC to the wallet
    const inputSats = 1_000_000_00n;
    await runner.sendToScript(inputSats, slpWallet.script);
    // Sync the wallet
    await slpWallet.sync();
    // We can mint a fungible token with a mint baton
    const slpGenesisInfo = {
        tokenTicker: 'SLP',
        tokenName: 'SLP Test Token',
        url: 'cashtab.com',
        decimals: 0,
    };
    const genesisMintQty = 1_000n;
    // Construct the Action for this tx
    const slpGenesisAction: payment.Action = {
        outputs: [
            /** Blank OP_RETURN at outIdx 0 */
            { sats: 0n },
            /** Mint qty at outIdx 1, per SLP spec */
            {
                sats: 546n,
                tokenId: payment.GENESIS_TOKEN_ID_PLACEHOLDER,
                script: slpWallet.script,
                atoms: genesisMintQty,
            },
            /** Mint baton at outIdx 2, in valid spec range 2-255 */
            {
                sats: 546n,
                script: slpWallet.script,
                tokenId: payment.GENESIS_TOKEN_ID_PLACEHOLDER,
                isMintBaton: true,
                atoms: 0n,
            },
        ],
        tokenActions: [
            /** SLP genesis action */
            {
                type: 'GENESIS',
                tokenType: {
                    protocol: 'SLP',
                    type: 'SLP_TOKEN_TYPE_FUNGIBLE',
                    number: 1,
                },
                genesisInfo: slpGenesisInfo,
            },
        ],
    };
    // Build and broadcast
    const resp = await slpWallet
        .action(slpGenesisAction)
        .build()
        .broadcast();
    // For a GENESIS tx, the txid is the tokenId
    const slpGenesisTokenId = resp.broadcasted[0];
    // It's a valid SLP genesis tx
    const tokenInfo = await chronik.token(slpGenesisTokenId);
    expect(tokenInfo.tokenType.type).to.equal('SLP_TOKEN_TYPE_FUNGIBLE');
    // We can get token supply from checking utxos
    const supply = (await chronik.tokenId(slpGenesisTokenId).utxos()).utxos
        .map(utxo => utxo.token!.atoms)
        .reduce((prev, curr) => prev + curr, 0n);
    expect(supply).to.equal(genesisMintQty);
    // We can mint more of our test token
    const extendedMintQuantity = 333n;
    // Construct the Action for this tx
    const slpMintAction: payment.Action = {
        outputs: [
            /** Blank OP_RETURN at outIdx 0 */
            { sats: 0n },
            /** Mint qty at outIdx 1 (same as GENESIS) per SLP spec */
            {
                sats: 546n,
                script: slpWallet.script,
                tokenId: slpGenesisTokenId,
                atoms: extendedMintQuantity,
            },
            /** Some normal outputs to show we can have a mint baton at an outIdx > 2 */
            { sats: 1000n, script: MOCK_DESTINATION_SCRIPT },
            { sats: 1001n, script: MOCK_DESTINATION_SCRIPT },
            { sats: 1002n, script: MOCK_DESTINATION_SCRIPT },
            /** Mint baton at outIdx 5, in valid spec range 2-255 */
            {
                sats: 546n,
                script: slpWallet.script,
                tokenId: slpGenesisTokenId,
                isMintBaton: true,
                atoms: 0n,
            },
        ],
        tokenActions: [
            /** SLP mint action */
            {
                type: 'MINT',
                tokenId: slpGenesisTokenId,
                tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
            },
        ],
    };
    // Build and broadcast the MINT tx
    await slpWallet.action(slpMintAction).build().broadcast();
    // Token supply has increased by the mint amount
    const updatedSupply = (
        await chronik.tokenId(slpGenesisTokenId).utxos()
    ).utxos
        .map(utxo => utxo.token!.atoms)
        .reduce((prev, curr) => prev + curr, 0n);
    expect(updatedSupply).to.equal(genesisMintQty + extendedMintQuantity);
    // Include SLP_MAX_SEND_OUTPUTS-1 outputs so we can (just) fit token change AND a leftover output
    const tokenSendOutputs: payment.PaymentOutput[] = [];
    for (let i = 1; i <= SLP_MAX_SEND_OUTPUTS - 1; i++) {
        tokenSendOutputs.push({
            sats: 546n,
            script: slpWallet.script,
            tokenId: slpGenesisTokenId,
            atoms: BigInt(i),
        });
    }
    // We can SEND our test token
    const slpSendAction: payment.Action = {
        outputs: [
            /** Blank OP_RETURN at outIdx 0 */
            { sats: 0n },
            /**
             * SEND qtys at outIdx 1-18
             * In this way, we expect token change
             * at outIdx 19, the highest available outIdx
             * for SLP token outputs
             */
            ...tokenSendOutputs,
        ],
        tokenActions: [
            /** SLP send action */
            {
                type: 'SEND',
                tokenId: slpGenesisTokenId,
                tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
            },
        ],
    };
    const slpSendActionTooManyOutputs: payment.Action = {
        outputs: [
            /** Blank OP_RETURN at outIdx 0 */
            { sats: 0n },
            /**
             * SEND qtys at outIdx 1-18
             * In this way, we expect token change
             * at outIdx 19, the highest available outIdx
             * for SLP token outputs
             */
            ...tokenSendOutputs,
            // Add a single additional token output
            // We will try to add a token change output and this will be an output too far for spec
            {
                sats: 546n,
                script: slpWallet.script,
                tokenId: slpGenesisTokenId,
                atoms: 1n,
            },
        ],
        tokenActions: [
            /** SLP send action */
            {
                type: 'SEND',
                tokenId: slpGenesisTokenId,
                tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
            },
        ],
    };
    // For SLP, we can't build a tx that needs token change if that token change would be the 20th output
    expect(() =>
        slpWallet
            .clone()
            .action(slpSendActionTooManyOutputs)
            .build(ALL_BIP143),
    ).to.throw(
        Error,
        `Tx needs a token change output to avoid burning atoms of ${slpGenesisTokenId}, but the token change output would be at outIdx 20 which is greater than the maximum allowed outIdx of 19 for SLP_TOKEN_TYPE_FUNGIBLE.`,
    );
    // Build and broadcast
    const sendResponse = await slpWallet
        .action(slpSendAction)
        .build()
        .broadcast();
    const slpSendTxid = sendResponse.broadcasted[0];
    const sendTx = await chronik.tx(slpSendTxid);
    expect(sendTx.tokenEntries).to.have.length(1);
    expect(sendTx.tokenEntries[0].txType).to.equal('SEND');
    expect(sendTx.tokenEntries[0].actualBurnAtoms).to.equal(0n);
    expect(sendTx.tokenStatus).to.equal('TOKEN_STATUS_NORMAL');
    // We cannot burn an SLP amount that we do not have exact utxos for
    const burnAtomsThatDoNotMatchUtxos = 300n;
    const slpCannotBurnAction: payment.Action = {
        outputs: [
            /** Blank OP_RETURN at outIdx 0 */
            { sats: 0n },
            /**
             * We do not specify any token outputs
             * for an SLP burn action
             */
        ],
        tokenActions: [
            /** SLP burn action */
            {
                type: 'BURN',
                tokenId: slpGenesisTokenId,
                burnAtoms: burnAtomsThatDoNotMatchUtxos,
                tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
            },
        ],
    };
    const burnAtoms = 333n;
    const slpBurnAction: payment.Action = {
        outputs: [
            /** Blank OP_RETURN at outIdx 0 */
            { sats: 0n },
            /**
             * We do not specify any token outputs
             * for an SLP burn action
             */
        ],
        tokenActions: [
            /** SLP burn action */
            {
                type: 'BURN',
                tokenId: slpGenesisTokenId,
                burnAtoms,
                tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
            },
        ],
    };
    // Build and broadcast
    const burnResponse = await slpWallet
        .action(slpBurnAction)
        .build()
        .broadcast();
    const burnTx = await chronik.tx(burnResponse.broadcasted[0]);
    expect(burnTx.tokenEntries).to.have.length(1);
    expect(burnTx.tokenEntries[0].txType).to.equal('BURN');
    expect(burnTx.tokenEntries[0].actualBurnAtoms).to.equal(burnAtoms);
    expect(burnTx.tokenEntries[0].intentionalBurnAtoms).to.equal(burnAtoms);
    expect(burnTx.tokenEntries[0].burnSummary).to.equal(``);
    expect(burnTx.tokenStatus).to.equal('TOKEN_STATUS_NORMAL');
    // We can burn exact atoms that do not match an existing utxo with a chained tx:
    // a SEND first creates an exact-quantity utxo, then the BURN consumes it
    const chainedBurn = await slpWallet
        .action(slpCannotBurnAction)
        .build()
        .broadcast();
    expect(chainedBurn.success).to.equal(true);
    expect(chainedBurn.broadcasted).to.have.length(2);
    const burnUtxoPrepTxid = chainedBurn.broadcasted[0];
    const chainedBurnTxid = chainedBurn.broadcasted[1];
    const burnUtxoPrepTx = await chronik.tx(burnUtxoPrepTxid);
    const chainedBurnTx = await chronik.tx(chainedBurnTxid);
    expect(burnUtxoPrepTx.tokenEntries).to.have.length(1);
    expect(burnUtxoPrepTx.tokenEntries[0].txType).to.equal('SEND');
    expect(burnUtxoPrepTx.tokenEntries[0].actualBurnAtoms).to.equal(0n);
    expect(burnUtxoPrepTx.tokenStatus).to.equal('TOKEN_STATUS_NORMAL');
    expect(chainedBurnTx.tokenEntries).to.have.length(1);
    expect(chainedBurnTx.tokenEntries[0].txType).to.equal('BURN');
    expect(chainedBurnTx.tokenEntries[0].actualBurnAtoms).to.equal(
        burnAtomsThatDoNotMatchUtxos,
    );
    expect(chainedBurnTx.tokenEntries[0].intentionalBurnAtoms).to.equal(
        burnAtomsThatDoNotMatchUtxos,
    );
    expect(chainedBurnTx.tokenEntries[0].burnSummary).to.equal(``);
    expect(chainedBurnTx.tokenStatus).to.equal('TOKEN_STATUS_NORMAL');
});
it('We can handle ALP ALP_TOKEN_TYPE_STANDARD token actions', async () => {
// Init the wallet
const alpWallet = Wallet.fromSk(fromHex('14'.repeat(32)), chronik);
// Send 1M XEC to the wallet
const inputSats = 1_000_000_00n;
await runner.sendToScript(inputSats, alpWallet.script);
// Sync the wallet
await alpWallet.sync();
// We can GENESIS a fungible token with multiple mint quantities and multiple mint batons
const alpGenesisInfo = {
tokenTicker: 'ALP',
tokenName: 'ALP Test Token',
url: 'cashtab.com',
decimals: 0,
/** ALP allows arbitrary data in genesis */
data: 'deadbeef',
authPubkey: toHex(alpWallet.pk),
};
const genesisMintQtyAlpha = 1_000n;
const genesisMintQtyBeta = 2_000n;
// Construct the Action for this tx
const alpGenesisAction: payment.Action = {
outputs: [
/** Blank OP_RETURN at outIdx 0 */
{ sats: 0n },
/** Misc XEC output at outIdx 1, there is no spec req in ALP for genesis qty at outIdx 1 */
{ sats: 5_555n, script: alpWallet.script },
/** Mint qty at outIdx 2 for a token we do not have */
{
sats: 546n,
script: alpWallet.script,
tokenId: payment.GENESIS_TOKEN_ID_PLACEHOLDER,
atoms: genesisMintQtyAlpha,
},
/** Another misc XEC output at outIdx 2, to show we support non-consecutive mint quantities */
{ sats: 3_333n, script: alpWallet.script },
/** Mint qty at outIdx 3 */
{
sats: 546n,
script: alpWallet.script,
tokenId: payment.GENESIS_TOKEN_ID_PLACEHOLDER,
atoms: genesisMintQtyBeta,
},
/**
* Another misc XEC output at outIdx 4, to show
* that mintBaton outIdx does not necessarily
* immediately follow mint qty */
{ sats: 5_555n, script: alpWallet.script },
/** Mint baton at outIdx 5 */
{
sats: 546n,
script: alpWallet.script,
tokenId: payment.GENESIS_TOKEN_ID_PLACEHOLDER,
isMintBaton: true,
atoms: 0n,
},
/** Another mint baton at outIdx 6 */
{
sats: 546n,
script: alpWallet.script,
tokenId: payment.GENESIS_TOKEN_ID_PLACEHOLDER,
isMintBaton: true,
atoms: 0n,
},
],
tokenActions: [
{
type: 'GENESIS',
tokenType: {
protocol: 'ALP',
type: 'ALP_TOKEN_TYPE_STANDARD',
number: 0,
},
genesisInfo: alpGenesisInfo,
},
],
};
// Build and broadcast
const resp = await alpWallet
.action(alpGenesisAction)
.build()
.broadcast();
const alpGenesisTokenId = resp.broadcasted[0];
// It's a valid ALP genesis tx
const tokenInfo = await chronik.token(alpGenesisTokenId);
expect(tokenInfo.tokenType.type).to.equal('ALP_TOKEN_TYPE_STANDARD');
// We can get token supply from checking utxos
const supply = (await chronik.tokenId(alpGenesisTokenId).utxos()).utxos
.map(utxo => utxo.token!.atoms)
.reduce((prev, curr) => prev + curr, 0n);
expect(supply).to.equal(genesisMintQtyAlpha + genesisMintQtyBeta);
// We get the expected number of mint batons
const alpTokenUtxos = await chronik.tokenId(alpGenesisTokenId).utxos();
let numBatons = 0;
alpTokenUtxos.utxos.forEach(tokenUtxo => {
if (tokenUtxo.token?.isMintBaton) {
numBatons += 1;
}
});
expect(numBatons).to.equal(2);
// We can mint more of our ALP test token
// NB we cannot combine MINT with SEND for ALP txs
// NB it would be on-spec to combine MINT with BURN for ALP,
// but this is NOT supported by ecash-wallet as we would have to
// provide exact-quantity utxos
const extendedMintQuantityAlpha = 3_000n;
const extendedMintQuantityBeta = 5_000n;
// Construct the Action for this tx
const alpMintAction: payment.Action = {
outputs: [
/** Blank OP_RETURN at outIdx 0 */
{ sats: 0n },
/** Mint qty at outIdx 1 */
{
sats: 546n,
script: alpWallet.script,
tokenId: alpGenesisTokenId,
atoms: extendedMintQuantityAlpha,
},
/** A non-token output at outIdx 2 */
{ sats: 1000n, script: MOCK_DESTINATION_SCRIPT },
/** Another mint qty at outIdx 3 as ALP supports multiple non-consecutive mint quantities */
{
sats: 546n,
script: alpWallet.script,
tokenId: alpGenesisTokenId,
atoms: extendedMintQuantityBeta,
},
/** Some normal outputs to show we can have a mint baton anywhere later */
{ sats: 1001n, script: MOCK_DESTINATION_SCRIPT },
{ sats: 1002n, script: MOCK_DESTINATION_SCRIPT },
/** Mint baton at outIdx 6 */
{
sats: 546n,
script: alpWallet.script,
tokenId: alpGenesisTokenId,
isMintBaton: true,
atoms: 0n,
},
/** Another mint baton at outIdx 7 because ALP lets you mint multiple mint batons */
{
sats: 546n,
script: alpWallet.script,
tokenId: alpGenesisTokenId,
isMintBaton: true,
atoms: 0n,
},
],
tokenActions: [
/** ALP mint action */ {
type: 'MINT',
tokenId: alpGenesisTokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
],
};
// Build and broadcast the MINT tx
await alpWallet.action(alpMintAction).build().broadcast();
// Token supply has increased by the mint amount
const updatedSupply = (
await chronik.tokenId(alpGenesisTokenId).utxos()
).utxos
.map(utxo => utxo.token!.atoms)
.reduce((prev, curr) => prev + curr, 0n);
expect(updatedSupply).to.equal(
genesisMintQtyAlpha +
genesisMintQtyBeta +
extendedMintQuantityAlpha +
extendedMintQuantityBeta,
);
// Now we expect an additional mint baton
const alpTokenUtxosAfterMint = await chronik
.tokenId(alpGenesisTokenId)
.utxos();
let numBatonsNow = 0;
alpTokenUtxosAfterMint.utxos.forEach(tokenUtxo => {
if (tokenUtxo.token?.isMintBaton) {
numBatonsNow += 1;
}
});
expect(numBatonsNow).to.equal(numBatons + 1);
// We can MINT and BURN the same tokenId ... if we want to for some reason
const extendedMintQuantityDelta = 1n;
// NB the burn qty must be exactly summable by inputs for a MINT + BURN, as we can not also have SEND for the same tokenId
const burnAtomsWithMint = 1000n;
// Construct the Action for this tx
const alpMintandBurnAction: payment.Action = {
outputs: [
/** Blank OP_RETURN at outIdx 0 */
{ sats: 0n },
/** Mint qty at outIdx 1 */
{
sats: 546n,
script: alpWallet.script,
tokenId: alpGenesisTokenId,
atoms: extendedMintQuantityDelta,
},
/** Mint baton at outIdx 2 */
{
sats: 546n,
script: alpWallet.script,
tokenId: alpGenesisTokenId,
atoms: 0n,
isMintBaton: true,
},
],
tokenActions: [
{
type: 'MINT',
tokenId: alpGenesisTokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
{
type: 'BURN',
tokenId: alpGenesisTokenId,
burnAtoms: burnAtomsWithMint,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
],
};
// Build and broadcast the MINT tx
const mintBurn = await alpWallet
.action(alpMintandBurnAction)
.build()
.broadcast();
const mintAndBurnTx = await chronik.tx(mintBurn.broadcasted[0]);
// This is a valid MINT and BURN tx
expect(mintAndBurnTx.tokenEntries).to.have.length(1);
expect(mintAndBurnTx.tokenEntries[0].txType).to.equal('MINT');
expect(mintAndBurnTx.tokenEntries[0].actualBurnAtoms).to.equal(
burnAtomsWithMint,
);
expect(mintAndBurnTx.tokenEntries[0].intentionalBurnAtoms).to.equal(
burnAtomsWithMint,
);
expect(mintAndBurnTx.tokenEntries[0].burnsMintBatons).to.equal(false);
// I dunno if I would call this normal but I don't have a better idea
expect(mintAndBurnTx.tokenStatus).to.equal('TOKEN_STATUS_NORMAL');
// Token supply has increased by the mint amount AND decreased by the burn amount 🤯
const latestSupply = (
await chronik.tokenId(alpGenesisTokenId).utxos()
).utxos
.map(utxo => utxo.token!.atoms)
.reduce((prev, curr) => prev + curr, 0n);
expect(latestSupply).to.equal(
updatedSupply + extendedMintQuantityDelta - burnAtomsWithMint,
);
// We can mint more, and also include a second genesis tx
/**
* TODO
* [] Perhaps we should make minting 1 mint baton the DEFAULT
* condition. Could see it being pretty easy to accidentally
* NOT mint another baton, thereby burning (mb the only) baton
*/
const extendedMintQuantityGamma = 5n;
const alpGenesisBetaMintQty = 1000n;
const alpGenesisInfoBeta = {
tokenTicker: 'BETA',
tokenName: 'ALP Test Token Beta',
url: 'cashtab.com',
decimals: 9,
/** ALP allows arbitrary data in genesis */
data: 'abadcafe',
authPubkey: toHex(alpWallet.pk),
};
// Construct the Action for this tx
const alpGenesisAndMintAction: payment.Action = {
outputs: [
/** Blank OP_RETURN at outIdx 0 */
{ sats: 0n },
/** Genesis mint qty at outIdx 1*/
{
sats: 546n,
script: alpWallet.script,
tokenId: payment.GENESIS_TOKEN_ID_PLACEHOLDER,
atoms: alpGenesisBetaMintQty,
},
/** NB no mint baton for this genesis */
/** Mint qty for slpGenesisAlpha at outIdx 1*/
{
sats: 546n,
script: alpWallet.script,
tokenId: alpGenesisTokenId,
atoms: extendedMintQuantityGamma,
},
/** NB no mint baton for alpGenesisTokenId, so impact is we burn a mint baton */
],
tokenActions: [
{
type: 'GENESIS',
tokenType: {
protocol: 'ALP',
type: 'ALP_TOKEN_TYPE_STANDARD',
number: 0,
},
genesisInfo: alpGenesisInfoBeta,
},
{
type: 'MINT',
tokenId: alpGenesisTokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
],
};
// Build and broadcast
const genesisAndMintResp = await alpWallet
.action(alpGenesisAndMintAction)
.build()
.broadcast();
// For a genesis action, the broadcast txid IS the new tokenId
const alpGenesisTokenIdBeta = genesisAndMintResp.broadcasted[0];
// It's a valid ALP genesis tx
const tokenInfoBeta = await chronik.token(alpGenesisTokenIdBeta);
expect(tokenInfoBeta.tokenType.type).to.equal(
'ALP_TOKEN_TYPE_STANDARD',
);
// We can get token supply from checking utxos
const tokenBetaSupply = (
await chronik.tokenId(alpGenesisTokenIdBeta).utxos()
).utxos
.map(utxo => utxo.token!.atoms)
.reduce((prev, curr) => prev + curr, 0n);
// Entire BETA supply is the single genesis mint output
expect(tokenBetaSupply).to.equal(alpGenesisBetaMintQty);
// We get the expected number of mint batons for the new token, 0
// NB we burned a mint baton with this tx without specifying any burn instructions
// [] TODO, we should throw an error for this condition
const alpTokenUtxosBeta = await chronik
.tokenId(alpGenesisTokenIdBeta)
.utxos();
let numBatonsBeta = 0;
alpTokenUtxosBeta.utxos.forEach(tokenUtxo => {
if (tokenUtxo.token?.isMintBaton) {
numBatonsBeta += 1;
}
});
// BETA genesis included no mint-baton output, so no batons exist
expect(numBatonsBeta).to.equal(0);
// We burned a mint baton for the first token
// The MINT action above included no baton output, so the spent baton was burned
const alpTokenUtxosAfterSecondMint = await chronik
.tokenId(alpGenesisTokenId)
.utxos();
let numBatonsAfterBurn = 0;
alpTokenUtxosAfterSecondMint.utxos.forEach(tokenUtxo => {
if (tokenUtxo.token?.isMintBaton) {
numBatonsAfterBurn += 1;
}
});
// Exactly one fewer baton than before the mint tx
expect(numBatonsAfterBurn).to.equal(numBatonsNow - 1);
// We can send multiple tokens in the same tx
// Note that we expect a change output for BOTH tokens
// So lets only include TWO fewer outputs than ALP_POLICY_MAX_OUTPUTS
const alpSendOutputs: payment.PaymentOutput[] = [];
// One token-change output per token being sent (two tokens here)
const EXPECTED_CHANGE_OUTPUTS = 2;
for (
let i = 1;
i <= ALP_POLICY_MAX_OUTPUTS - EXPECTED_CHANGE_OUTPUTS;
i++
) {
alpSendOutputs.push({
sats: 546n,
script: MOCK_DESTINATION_SCRIPT,
// mix of outputs for each token
tokenId:
i % 2 === 0 ? alpGenesisTokenId : alpGenesisTokenIdBeta,
atoms: BigInt(i),
});
}
// We can SEND our test token
const alpSendAction: payment.Action = {
outputs: [
/** Blank OP_RETURN at outIdx 0 */
{ sats: 0n },
/**
* SEND qtys at outIdx 1 through
* ALP_POLICY_MAX_OUTPUTS - 2. In this way, we
* expect token change for each token at the two
* highest outIdx available for ALP token outputs
*/
...alpSendOutputs,
],
tokenActions: [
/** ALP send action for first token */
{
type: 'SEND',
tokenId: alpGenesisTokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
/** ALP send action for 2nd token */
{
type: 'SEND',
tokenId: alpGenesisTokenIdBeta,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
],
};
// Build and broadcast
// Looks like this should work. After all, we will have 29 outputs.
// But it won't, because we still have to push the 0 atoms into two atomsArrays
// So we exceed the OP_RETURN
// NB clone() so the failed build does not consume state on alpWallet
expect(() =>
alpWallet.clone().action(alpSendAction).build(ALL_BIP143),
).to.throw(
Error,
`Specified action results in OP_RETURN of 434 bytes, vs max allowed of ${OP_RETURN_MAX_BYTES}.`,
);
// Ok let's cull some outputs
// Take the first 9 outputs. NB this will give us an OP_RETURN of 218 bytes
const alpSendOutputsThatFit = alpSendOutputs.slice(0, 9);
const alpSendActionThatWorks: payment.Action = {
outputs: [
/** Blank OP_RETURN at outIdx 0 */
{ sats: 0n },
/**
* SEND qtys at outIdx 1-9
* In this way, we expect token change
* at outIdx 10 and outIdx 11
*
* Not the highest available outIdx for ALP,
* but the highest available given a 223-byte
* OP_RETURN constraint
*/
...alpSendOutputsThatFit,
],
tokenActions: [
/** ALP send action for first token */
{
type: 'SEND',
tokenId: alpGenesisTokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
/** ALP send action for 2nd token */
{
type: 'SEND',
tokenId: alpGenesisTokenIdBeta,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
],
};
// This one fits in the OP_RETURN limit, so build + broadcast succeeds
const alpSendResponse = await alpWallet
.action(alpSendActionThatWorks)
.build()
.broadcast();
const alpSendTxid = alpSendResponse.broadcasted[0];
const sendTx = await chronik.tx(alpSendTxid);
// We sent two tokens
// One tokenEntry per tokenId involved in the tx
expect(sendTx.tokenEntries).to.have.length(2);
expect(sendTx.tokenEntries[0].txType).to.equal('SEND');
expect(sendTx.tokenEntries[0].actualBurnAt