ecash-lib

Library for eCash transaction building (compiled module: alp.js)

"use strict"; // Copyright (c) 2024 The Bitcoin developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. Object.defineProperty(exports, "__esModule", { value: true }); exports.alpBurn = exports.alpSend = exports.alpMint = exports.alpGenesis = exports.ALP_POLICY_MAX_OUTPUTS = exports.ALP_TOKEN_TYPE_STANDARD = exports.ALP_MAX_SIZE = exports.ALP_STANDARD = exports.ALP_LOKAD_ID = exports.ALP_LOKAD_ID_STR = void 0; const hex_js_1 = require("../io/hex.js"); const str_js_1 = require("../io/str.js"); const writerbytes_js_1 = require("../io/writerbytes.js"); const writerlength_js_1 = require("../io/writerlength.js"); const common_js_1 = require("./common.js"); /** LOKAD ID for ALP as string */ exports.ALP_LOKAD_ID_STR = 'SLP2'; /** LOKAD ID for ALP */ exports.ALP_LOKAD_ID = (0, str_js_1.strToBytes)(exports.ALP_LOKAD_ID_STR); /** ALP standard token type number */ exports.ALP_STANDARD = 0; /** ALP limits lengths/sizes to this number, e.g. the number of outputs */ exports.ALP_MAX_SIZE = 127; exports.ALP_TOKEN_TYPE_STANDARD = { protocol: 'ALP', type: 'ALP_TOKEN_TYPE_STANDARD', number: exports.ALP_STANDARD, }; /** * Although ALP_MAX_SIZE is 127, in practice we can only * handle 29 ALP outputs in a single OP_RETURN given the curent * 223-byte OP_RETURN size limit, and even this assumes * we are only working with 1 ALP token. * * For example an ALP tx that sends multiple tokens cannot support * 29 token outputs as the instructions will require more than 223 * bytes in the OP_RETURN. * * So, this is an upper bound on ALP outputs per current mempool * acceptance rules */ exports.ALP_POLICY_MAX_OUTPUTS = 29; /** Build an ALP GENESIS pushdata section, creating a new ALP token */ function alpGenesis(tokenType, genesisInfo, mintData) { const writeSection = (writer) => { writer.putBytes(exports.ALP_LOKAD_ID); writer.putU8(tokenType); putVarBytes(common_js_1.GENESIS, writer); putVarBytes((0, str_js_1.strToBytes)(genesisInfo.tokenTicker ?? ''), writer); putVarBytes((0, str_js_1.strToBytes)(genesisInfo.tokenName ?? ''), writer); putVarBytes((0, str_js_1.strToBytes)(genesisInfo.url ?? ''), writer); putVarBytes((0, hex_js_1.fromHex)(genesisInfo.data ?? ''), writer); putVarBytes((0, hex_js_1.fromHex)(genesisInfo.authPubkey ?? ''), writer); writer.putU8(genesisInfo.decimals ?? 0); putMintData(mintData, writer); }; const writerLength = new writerlength_js_1.WriterLength(); writeSection(writerLength); const writerBytes = new writerbytes_js_1.WriterBytes(writerLength.length); writeSection(writerBytes); return writerBytes.data; } exports.alpGenesis = alpGenesis; /** * Build an ALP MINT pushdata section, creating new ALP tokens and mint batons * of the given token ID. 
/**
 * Build an ALP MINT pushdata section, creating new ALP tokens and mint batons
 * of the given token ID.
 **/
function alpMint(tokenId, tokenType, mintData) {
    const tokenIdBytes = (0, hex_js_1.fromHexRev)(tokenId);
    const writeSection = (writer) => {
        writer.putBytes(exports.ALP_LOKAD_ID);
        writer.putU8(tokenType);
        putVarBytes(common_js_1.MINT, writer);
        writer.putBytes(tokenIdBytes);
        putMintData(mintData, writer);
    };
    const writerLength = new writerlength_js_1.WriterLength();
    writeSection(writerLength);
    const writerBytes = new writerbytes_js_1.WriterBytes(writerLength.length);
    writeSection(writerBytes);
    return writerBytes.data;
}
exports.alpMint = alpMint;
/**
 * Build an ALP SEND pushdata section, moving ALP tokens to different outputs
 **/
function alpSend(tokenId, tokenType, sendAtomsArray) {
    const tokenIdBytes = (0, hex_js_1.fromHexRev)(tokenId);
    const writeSection = (writer) => {
        writer.putBytes(exports.ALP_LOKAD_ID);
        writer.putU8(tokenType);
        writer.putU8(common_js_1.SEND.length);
        writer.putBytes(common_js_1.SEND);
        writer.putBytes(tokenIdBytes);
        writer.putU8(sendAtomsArray.length);
        for (const atoms of sendAtomsArray) {
            putAlpAtoms(atoms, writer);
        }
    };
    const writerLength = new writerlength_js_1.WriterLength();
    writeSection(writerLength);
    const writerBytes = new writerbytes_js_1.WriterBytes(writerLength.length);
    writeSection(writerBytes);
    return writerBytes.data;
}
exports.alpSend = alpSend;
/** Build an ALP BURN pushdata section, intentionally burning ALP tokens. */
function alpBurn(tokenId, tokenType, burnAtoms) {
    const tokenIdBytes = (0, hex_js_1.fromHexRev)(tokenId);
    const writeSection = (writer) => {
        writer.putBytes(exports.ALP_LOKAD_ID);
        writer.putU8(tokenType);
        writer.putU8(common_js_1.BURN.length);
        writer.putBytes(common_js_1.BURN);
        writer.putBytes(tokenIdBytes);
        putAlpAtoms(burnAtoms, writer);
    };
    const writerLength = new writerlength_js_1.WriterLength();
    writeSection(writerLength);
    const writerBytes = new writerbytes_js_1.WriterBytes(writerLength.length);
    writeSection(writerBytes);
    return writerBytes.data;
}
exports.alpBurn = alpBurn;
// Write the mint amounts followed by the number of mint batons
function putMintData(mintData, writer) {
    writer.putU8(mintData.atomsArray.length);
    for (const atoms of mintData.atomsArray) {
        putAlpAtoms(atoms, writer);
    }
    writer.putU8(mintData.numBatons);
}
// Write a token amount as 6 bytes: the low 32 bits, then the high 16 bits
function putAlpAtoms(atoms, writer) {
    const atomsN = BigInt(atoms);
    writer.putU32(atomsN & 0xffffffffn);
    writer.putU16(atomsN >> 32n);
}
// Write a length-prefixed byte string (single-byte length, max 127 bytes)
function putVarBytes(bytes, writer) {
    if (bytes.length > 127) {
        throw new Error('Length of bytes must be between 0 and 127');
    }
    writer.putU8(bytes.length);
    writer.putBytes(bytes);
}
//# sourceMappingURL=alp.js.map
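// Usage sketch (illustrative, not part of the published module): build SEND
// and BURN pushdata sections for an existing token. The token ID, amounts,
// and require path are placeholders/assumptions; only `alpSend`, `alpBurn`,
// `ALP_STANDARD`, and `ALP_POLICY_MAX_OUTPUTS` come from the code in this
// file.
//
//     const {
//         alpSend,
//         alpBurn,
//         ALP_STANDARD,
//         ALP_POLICY_MAX_OUTPUTS,
//     } = require('ecash-lib');
//
//     const tokenId = '11'.repeat(32); // placeholder 32-byte token ID (hex)
//
//     // Entry i of the atoms array is the token amount assigned to output
//     // i + 1 (output 0 carries the OP_RETURN itself).
//     const sendSection = alpSend(tokenId, ALP_STANDARD, [600n, 400n]);
//
//     // Intentionally burn 250 atoms of the same token.
//     const burnSection = alpBurn(tokenId, ALP_STANDARD, 250n);
//
//     // Per the policy note above, a single-token tx can carry at most
//     // ALP_POLICY_MAX_OUTPUTS (29) token outputs under the 223-byte
//     // OP_RETURN limit.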