UNPKG

@ckbfs/api

SDK for CKBFS protocol on CKB

625 lines (624 loc) 26.8 kB
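
Before the source listing, a minimal publish-flow sketch of how this module's exports might be driven. This is an illustration, not part of the published file: it assumes the package root re-exports publishCKBFS, assumes ccc.ClientPublicTestnet and ccc.SignerCkbPrivateKey from @ckb-ccc/core for the signer, and assumes "testnet" is a valid network value; check the package's type definitions for the exact option shapes.

// Hypothetical usage sketch (not part of the file below): publish a small
// text file to CKBFS. Option names follow preparePublishTransaction in the
// source listing; import paths and network value are assumptions.
const { ccc } = require("@ckb-ccc/core");
const { publishCKBFS } = require("@ckbfs/api");

async function main() {
  const client = new ccc.ClientPublicTestnet();
  const signer = new ccc.SignerCkbPrivateKey(client, "0x...private key...");
  const { script: lock } = await signer.getRecommendedAddressObj();

  // Split the raw content into chunks; the SDK's witness helper
  // (createChunkedCKBFSWitnesses) wraps each chunk into a CKBFS witness.
  const content = Buffer.from("hello CKBFS", "utf8");
  const contentChunks = [content];

  const signedTx = await publishCKBFS(signer, {
    contentChunks,
    contentType: "text/plain",
    filename: "hello.txt",
    lock,
    network: "testnet", // assumed value; omitted fields fall back to the module defaults
    feeRate: 2000,
  });
  const txHash = await signer.client.sendTransaction(signedTx);
  console.log("Published CKBFS cell in tx", txHash);
}

main().catch(console.error);
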
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.ensureHexPrefix = ensureHexPrefix; exports.createCKBFSCell = createCKBFSCell; exports.preparePublishTransaction = preparePublishTransaction; exports.createPublishTransaction = createPublishTransaction; exports.prepareAppendTransaction = prepareAppendTransaction; exports.createAppendTransaction = createAppendTransaction; exports.createAppendTransactionDry = createAppendTransactionDry; exports.publishCKBFS = publishCKBFS; exports.appendCKBFS = appendCKBFS; const core_1 = require("@ckb-ccc/core"); const checksum_1 = require("./checksum"); const molecule_1 = require("./molecule"); const witness_1 = require("./witness"); const constants_1 = require("./constants"); /** * Ensures a string is prefixed with '0x' * @param value The string to ensure is hex prefixed * @returns A hex prefixed string */ function ensureHexPrefix(value) { if (value.startsWith("0x")) { return value; } return `0x${value}`; } /** * Creates a CKBFS cell * @param options Options for creating the CKBFS cell * @returns The created cell output */ function createCKBFSCell(options) { const { contentType, filename, capacity, lock, network = constants_1.DEFAULT_NETWORK, version = constants_1.DEFAULT_VERSION, useTypeID = false, } = options; // Get CKBFS script config const config = (0, constants_1.getCKBFSScriptConfig)(network, version, useTypeID); // Create pre CKBFS type script const preCkbfsTypeScript = new core_1.Script(ensureHexPrefix(config.codeHash), config.hashType, "0x0000000000000000000000000000000000000000000000000000000000000000"); // Return the cell output return { lock, type: preCkbfsTypeScript, capacity: capacity || 200n * 100000000n, // Default 200 CKB }; } /** * Prepares a transaction for publishing a file to CKBFS without fee and change handling * You will need to manually set the typeID if you did not provide inputs, or just check is return value emptyTypeID is true * @param options Options for publishing the file * @returns Promise resolving to the prepared transaction and the output index of CKBFS Cell */ async function preparePublishTransaction(options) { const { from, contentChunks, contentType, filename, lock, capacity, network = constants_1.DEFAULT_NETWORK, version = constants_1.DEFAULT_VERSION, useTypeID = false, } = options; // Calculate checksum for the combined content const combinedContent = Buffer.concat(contentChunks); const checksum = await (0, checksum_1.calculateChecksum)(combinedContent); // Create CKBFS witnesses - each chunk already includes the CKBFS header // Pass 0 as version byte - this is the protocol version byte in the witness header // not to be confused with the Protocol Version (V1 vs V2) const ckbfsWitnesses = (0, witness_1.createChunkedCKBFSWitnesses)(contentChunks); // Calculate the actual witness indices where our content is placed const contentStartIndex = from?.witnesses.length || 1; const witnessIndices = Array.from({ length: contentChunks.length }, (_, i) => contentStartIndex + i); // Create CKBFS cell output data based on version let outputData; if (version === constants_1.ProtocolVersion.V1) { // V1 format: Single index field (a single number, not an array) // For V1, use the first index where content is placed outputData = molecule_1.CKBFSData.pack({ index: contentStartIndex, checksum, contentType: contentType, filename: filename, backLinks: [], }, version); } else { // V2 format: Multiple indexes (array of numbers) // For V2, use all the indices where content is placed outputData = 
molecule_1.CKBFSData.pack({ indexes: witnessIndices, checksum, contentType, filename, backLinks: [], }, version); } // Get CKBFS script config const config = (0, constants_1.getCKBFSScriptConfig)(network, version, useTypeID); const preCkbfsTypeScript = new core_1.Script(ensureHexPrefix(config.codeHash), config.hashType, "0x0000000000000000000000000000000000000000000000000000000000000000"); const ckbfsCellSize = BigInt(outputData.length + preCkbfsTypeScript.occupiedSize + lock.occupiedSize + 8) * 100000000n; // Create pre transaction without cell deps initially let preTx; if (from) { // If from is not empty, inject/merge the fields preTx = core_1.Transaction.from({ ...from, outputs: from.outputs.length === 0 ? [ createCKBFSCell({ contentType, filename, lock, network, version, useTypeID, capacity: ckbfsCellSize || capacity, }), ] : [ ...from.outputs, createCKBFSCell({ contentType, filename, lock, network, version, useTypeID, capacity: ckbfsCellSize || capacity, }), ], witnesses: from.witnesses.length === 0 ? [ [], // Empty secp witness for signing if not provided ...ckbfsWitnesses.map((w) => `0x${Buffer.from(w).toString("hex")}`), ] : [ ...from.witnesses, ...ckbfsWitnesses.map((w) => `0x${Buffer.from(w).toString("hex")}`), ], outputsData: from.outputsData.length === 0 ? [outputData] : [ ...from.outputsData, outputData, ], }); } else { preTx = core_1.Transaction.from({ outputs: [ createCKBFSCell({ contentType, filename, lock, network, version, useTypeID, capacity: ckbfsCellSize || capacity, }), ], witnesses: [ [], // Empty secp witness for signing ...ckbfsWitnesses.map((w) => `0x${Buffer.from(w).toString("hex")}`), ], outputsData: [outputData], }); } // Add the CKBFS dep group cell dependency preTx.addCellDeps({ outPoint: { txHash: ensureHexPrefix(config.depTxHash), index: config.depIndex || 0, }, depType: "depGroup", }); // Create type ID args const outputIndex = from ? from.outputs.length : 0; const args = preTx.inputs.length > 0 ? core_1.ccc.hashTypeId(preTx.inputs[0], outputIndex) : "0x0000000000000000000000000000000000000000000000000000000000000000"; // Create CKBFS type script with type ID const ckbfsTypeScript = new core_1.Script(ensureHexPrefix(config.codeHash), config.hashType, args); // Create final transaction with same cell deps as preTx const tx = core_1.Transaction.from({ cellDeps: preTx.cellDeps, witnesses: preTx.witnesses, outputsData: preTx.outputsData, inputs: preTx.inputs, outputs: outputIndex === 0 ? 
[ { lock, type: ckbfsTypeScript, capacity: preTx.outputs[outputIndex].capacity, }, ] : [ ...preTx.outputs.slice(0, outputIndex), // Include rest of outputs (e.g., change) { lock, type: ckbfsTypeScript, capacity: preTx.outputs[outputIndex].capacity, }, ], }); return { tx, outputIndex, emptyTypeID: args === "0x0000000000000000000000000000000000000000000000000000000000000000" }; } /** * Creates a transaction for publishing a file to CKBFS * @param signer The signer to use for the transaction * @param options Options for publishing the file * @returns Promise resolving to the created transaction */ async function createPublishTransaction(signer, options) { const { feeRate, lock, } = options; // Use preparePublishTransaction to create the base transaction const { tx: preTx, outputIndex, emptyTypeID } = await preparePublishTransaction(options); // Complete inputs by capacity await preTx.completeInputsByCapacity(signer); // Complete fee change to lock await preTx.completeFeeChangeToLock(signer, lock, feeRate || 2000); // If emptyTypeID is true, we need to create the proper type ID args if (emptyTypeID) { // Get CKBFS script config const config = (0, constants_1.getCKBFSScriptConfig)(options.network || constants_1.DEFAULT_NETWORK, options.version || constants_1.DEFAULT_VERSION, options.useTypeID || false); // Create type ID args const args = core_1.ccc.hashTypeId(preTx.inputs[0], outputIndex); // Create CKBFS type script with type ID const ckbfsTypeScript = new core_1.Script(ensureHexPrefix(config.codeHash), config.hashType, args); // Create final transaction with updated type script const tx = core_1.Transaction.from({ cellDeps: preTx.cellDeps, witnesses: preTx.witnesses, outputsData: preTx.outputsData, inputs: preTx.inputs, outputs: preTx.outputs.map((output, index) => index === outputIndex ? 
{ lock, type: ckbfsTypeScript, capacity: output.capacity, } : output), }); return tx; } else { // If typeID was already set properly, just reset the first witness for signing const tx = core_1.Transaction.from({ cellDeps: preTx.cellDeps, witnesses: preTx.witnesses, outputsData: preTx.outputsData, inputs: preTx.inputs, outputs: preTx.outputs, }); return tx; } } /** * Prepares a transaction for appending content to a CKBFS file without fee and change handling * @param options Options for appending content * @returns Promise resolving to the prepared transaction and the output index of CKBFS Cell */ async function prepareAppendTransaction(options) { const { from, ckbfsCell, contentChunks, network = constants_1.DEFAULT_NETWORK, version = constants_1.DEFAULT_VERSION, } = options; const { outPoint, data, type, lock, capacity } = ckbfsCell; // Get CKBFS script config early to use version info const config = (0, constants_1.getCKBFSScriptConfig)(network, version); // Create CKBFS witnesses - each chunk already includes the CKBFS header // Pass 0 as version byte - this is the protocol version byte in the witness header // not to be confused with the Protocol Version (V1 vs V2) const ckbfsWitnesses = (0, witness_1.createChunkedCKBFSWitnesses)(contentChunks); // Combine the new content chunks for checksum calculation const combinedContent = Buffer.concat(contentChunks); // Update the existing checksum with the new content - this matches Adler32's // cumulative nature as required by Rule 11 in the RFC const contentChecksum = await (0, checksum_1.updateChecksum)(data.checksum, combinedContent); // Calculate the actual witness indices where our content is placed const contentStartIndex = from?.witnesses.length || 1; const witnessIndices = Array.from({ length: contentChunks.length }, (_, i) => contentStartIndex + i); // Create backlink for the current state based on version let newBackLink; if (version === constants_1.ProtocolVersion.V1) { // V1 format: Use index field (single number) newBackLink = { // In V1, field order is index, checksum, txHash // and index is a single number value, not an array index: data.index || (data.indexes && data.indexes.length > 0 ? data.indexes[0] : 0), checksum: data.checksum, txHash: outPoint.txHash, }; } else { // V2 format: Use indexes field (array of numbers) newBackLink = { // In V2, field order is indexes, checksum, txHash // and indexes is an array of numbers indexes: data.indexes || (data.index ? [data.index] : []), checksum: data.checksum, txHash: outPoint.txHash, }; } // Update backlinks - add the new one to the existing backlinks array const backLinks = [...(data.backLinks || []), newBackLink]; // Define output data based on version let outputData; if (version === constants_1.ProtocolVersion.V1) { // In V1, index is a single number, not an array // The first witness index is used (V1 can only reference one witness) outputData = molecule_1.CKBFSData.pack({ index: witnessIndices[0], // Use only the first index as a number checksum: contentChecksum, contentType: data.contentType, filename: data.filename, backLinks, }, constants_1.ProtocolVersion.V1); // Explicitly use V1 for packing } else { // In V2, indexes is an array of witness indices outputData = molecule_1.CKBFSData.pack({ indexes: witnessIndices, checksum: contentChecksum, contentType: data.contentType, filename: data.filename, backLinks, }, constants_1.ProtocolVersion.V2); // Explicitly use V2 for packing } // Calculate the required capacity for the output cell // This accounts for: // 1. 
The output data size // 2. The type script's occupied size // 3. The lock script's occupied size // 4. A constant of 8 bytes (for header overhead) const ckbfsCellSize = BigInt(outputData.length + type.occupiedSize + lock.occupiedSize + 8) * 100000000n; // Use the maximum value between calculated size and original capacity // to ensure we have enough capacity but don't decrease capacity unnecessarily const outputCapacity = ckbfsCellSize > capacity ? ckbfsCellSize : capacity; // Create initial transaction with the CKBFS cell input let preTx; if (from) { // If from is not empty, inject/merge the fields preTx = core_1.Transaction.from({ ...from, inputs: from.inputs.length === 0 ? [ { previousOutput: { txHash: outPoint.txHash, index: outPoint.index, }, since: "0x0", }, ] : [ ...from.inputs, { previousOutput: { txHash: outPoint.txHash, index: outPoint.index, }, since: "0x0", }, ], outputs: from.outputs.length === 0 ? [ { lock, type, capacity: outputCapacity, }, ] : [ ...from.outputs, { lock, type, capacity: outputCapacity, }, ], outputsData: from.outputsData.length === 0 ? [outputData] : [ ...from.outputsData, outputData, ], witnesses: from.witnesses.length === 0 ? [ [], // Empty secp witness for signing if not provided ...ckbfsWitnesses.map((w) => `0x${Buffer.from(w).toString("hex")}`), ] : [ ...from.witnesses, ...ckbfsWitnesses.map((w) => `0x${Buffer.from(w).toString("hex")}`), ], }); } else { preTx = core_1.Transaction.from({ inputs: [ { previousOutput: { txHash: outPoint.txHash, index: outPoint.index, }, since: "0x0", }, ], outputs: [ { lock, type, capacity: outputCapacity, }, ], outputsData: [outputData], witnesses: [ [], // Empty secp witness for signing ...ckbfsWitnesses.map((w) => `0x${Buffer.from(w).toString("hex")}`), ], }); } // Add the CKBFS dep group cell dependency preTx.addCellDeps({ outPoint: { txHash: ensureHexPrefix(config.depTxHash), index: config.depIndex || 0, }, depType: "depGroup", }); const outputIndex = from ? 
from.outputs.length : 0; return { tx: preTx, outputIndex }; } /** * Creates a transaction for appending content to a CKBFS file * @param signer The signer to use for the transaction * @param options Options for appending content * @returns Promise resolving to the created transaction */ async function createAppendTransaction(signer, options) { const { ckbfsCell, feeRate, } = options; const { lock } = ckbfsCell; // Use prepareAppendTransaction to create the base transaction const { tx: preTx, outputIndex } = await prepareAppendTransaction(options); // Get the recommended address to ensure lock script cell deps are included const address = await signer.getRecommendedAddressObj(); const inputsBefore = preTx.inputs.length; // If we need more capacity than the original cell had, add additional inputs if (preTx.outputs[outputIndex].capacity > ckbfsCell.capacity) { console.log(`Need additional capacity: ${preTx.outputs[outputIndex].capacity - ckbfsCell.capacity} shannons`); // Add more inputs to cover the increased capacity await preTx.completeInputsByCapacity(signer); } const witnesses = []; // add empty witness for signer if ckbfs's lock is the same as signer's lock if (address.script.hash() === lock.hash()) { witnesses.push("0x"); } // add ckbfs witnesses (skip the first witness which is for signing) witnesses.push(...preTx.witnesses.slice(1)); // Add empty witnesses for additional signer inputs // This is to ensure that the transaction is valid and can be signed for (let i = inputsBefore; i < preTx.inputs.length; i++) { witnesses.push("0x"); } preTx.witnesses = witnesses; // Complete fee await preTx.completeFeeChangeToLock(signer, address.script, feeRate || 2000); return preTx; } /** * Creates a transaction for appending content to a CKBFS file * @param signer The signer to use for the transaction * @param options Options for appending content * @returns Promise resolving to the created transaction */ async function createAppendTransactionDry(signer, options) { const { ckbfsCell, contentChunks, network = constants_1.DEFAULT_NETWORK, version = constants_1.DEFAULT_VERSION, } = options; const { outPoint, data, type, lock, capacity } = ckbfsCell; // Get CKBFS script config early to use version info const config = (0, constants_1.getCKBFSScriptConfig)(network, version); // Create CKBFS witnesses - each chunk already includes the CKBFS header // Pass 0 as version byte - this is the protocol version byte in the witness header // not to be confused with the Protocol Version (V1 vs V2) const ckbfsWitnesses = (0, witness_1.createChunkedCKBFSWitnesses)(contentChunks); // Combine the new content chunks for checksum calculation const combinedContent = Buffer.concat(contentChunks); // Update the existing checksum with the new content - this matches Adler32's // cumulative nature as required by Rule 11 in the RFC const contentChecksum = await (0, checksum_1.updateChecksum)(data.checksum, combinedContent); console.log(`Updated checksum from ${data.checksum} to ${contentChecksum} for appended content`); // Get the recommended address to ensure lock script cell deps are included const address = await signer.getRecommendedAddressObj(); // Calculate the actual witness indices where our content is placed // CKBFS data starts at index 1 if signer's lock script is the same as ckbfs's lock script // else CKBFS data starts at index 0 const contentStartIndex = address.script.hash() === lock.hash() ? 
1 : 0; const witnessIndices = Array.from({ length: contentChunks.length }, (_, i) => contentStartIndex + i); // Create backlink for the current state based on version let newBackLink; if (version === constants_1.ProtocolVersion.V1) { // V1 format: Use index field (single number) newBackLink = { // In V1, field order is index, checksum, txHash // and index is a single number value, not an array index: data.index || (data.indexes && data.indexes.length > 0 ? data.indexes[0] : 0), checksum: data.checksum, txHash: outPoint.txHash, }; } else { // V2 format: Use indexes field (array of numbers) newBackLink = { // In V2, field order is indexes, checksum, txHash // and indexes is an array of numbers indexes: data.indexes || (data.index ? [data.index] : []), checksum: data.checksum, txHash: outPoint.txHash, }; } // Update backlinks - add the new one to the existing backlinks array const backLinks = [...(data.backLinks || []), newBackLink]; // Define output data based on version let outputData; if (version === constants_1.ProtocolVersion.V1) { // In V1, index is a single number, not an array // The first witness index is used (V1 can only reference one witness) outputData = molecule_1.CKBFSData.pack({ index: witnessIndices[0], // Use only the first index as a number checksum: contentChecksum, contentType: data.contentType, filename: data.filename, backLinks, }, constants_1.ProtocolVersion.V1); // Explicitly use V1 for packing } else { // In V2, indexes is an array of witness indices outputData = molecule_1.CKBFSData.pack({ indexes: witnessIndices, checksum: contentChecksum, contentType: data.contentType, filename: data.filename, backLinks, }, constants_1.ProtocolVersion.V2); // Explicitly use V2 for packing } // Calculate the required capacity for the output cell // This accounts for: // 1. The output data size // 2. The type script's occupied size // 3. The lock script's occupied size // 4. A constant of 8 bytes (for header overhead) const ckbfsCellSize = BigInt(outputData.length + type.occupiedSize + lock.occupiedSize + 8) * 100000000n; console.log(`Original capacity: ${capacity}, Calculated size: ${ckbfsCellSize}, Data size: ${outputData.length}`); // Use the maximum value between calculated size and original capacity // to ensure we have enough capacity but don't decrease capacity unnecessarily const outputCapacity = ckbfsCellSize > capacity ? 
ckbfsCellSize : capacity; // Create initial transaction with the CKBFS cell input const tx = core_1.Transaction.from({ inputs: [ { previousOutput: { txHash: outPoint.txHash, index: outPoint.index, }, since: "0x0", }, ], outputs: [ { lock, type, capacity: outputCapacity, }, ], outputsData: [outputData], }); // Add the CKBFS dep group cell dependency tx.addCellDeps({ outPoint: { txHash: ensureHexPrefix(config.depTxHash), index: config.depIndex || 0, }, depType: "depGroup", }); const inputsBefore = tx.inputs.length; // // If we need more capacity than the original cell had, add additional inputs // if (outputCapacity > capacity) { // console.log( // `Need additional capacity: ${outputCapacity - capacity} shannons`, // ); // // Add more inputs to cover the increased capacity // await tx.completeInputsByCapacity(signer); // } const witnesses = []; // add empty witness for signer if ckbfs's lock is the same as signer's lock if (address.script.hash() === lock.hash()) { witnesses.push("0x"); } // add ckbfs witnesses witnesses.push(...ckbfsWitnesses.map((w) => `0x${Buffer.from(w).toString("hex")}`)); // Add empty witnesses for signer's input // This is to ensure that the transaction is valid and can be signed for (let i = inputsBefore; i < tx.inputs.length; i++) { witnesses.push("0x"); } tx.witnesses = witnesses; // Complete fee //await tx.completeFeeChangeToLock(signer, address.script, feeRate || 2000); return tx; } /** * Creates a complete transaction for publishing a file to CKBFS * @param signer The signer to use for the transaction * @param options Options for publishing the file * @returns Promise resolving to the signed transaction */ async function publishCKBFS(signer, options) { const tx = await createPublishTransaction(signer, options); return signer.signTransaction(tx); } /** * Creates a complete transaction for appending content to a CKBFS file * @param signer The signer to use for the transaction * @param options Options for appending content * @returns Promise resolving to the signed transaction */ async function appendCKBFS(signer, options) { const tx = await createAppendTransaction(signer, options); return signer.signTransaction(tx); }
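
The append path mirrors the publish path: appendCKBFS calls createAppendTransaction, which consumes the live CKBFS cell and emits a new one whose data carries the cumulatively updated Adler-32 checksum and a backlink to the previous outpoint. Below is a minimal sketch of driving it, assuming the caller already tracks the CKBFS cell's outpoint, scripts, capacity, and unpacked data (for example from the publish step); the ckbfsCell option shape follows the destructuring in prepareAppendTransaction above, and how the cell state is obtained is left open.

// Hypothetical append sketch: option names mirror prepareAppendTransaction above.
// The caller supplies the current CKBFS cell's outpoint, lock/type scripts,
// capacity, and its unpacked CKBFSData (checksum, contentType, filename,
// indexes, backLinks); obtaining these via RPC lookup or local bookkeeping
// is up to the integration. network/version fall back to the module defaults.
const { appendCKBFS } = require("@ckbfs/api");

async function appendToFile(signer, ckbfsCell, moreContent) {
  const signedTx = await appendCKBFS(signer, {
    ckbfsCell: {
      outPoint: ckbfsCell.outPoint,   // { txHash, index } of the live CKBFS cell
      data: ckbfsCell.data,           // unpacked CKBFSData of that cell
      type: ckbfsCell.type,           // CKBFS type script (type ID args)
      lock: ckbfsCell.lock,           // current lock script
      capacity: ckbfsCell.capacity,   // current capacity in shannons
    },
    contentChunks: [Buffer.from(moreContent, "utf8")],
    feeRate: 2000,
  });
  return signer.client.sendTransaction(signedTx);
}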