@pod-protocol/sdk
Version:
TypeScript SDK for PoD Protocol - AI agent communication on Solana
1,385 lines (1,371 loc) • 42.8 kB
JavaScript
'use strict';
var base$1 = require('../base-4VR-G3Dc.js');
var keccak = require('keccak');
require('../types-OQd1rGtn.js');
require('node:events');
require('ws');
/**
 * Byte-wise equality for two binary buffers.
 * Returns true when both refer to the same object, or when they have the
 * same byteLength and identical bytes at every index.
 */
function equals$1(aa, bb) {
    if (aa === bb) {
        return true;
    }
    if (aa.byteLength !== bb.byteLength) {
        return false;
    }
    // Compare back-to-front; any mismatch short-circuits.
    let idx = aa.byteLength;
    while (idx-- > 0) {
        if (aa[idx] !== bb[idx]) {
            return false;
        }
    }
    return true;
}
/**
 * Normalize any binary input (Uint8Array, ArrayBuffer, or other
 * ArrayBuffer view) to a plain Uint8Array without copying the data.
 * Throws for anything that is not a recognized binary type.
 */
function coerce(o) {
    if (o instanceof Uint8Array) {
        // A plain (non-subclassed) Uint8Array passes through untouched;
        // subclasses fall through and get re-wrapped as a plain view.
        if (o.constructor.name === 'Uint8Array') {
            return o;
        }
        return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
    }
    if (ArrayBuffer.isView(o)) {
        return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
    }
    if (o instanceof ArrayBuffer) {
        return new Uint8Array(o);
    }
    throw new Error('Unknown type, must be binary type');
}
/* eslint-disable */
// base-x encoding / decoding
// Copyright (c) 2018 base-x contributors
// Copyright (c) 2014-2018 The Bitcoin Core developers (base58.cpp)
// Distributed under the MIT software license, see the accompanying
// file LICENSE or http://www.opensource.org/licenses/mit-license.php.
/**
 * base-x codec factory (vendored from the `base-x` npm package, see the
 * license header above). Returns `{ encode, decodeUnsafe, decode }` for an
 * arbitrary-radix alphabet such as base58btc or base36.
 *
 * Note: `decodeUnsafe` returns `undefined` (not a throw) for inputs with
 * spaces or characters outside the alphabet; `decode` wraps it and throws.
 *
 * @param {string} ALPHABET
 * @param {any} name
 */
function base(ALPHABET, name) {
    if (ALPHABET.length >= 255) {
        throw new TypeError('Alphabet too long');
    }
    // Reverse lookup table: char code -> digit value; 255 marks
    // "not part of the alphabet".
    var BASE_MAP = new Uint8Array(256);
    for (var j = 0; j < BASE_MAP.length; j++) {
        BASE_MAP[j] = 255;
    }
    for (var i = 0; i < ALPHABET.length; i++) {
        var x = ALPHABET.charAt(i);
        var xc = x.charCodeAt(0);
        // Each character may appear only once in the alphabet.
        if (BASE_MAP[xc] !== 255) {
            throw new TypeError(x + ' is ambiguous');
        }
        BASE_MAP[xc] = i;
    }
    var BASE = ALPHABET.length;
    var LEADER = ALPHABET.charAt(0);
    var FACTOR = Math.log(BASE) / Math.log(256); // log(BASE) / log(256), rounded up
    var iFACTOR = Math.log(256) / Math.log(BASE); // log(256) / log(BASE), rounded up
    /**
     * Encode bytes into a base-X string. Leading zero bytes map to the
     * alphabet's leader character (e.g. '1' for base58btc).
     * @param {any[] | Iterable<number>} source
     */
    function encode(source) {
        // @ts-ignore
        if (source instanceof Uint8Array)
            ;
        else if (ArrayBuffer.isView(source)) {
            source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
        }
        else if (Array.isArray(source)) {
            source = Uint8Array.from(source);
        }
        if (!(source instanceof Uint8Array)) {
            throw new TypeError('Expected Uint8Array');
        }
        if (source.length === 0) {
            return '';
        }
        // Skip & count leading zeroes.
        var zeroes = 0;
        var length = 0;
        var pbegin = 0;
        var pend = source.length;
        while (pbegin !== pend && source[pbegin] === 0) {
            pbegin++;
            zeroes++;
        }
        // Allocate enough space in big-endian base58 representation.
        var size = ((pend - pbegin) * iFACTOR + 1) >>> 0;
        var b58 = new Uint8Array(size);
        // Process the bytes (schoolbook big-integer base conversion).
        while (pbegin !== pend) {
            var carry = source[pbegin];
            // Apply "b58 = b58 * 256 + ch".
            var i = 0;
            for (var it1 = size - 1; (carry !== 0 || i < length) && (it1 !== -1); it1--, i++) {
                carry += (256 * b58[it1]) >>> 0;
                b58[it1] = (carry % BASE) >>> 0;
                carry = (carry / BASE) >>> 0;
            }
            if (carry !== 0) {
                throw new Error('Non-zero carry');
            }
            length = i;
            pbegin++;
        }
        // Skip leading zeroes in base58 result.
        var it2 = size - length;
        while (it2 !== size && b58[it2] === 0) {
            it2++;
        }
        // Translate the result into a string.
        var str = LEADER.repeat(zeroes);
        for (; it2 < size; ++it2) {
            str += ALPHABET.charAt(b58[it2]);
        }
        return str;
    }
    /**
     * Decode a base-X string into bytes, or return `undefined` for
     * invalid input (leading/trailing spaces, non-alphabet characters).
     * @param {string | string[]} source
     */
    function decodeUnsafe(source) {
        if (typeof source !== 'string') {
            throw new TypeError('Expected String');
        }
        if (source.length === 0) {
            return new Uint8Array();
        }
        var psz = 0;
        // Skip leading spaces.
        if (source[psz] === ' ') {
            return;
        }
        // Skip and count leading '1's.
        var zeroes = 0;
        var length = 0;
        while (source[psz] === LEADER) {
            zeroes++;
            psz++;
        }
        // Allocate enough space in big-endian base256 representation.
        var size = (((source.length - psz) * FACTOR) + 1) >>> 0; // log(58) / log(256), rounded up.
        var b256 = new Uint8Array(size);
        // Process the characters.
        while (source[psz]) {
            // Decode character
            var carry = BASE_MAP[source.charCodeAt(psz)];
            // Invalid character
            if (carry === 255) {
                return;
            }
            var i = 0;
            for (var it3 = size - 1; (carry !== 0 || i < length) && (it3 !== -1); it3--, i++) {
                carry += (BASE * b256[it3]) >>> 0;
                b256[it3] = (carry % 256) >>> 0;
                carry = (carry / 256) >>> 0;
            }
            if (carry !== 0) {
                throw new Error('Non-zero carry');
            }
            length = i;
            psz++;
        }
        // Skip trailing spaces.
        if (source[psz] === ' ') {
            return;
        }
        // Skip leading zeroes in b256.
        var it4 = size - length;
        while (it4 !== size && b256[it4] === 0) {
            it4++;
        }
        // Re-attach the leading zero bytes counted earlier.
        var vch = new Uint8Array(zeroes + (size - it4));
        var j = zeroes;
        while (it4 !== size) {
            vch[j++] = b256[it4++];
        }
        return vch;
    }
    /**
     * Throwing variant of `decodeUnsafe`.
     * @param {string | string[]} string
     */
    function decode(string) {
        var buffer = decodeUnsafe(string);
        if (buffer) {
            return buffer;
        }
        throw new Error(`Non-${name} character`);
    }
    return {
        encode: encode,
        decodeUnsafe: decodeUnsafe,
        decode: decode
    };
}
// Aliases preserved from the upstream multiformats bundling step.
var src = base;
var _brrp__multiformats_scope_baseX = src;
/**
 * Class represents both BaseEncoder and MultibaseEncoder, meaning it can
 * be used to encode to multibase or base encode without a multibase
 * prefix. `encode` prepends `prefix` to the result of `baseEncode`.
 */
class Encoder {
    name;
    prefix;
    baseEncode;
    constructor(name, prefix, baseEncode) {
        this.name = name;
        this.prefix = prefix;
        this.baseEncode = baseEncode;
    }
    encode(bytes) {
        // Only plain binary input is accepted.
        if (!(bytes instanceof Uint8Array)) {
            throw Error('Unknown type, must be binary type');
        }
        return `${this.prefix}${this.baseEncode(bytes)}`;
    }
}
/**
 * Class represents both BaseDecoder and MultibaseDecoder so it can be used
 * to decode multibase strings (with a matching prefix) or plain
 * base-encoded strings in the corresponding encoding.
 */
class Decoder {
    name;
    prefix;
    baseDecode;
    prefixCodePoint;
    constructor(name, prefix, baseDecode) {
        this.name = name;
        this.prefix = prefix;
        const codePoint = prefix.codePointAt(0);
        /* c8 ignore next 3 */
        if (codePoint === undefined) {
            throw new Error('Invalid prefix character');
        }
        this.prefixCodePoint = codePoint;
        this.baseDecode = baseDecode;
    }
    decode(text) {
        if (typeof text !== 'string') {
            throw Error('Can only multibase decode strings');
        }
        // The multibase prefix must match before handing off to baseDecode.
        if (text.codePointAt(0) !== this.prefixCodePoint) {
            throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
        }
        return this.baseDecode(text.slice(this.prefix.length));
    }
    or(decoder) {
        return or(this, decoder);
    }
}
/**
 * A decoder assembled from several per-prefix decoders. `decode` dispatches
 * on the multibase prefix (first character) of the input and forwards the
 * whole string to the matching decoder.
 */
class ComposedDecoder {
    decoders;
    constructor(decoders) {
        this.decoders = decoders;
    }
    or(decoder) {
        return or(this, decoder);
    }
    decode(input) {
        const decoder = this.decoders[input[0]];
        if (decoder == null) {
            throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
        }
        return decoder.decode(input);
    }
}
/**
 * Combine two decoders (simple or composed) into a composed decoder whose
 * dispatch table is the union of both; `right` wins on prefix collisions.
 */
function or(left, right) {
    const table = {
        ...(left.decoders ?? { [left.prefix]: left }),
        ...(right.decoders ?? { [right.prefix]: right })
    };
    return new ComposedDecoder(table);
}
/**
 * A multibase codec: pairs an Encoder and a Decoder for one base encoding
 * and exposes convenience `encode`/`decode` that delegate to them.
 */
class Codec {
    name;
    prefix;
    baseEncode;
    baseDecode;
    encoder;
    decoder;
    constructor(codecName, codecPrefix, encodeFn, decodeFn) {
        this.name = codecName;
        this.prefix = codecPrefix;
        this.baseEncode = encodeFn;
        this.baseDecode = decodeFn;
        this.encoder = new Encoder(codecName, codecPrefix, encodeFn);
        this.decoder = new Decoder(codecName, codecPrefix, decodeFn);
    }
    encode(input) {
        return this.encoder.encode(input);
    }
    decode(input) {
        return this.decoder.decode(input);
    }
}
/**
 * Build a Codec from a `{ name, prefix, encode, decode }` spec object.
 */
function from(spec) {
    const { name, prefix, encode, decode } = spec;
    return new Codec(name, prefix, encode, decode);
}
/**
 * Factory for arbitrary-radix (non power-of-two) multibase codecs such as
 * base58btc and base36, backed by the vendored base-x implementation.
 * Decoded output is coerced to a plain Uint8Array.
 */
function baseX({ name, prefix, alphabet }) {
    const codec = _brrp__multiformats_scope_baseX(alphabet, name);
    return from({
        prefix,
        name,
        encode: codec.encode,
        decode: (text) => coerce(codec.decode(text))
    });
}
/**
 * Decode an RFC4648-style (power-of-two radix) string into bytes.
 * Trailing '=' padding is ignored; a non-alphabet character or a dangling
 * partial byte raises SyntaxError.
 */
function decode$3(string, alphabetIdx, bitsPerChar, name) {
    // Trim trailing '=' padding characters.
    let end = string.length;
    while (string[end - 1] === '=') {
        --end;
    }
    // One output byte per full 8 bits of payload.
    const out = new Uint8Array((end * bitsPerChar / 8) | 0);
    let bitCount = 0;    // bits currently held in the accumulator
    let accumulator = 0; // pending bits, MSB first
    let outPos = 0;      // next output byte to write
    for (let pos = 0; pos < end; ++pos) {
        const value = alphabetIdx[string[pos]];
        if (value === undefined) {
            throw new SyntaxError(`Non-${name} character`);
        }
        accumulator = (accumulator << bitsPerChar) | value;
        bitCount += bitsPerChar;
        // Emit a byte whenever at least 8 bits are buffered.
        if (bitCount >= 8) {
            bitCount -= 8;
            out[outPos++] = 0xff & (accumulator >> bitCount);
        }
    }
    // Leftover bits must be fewer than one character and all zero.
    if (bitCount >= bitsPerChar || (0xff & (accumulator << (8 - bitCount))) !== 0) {
        throw new SyntaxError('Unexpected end of data');
    }
    return out;
}
/**
 * Encode bytes into an RFC4648-style (power-of-two radix) string.
 * If the alphabet's last character is '=', the output is padded to a byte
 * boundary with '=' characters.
 */
function encode$1(data, alphabet, bitsPerChar) {
    const pad = alphabet[alphabet.length - 1] === '=';
    const mask = (1 << bitsPerChar) - 1;
    let out = '';
    let bitCount = 0;    // bits currently held in the accumulator
    let accumulator = 0; // pending bits, MSB first
    for (const byte of data) {
        accumulator = (accumulator << 8) | byte;
        bitCount += 8;
        // Emit characters while a full character's worth of bits is buffered.
        while (bitCount > bitsPerChar) {
            bitCount -= bitsPerChar;
            out += alphabet[mask & (accumulator >> bitCount)];
        }
    }
    // Flush a final partial character, left-aligned with zero bits.
    if (bitCount !== 0) {
        out += alphabet[mask & (accumulator << (bitsPerChar - bitCount))];
    }
    // Pad out to a byte boundary when the alphabet requests padding.
    if (pad) {
        while (((out.length * bitsPerChar) & 7) !== 0) {
            out += '=';
        }
    }
    return out;
}
/**
 * Build the character -> digit-value lookup table for an alphabet string.
 */
function createAlphabetIdx(alphabet) {
    const alphabetIdx = {};
    let value = 0;
    for (const ch of alphabet) {
        alphabetIdx[ch] = value++;
    }
    return alphabetIdx;
}
/**
 * RFC4648 Factory — builds a multibase Codec for a power-of-two base
 * (base32 variants, etc.) from its alphabet and bits-per-character.
 */
function rfc4648({ name, prefix, bitsPerChar, alphabet }) {
    const alphabetIdx = createAlphabetIdx(alphabet);
    const encode = (input) => encode$1(input, alphabet, bitsPerChar);
    const decode = (input) => decode$3(input, alphabetIdx, bitsPerChar, name);
    return from({ prefix, name, encode, decode });
}
// Multibase codec instantiations. Only `base32`, `base36` and `base58btc`
// are bound to names referenced later in this file; the remaining factory
// calls are retained from the upstream multiformats bundle but their
// return values are discarded here.
const base32 = rfc4648({
    prefix: 'b',
    name: 'base32',
    alphabet: 'abcdefghijklmnopqrstuvwxyz234567',
    bitsPerChar: 5
});
rfc4648({
    prefix: 'B',
    name: 'base32upper',
    alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
    bitsPerChar: 5
});
rfc4648({
    prefix: 'c',
    name: 'base32pad',
    alphabet: 'abcdefghijklmnopqrstuvwxyz234567=',
    bitsPerChar: 5
});
rfc4648({
    prefix: 'C',
    name: 'base32padupper',
    alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=',
    bitsPerChar: 5
});
rfc4648({
    prefix: 'v',
    name: 'base32hex',
    alphabet: '0123456789abcdefghijklmnopqrstuv',
    bitsPerChar: 5
});
rfc4648({
    prefix: 'V',
    name: 'base32hexupper',
    alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUV',
    bitsPerChar: 5
});
rfc4648({
    prefix: 't',
    name: 'base32hexpad',
    alphabet: '0123456789abcdefghijklmnopqrstuv=',
    bitsPerChar: 5
});
rfc4648({
    prefix: 'T',
    name: 'base32hexpadupper',
    alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUV=',
    bitsPerChar: 5
});
rfc4648({
    prefix: 'h',
    name: 'base32z',
    alphabet: 'ybndrfg8ejkmcpqxot1uwisza345h769',
    bitsPerChar: 5
});
const base36 = baseX({
    prefix: 'k',
    name: 'base36',
    alphabet: '0123456789abcdefghijklmnopqrstuvwxyz'
});
baseX({
    prefix: 'K',
    name: 'base36upper',
    alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
});
const base58btc = baseX({
    name: 'base58btc',
    prefix: 'z',
    alphabet: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
});
baseX({
    name: 'base58flickr',
    prefix: 'Z',
    alphabet: '123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'
});
/* eslint-disable */
const MSB = 0x80;
const MSBALL = -128;
const INT = Math.pow(2, 31);
/**
 * Unsigned LEB128 (protobuf-style) varint encoder. Writes into `out`
 * starting at `offset` (both optional) and records the number of bytes
 * written on `encode.bytes`.
 * @param {number} num
 * @param {number[]} out
 * @param {number} offset
 */
function encode(num, out, offset) {
    const target = out || [];
    let pos = offset || 0;
    const startPos = pos;
    // Values >= 2^31 cannot be handled with 32-bit bitwise shifts,
    // so peel off 7-bit groups using float division instead.
    while (num >= INT) {
        target[pos++] = (num & 0xFF) | MSB;
        num /= 128;
    }
    // Remaining value fits in 32 bits: shift off 7 bits at a time,
    // setting the continuation bit on every byte but the last.
    while (num & MSBALL) {
        target[pos++] = (num & 0xFF) | MSB;
        num >>>= 7;
    }
    target[pos] = num | 0;
    // @ts-ignore — varint API convention: report bytes written.
    encode.bytes = pos - startPos + 1;
    return target;
}
const encode_1 = encode;
const MSB$1 = 0x80;
const REST$1 = 0x7F;
/**
 * Unsigned LEB128 varint decoder. Reads from `buf` starting at `offset`
 * (default 0), records the number of bytes consumed on `read.bytes`, and
 * throws RangeError when the buffer ends mid-varint.
 * @param {string | any[]} buf
 * @param {number} offset
 */
function read(buf, offset) {
    let res = 0;
    let shift = 0;
    let counter = offset || 0;
    const start = counter;
    const len = buf.length;
    let b;
    do {
        if (counter >= len) {
            // @ts-ignore
            read.bytes = 0;
            throw new RangeError('Could not decode varint');
        }
        b = buf[counter++];
        // Beyond 28 bits, shifting would overflow 32-bit ints, so switch
        // to multiplication for the high-order groups.
        res += shift < 28
            ? (b & REST$1) << shift
            : (b & REST$1) * Math.pow(2, shift);
        shift += 7;
    } while (b >= MSB$1);
    // @ts-ignore
    read.bytes = counter - start;
    return res;
}
const decode$2 = read;
// Breakpoints for the varint length table: 2^(7k) marks where the encoded
// size grows by one byte (7 payload bits per byte).
const N1 = Math.pow(2, 7);
const N2 = Math.pow(2, 14);
const N3 = Math.pow(2, 21);
const N4 = Math.pow(2, 28);
const N5 = Math.pow(2, 35);
const N6 = Math.pow(2, 42);
const N7 = Math.pow(2, 49);
const N8 = Math.pow(2, 56);
const N9 = Math.pow(2, 63);
/**
 * Number of bytes required to varint-encode `value`.
 */
var length = function (/** @type {number} */ value) {
    if (value < N1) return 1;
    if (value < N2) return 2;
    if (value < N3) return 3;
    if (value < N4) return 4;
    if (value < N5) return 5;
    if (value < N6) return 6;
    if (value < N7) return 7;
    if (value < N8) return 8;
    if (value < N9) return 9;
    return 10;
};
// Bundle of the varint primitives, mirroring the `varint` npm package API.
var varint = {
    encode: encode_1,
    decode: decode$2,
    encodingLength: length
};
// Scoped alias used by the multihash/CID helpers below.
var _brrp_varint = varint;
/**
 * Decode a varint from `data` at `offset`.
 * Returns `[value, bytesRead]`.
 */
function decode$1(data, offset = 0) {
    const value = _brrp_varint.decode(data, offset);
    return [value, _brrp_varint.decode.bytes];
}
/**
 * Varint-encode `int` into `target` at `offset` and return `target`.
 */
function encodeTo(int, target, offset = 0) {
    _brrp_varint.encode(int, target, offset);
    return target;
}
/**
 * Number of bytes needed to varint-encode `int`.
 */
function encodingLength(int) {
    return _brrp_varint.encodingLength(int);
}
/**
 * Creates a multihash digest: `<code varint><size varint><digest bytes>`
 * wrapped in a Digest instance.
 */
function create(code, digest) {
    const size = digest.byteLength;
    const sizeOffset = encodingLength(code);
    const digestOffset = sizeOffset + encodingLength(size);
    const bytes = new Uint8Array(digestOffset + size);
    // Layout: code varint, then size varint, then the raw digest.
    encodeTo(code, bytes, 0);
    encodeTo(size, bytes, sizeOffset);
    bytes.set(digest, digestOffset);
    return new Digest(code, size, digest, bytes);
}
/**
 * Turns the bytes representation of a multihash digest into a Digest
 * instance. Throws when the declared size does not match the payload.
 */
function decode(multihash) {
    const bytes = coerce(multihash);
    const [code, sizeOffset] = decode$1(bytes);
    const [size, digestOffset] = decode$1(bytes.subarray(sizeOffset));
    const digest = bytes.subarray(sizeOffset + digestOffset);
    if (digest.byteLength !== size) {
        throw new Error('Incorrect length');
    }
    return new Digest(code, size, digest, bytes);
}
/**
 * Compare two multihash digests: same object, or same code, size and
 * byte-identical `bytes`.
 */
function equals(a, b) {
    if (a === b) {
        return true;
    }
    const data = b;
    return (a.code === data.code &&
        a.size === data.size &&
        data.bytes instanceof Uint8Array &&
        equals$1(a.bytes, data.bytes));
}
/**
 * Represents a multihash digest which carries information about the
 * hashing algorithm and an actual hash digest.
 */
class Digest {
    code;   // multihash algorithm code
    size;   // digest length in bytes
    digest; // raw digest bytes
    bytes;  // full multihash encoding (code + size + digest)
    /**
     * Creates a multihash digest.
     */
    constructor(code, size, digest, bytes) {
        this.code = code;
        this.size = size;
        this.digest = digest;
        this.bytes = bytes;
    }
}
/**
 * Render a CID-like link as a string in the given base (or the default:
 * base58btc for v0, base32 for v1), memoizing via the per-CID cache.
 */
function format(link, base) {
    const { bytes, version } = link;
    if (version === 0) {
        return toStringV0(bytes, baseCache(link), base ?? base58btc.encoder);
    }
    return toStringV1(bytes, baseCache(link), (base ?? base32.encoder));
}
// Per-CID memoization of string renderings, keyed by multibase prefix.
const cache = new WeakMap();
/**
 * Get (or lazily create) the string cache Map for a CID.
 */
function baseCache(cid) {
    let perCid = cache.get(cid);
    if (perCid == null) {
        perCid = new Map();
        cache.set(cid, perCid);
    }
    return perCid;
}
class CID {
    code;      // multicodec content-encoding code
    version;   // CID version: 0 or 1
    multihash; // Digest instance for the content hash
    bytes;     // full binary CID representation
    '/';
    /**
     * @param version - Version of the CID
     * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
     * @param multihash - (Multi)hash of the of the content.
     */
    constructor(version, code, multihash, bytes) {
        this.code = code;
        this.version = version;
        this.multihash = multihash;
        this.bytes = bytes;
        // flag to serializers that this is a CID and
        // should be treated specially
        this['/'] = bytes;
    }
    /**
     * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
     * please either use `CID.asCID(cid)` or switch to new signalling mechanism
     *
     * @deprecated
     */
    get asCID() {
        return this;
    }
    // ArrayBufferView
    get byteOffset() {
        return this.bytes.byteOffset;
    }
    // ArrayBufferView
    get byteLength() {
        return this.bytes.byteLength;
    }
    /**
     * Convert to a CIDv0. Only valid for dag-pb content addressed with
     * sha2-256; throws otherwise.
     */
    toV0() {
        switch (this.version) {
            case 0: {
                return this;
            }
            case 1: {
                const { code, multihash } = this;
                if (code !== DAG_PB_CODE) {
                    throw new Error('Cannot convert a non dag-pb CID to CIDv0');
                }
                // sha2-256
                if (multihash.code !== SHA_256_CODE) {
                    throw new Error('Cannot convert non sha2-256 multihash CID to CIDv0');
                }
                return (CID.createV0(multihash));
            }
            default: {
                throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`);
            }
        }
    }
    /**
     * Convert to a CIDv1 (re-wrapping the multihash); a no-op for v1 CIDs.
     */
    toV1() {
        switch (this.version) {
            case 0: {
                const { code, digest } = this.multihash;
                const multihash = create(code, digest);
                return (CID.createV1(this.code, multihash));
            }
            case 1: {
                return this;
            }
            default: {
                throw Error(`Can not convert CID version ${this.version} to version 1. This is a bug please report`);
            }
        }
    }
    /**
     * Structural equality with another CID-like value.
     */
    equals(other) {
        return CID.equals(this, other);
    }
    /**
     * Two CIDs are equal when code, version and multihash all match.
     */
    static equals(self, other) {
        const unknown = other;
        return (unknown != null &&
            self.code === unknown.code &&
            self.version === unknown.version &&
            equals(self.multihash, unknown.multihash));
    }
    /**
     * String representation in the given base (defaults per version).
     */
    toString(base) {
        return format(this, base);
    }
    /**
     * DAG-JSON style representation: `{ '/': <string cid> }`.
     */
    toJSON() {
        return { '/': format(this) };
    }
    link() {
        return this;
    }
    [Symbol.toStringTag] = 'CID';
    // Legacy
    [Symbol.for('nodejs.util.inspect.custom')]() {
        return `CID(${this.toString()})`;
    }
    /**
     * Takes any input `value` and returns a `CID` instance if it was
     * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
     * it will return value back. If `value` is not instance of this CID
     * class, but is compatible CID it will return new instance of this
     * `CID` class. Otherwise returns null.
     *
     * This allows two different incompatible versions of CID library to
     * co-exist and interop as long as binary interface is compatible.
     */
    static asCID(input) {
        if (input == null) {
            return null;
        }
        const value = input;
        if (value instanceof CID) {
            // If value is instance of CID then we're all set.
            return value;
        }
        else if ((value['/'] != null && value['/'] === value.bytes) || value.asCID === value) {
            // If value isn't instance of this CID class but `this.asCID === this` or
            // `value['/'] === value.bytes` is true it is CID instance coming from a
            // different implementation (diff version or duplicate). In that case we
            // rebase it to this `CID` implementation so caller is guaranteed to get
            // instance with expected API.
            const { version, code, multihash, bytes } = value;
            return new CID(version, code, multihash, bytes ?? encodeCID(version, code, multihash.bytes));
        }
        else if (value[cidSymbol] === true) {
            // If value is a CID from older implementation that used to be tagged via
            // symbol we still rebase it to the this `CID` implementation by
            // delegating that to a constructor.
            const { version, multihash, code } = value;
            const digest = decode(multihash);
            return CID.create(version, code, digest);
        }
        else {
            // Otherwise value is not a CID (or an incompatible version of it) in
            // which case we return `null`.
            return null;
        }
    }
    /**
     * @param version - Version of the CID
     * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
     * @param digest - (Multi)hash of the of the content.
     */
    static create(version, code, digest) {
        if (typeof code !== 'number') {
            throw new Error('String codecs are no longer supported');
        }
        if (!(digest.bytes instanceof Uint8Array)) {
            throw new Error('Invalid digest');
        }
        switch (version) {
            case 0: {
                if (code !== DAG_PB_CODE) {
                    throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`);
                }
                else {
                    // A v0 CID is just the bare multihash bytes.
                    return new CID(version, code, digest, digest.bytes);
                }
            }
            case 1: {
                const bytes = encodeCID(version, code, digest.bytes);
                return new CID(version, code, digest, bytes);
            }
            default: {
                throw new Error('Invalid version');
            }
        }
    }
    /**
     * Simplified version of `create` for CIDv0.
     */
    static createV0(digest) {
        return CID.create(0, DAG_PB_CODE, digest);
    }
    /**
     * Simplified version of `create` for CIDv1.
     *
     * @param code - Content encoding format code.
     * @param digest - Multihash of the content.
     */
    static createV1(code, digest) {
        return CID.create(1, code, digest);
    }
    /**
     * Decoded a CID from its binary representation. The byte array must contain
     * only the CID with no additional bytes.
     *
     * An error will be thrown if the bytes provided do not contain a valid
     * binary representation of a CID.
     */
    static decode(bytes) {
        const [cid, remainder] = CID.decodeFirst(bytes);
        if (remainder.length !== 0) {
            throw new Error('Incorrect length');
        }
        return cid;
    }
    /**
     * Decoded a CID from its binary representation at the beginning of a byte
     * array.
     *
     * Returns an array with the first element containing the CID and the second
     * element containing the remainder of the original byte array. The remainder
     * will be a zero-length byte array if the provided bytes only contained a
     * binary CID representation.
     */
    static decodeFirst(bytes) {
        const specs = CID.inspectBytes(bytes);
        const prefixSize = specs.size - specs.multihashSize;
        const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize));
        if (multihashBytes.byteLength !== specs.multihashSize) {
            throw new Error('Incorrect length');
        }
        // The digest occupies the tail of the multihash bytes.
        const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize);
        const digest = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes);
        const cid = specs.version === 0
            ? CID.createV0(digest)
            : CID.createV1(specs.codec, digest);
        return [cid, bytes.subarray(specs.size)];
    }
    /**
     * Inspect the initial bytes of a CID to determine its properties.
     *
     * Involves decoding up to 4 varints. Typically this will require only 4 to 6
     * bytes but for larger multicodec code values and larger multihash digest
     * lengths these varints can be quite large. It is recommended that at least
     * 10 bytes be made available in the `initialBytes` argument for a complete
     * inspection.
     */
    static inspectBytes(initialBytes) {
        let offset = 0;
        const next = () => {
            const [i, length] = decode$1(initialBytes.subarray(offset));
            offset += length;
            return i;
        };
        let version = next();
        let codec = DAG_PB_CODE;
        if (version === 18) {
            // CIDv0: the first varint is actually the sha2-256 multihash code
            // (0x12 = 18), so rewind and treat the whole buffer as a multihash.
            version = 0;
            offset = 0;
        }
        else {
            codec = next();
        }
        if (version !== 0 && version !== 1) {
            throw new RangeError(`Invalid CID version ${version}`);
        }
        const prefixSize = offset;
        const multihashCode = next(); // multihash code
        const digestSize = next(); // multihash length
        const size = offset + digestSize;
        const multihashSize = size - prefixSize;
        return { version, codec, multihashCode, digestSize, multihashSize, size };
    }
    /**
     * Takes cid in a string representation and creates an instance. If `base`
     * decoder is not provided will use a default from the configuration. It will
     * throw an error if encoding of the CID is not compatible with supplied (or
     * a default decoder).
     */
    static parse(source, base) {
        const [prefix, bytes] = parseCIDtoBytes(source, base);
        const cid = CID.decode(bytes);
        if (cid.version === 0 && source[0] !== 'Q') {
            throw Error('Version 0 CID string must not include multibase prefix');
        }
        // Cache string representation to avoid computing it on `this.toString()`
        baseCache(cid).set(prefix, source);
        return cid;
    }
}
/**
 * Split a CID string into its multibase prefix and decoded bytes.
 * V0 strings ("Q...") carry no prefix, so one is prepended before decoding;
 * anything outside base58btc/base32/base36 requires an explicit decoder.
 */
function parseCIDtoBytes(source, base) {
    const first = source[0];
    if (first === 'Q') {
        // CIDv0 is parsed differently
        const decoder = base ?? base58btc;
        return [
            base58btc.prefix,
            decoder.decode(`${base58btc.prefix}${source}`)
        ];
    }
    if (first === base58btc.prefix) {
        const decoder = base ?? base58btc;
        return [base58btc.prefix, decoder.decode(source)];
    }
    if (first === base32.prefix) {
        const decoder = base ?? base32;
        return [base32.prefix, decoder.decode(source)];
    }
    if (first === base36.prefix) {
        const decoder = base ?? base36;
        return [base36.prefix, decoder.decode(source)];
    }
    if (base == null) {
        throw Error('To parse non base32, base36 or base58btc encoded CID multibase decoder must be provided');
    }
    return [first, base.decode(source)];
}
/**
 * String-encode a v0 CID. Only base58btc is permitted, and the multibase
 * prefix is stripped (v0 strings are bare). Results are memoized in `cache`.
 */
function toStringV0(bytes, cache, base) {
    const { prefix } = base;
    if (prefix !== base58btc.prefix) {
        throw Error(`Cannot string encode V0 in ${base.name} encoding`);
    }
    const cached = cache.get(prefix);
    if (cached != null) {
        return cached;
    }
    // Drop the leading multibase prefix character for v0.
    const encoded = base.encode(bytes).slice(1);
    cache.set(prefix, encoded);
    return encoded;
}
/**
 * String-encode a v1 CID in the given base, memoizing per-prefix in `cache`.
 */
function toStringV1(bytes, cache, base) {
    const { prefix } = base;
    const cached = cache.get(prefix);
    if (cached != null) {
        return cached;
    }
    const encoded = base.encode(bytes);
    cache.set(prefix, encoded);
    return encoded;
}
// Multicodec code for dag-pb — the only codec permitted in CIDv0.
const DAG_PB_CODE = 0x70;
// Multihash code for sha2-256 — the only hash permitted in CIDv0.
const SHA_256_CODE = 0x12;
/**
 * Serialize a CIDv1: `<version varint><codec varint><multihash bytes>`.
 */
function encodeCID(version, code, multihash) {
    const codeOffset = encodingLength(version);
    const hashOffset = codeOffset + encodingLength(code);
    const bytes = new Uint8Array(hashOffset + multihash.byteLength);
    encodeTo(version, bytes, 0);
    encodeTo(code, bytes, codeOffset);
    bytes.set(multihash, hashOffset);
    return bytes;
}
// Symbol tag used by legacy js-cid implementations; checked in CID.asCID.
const cidSymbol = Symbol.for('@ipld/js-cid/CID');
/**
 * IPFS Service for handling off-chain storage of PoD Protocol data
 * Integrates with ZK compression for cost-effective data management
 * Uses Helia (modern IPFS implementation) instead of deprecated js-IPFS
 */
class IPFSService extends base$1.BaseService {
    /**
     * @param rpcUrl - RPC endpoint, forwarded to BaseService
     * @param programId - program id, forwarded to BaseService
     * @param commitment - commitment level, forwarded to BaseService
     * @param config - IPFS options: `{ disabled, timeout, gatewayUrl, heliaConfig }`
     */
    constructor(rpcUrl, programId, commitment, config = {}) {
        super(rpcUrl, programId, commitment);
        this.helia = null; // Helia node instance (lazily created in init())
        this.fs = null; // @helia/unixfs API
        this.jsonStore = null; // @helia/json API
        this.initPromise = null; // in-flight or completed init; null when not started
        this.config = {
            disabled: false,
            timeout: 30000,
            gatewayUrl: 'https://ipfs.io/ipfs',
            ...config
        };
    }
    /**
     * Initialize Helia node and services.
     *
     * Concurrent callers share a single in-flight initialization. Fix: if
     * initialization fails, the cached promise is cleared so a later call
     * can retry — previously a single transient failure left a permanently
     * rejected promise cached, disabling IPFS for the instance's lifetime.
     */
    async init() {
        if (this.initPromise) {
            return this.initPromise;
        }
        this.initPromise = (async () => {
            try {
                // Check if IPFS is disabled
                if (this.config.disabled) {
                    console.warn('IPFS functionality is disabled');
                    return; // Skip initialization
                }
                // Create Helia with minimal configuration for Node.js CLI environments
                const config = this.config.heliaConfig || {};
                // Try to create Helia with fallback error handling
                try {
                    // Dynamic imports to avoid immediate native module loading
                    const { createHelia } = await import('helia');
                    const { unixfs } = await import('@helia/unixfs');
                    const { json } = await import('@helia/json');
                    this.helia = await createHelia(config);
                    this.fs = unixfs(this.helia);
                    this.jsonStore = json(this.helia);
                }
                catch (nativeModuleError) {
                    // If native modules fail, throw a more specific error
                    console.warn('Warning: Native IPFS modules unavailable, IPFS features will be disabled');
                    throw new Error(`IPFS functionality requires native modules: ${nativeModuleError.message}`);
                }
            }
            catch (error) {
                // Clear the cached promise so subsequent init() calls can retry
                // instead of returning the same rejection forever.
                this.initPromise = null;
                throw new Error(`Failed to initialize Helia: ${error}`);
            }
        })();
        return this.initPromise;
    }
    /**
     * Ensure Helia is initialized; throws immediately when IPFS is disabled.
     */
    async ensureIPFSInitialized() {
        if (this.config.disabled) {
            throw new Error('IPFS functionality is disabled');
        }
        await this.init();
    }
    /**
     * Store channel message content on IPFS.
     * Wraps the payload in a versioned, timestamped envelope.
     */
    async storeMessageContent(content, attachments = [], metadata = {}) {
        const messageContent = {
            content,
            attachments,
            metadata,
            timestamp: Date.now(),
            version: '1.0.0',
        };
        return this.storeJSON(messageContent);
    }
    /**
     * Store participant extended metadata on IPFS.
     */
    async storeParticipantMetadata(displayName, avatar, permissions = [], customData = {}) {
        const participantMetadata = {
            displayName,
            avatar,
            permissions,
            customData,
            lastUpdated: Date.now(),
        };
        return this.storeJSON(participantMetadata);
    }
    /**
     * Store arbitrary JSON data on IPFS.
     * Returns `{ hash, cid, size, url }` where `hash`/`cid` are the CID
     * object and `size` is the JSON-serialized byte length.
     */
    async storeJSON(data) {
        try {
            await this.ensureIPFSInitialized();
            const cid = await this.jsonStore.add(data);
            // Get size by encoding the data
            const jsonString = JSON.stringify(data);
            const size = new TextEncoder().encode(jsonString).length;
            return {
                hash: cid,
                cid,
                size,
                url: `${this.config.gatewayUrl}/${cid}`,
            };
        }
        catch (error) {
            throw new Error(`Failed to store data on IPFS: ${error}`);
        }
    }
    /**
     * Store raw file data on IPFS.
     * NOTE(review): `filename` is currently unused — the bytes are stored
     * without a directory entry; confirm whether callers expect the name
     * to be preserved.
     */
    async storeFile(data, filename) {
        try {
            await this.ensureIPFSInitialized();
            const cid = await this.fs.addBytes(data);
            return {
                hash: cid,
                cid,
                size: data.length,
                url: `${this.config.gatewayUrl}/${cid}`,
            };
        }
        catch (error) {
            throw new Error(`Failed to store file on IPFS: ${error}`);
        }
    }
    /**
     * Retrieve JSON data from IPFS by CID string.
     */
    async retrieveJSON(hash) {
        try {
            await this.ensureIPFSInitialized();
            const cid = CID.parse(hash);
            const data = await this.jsonStore.get(cid);
            return data;
        }
        catch (error) {
            throw new Error(`Failed to retrieve data from IPFS: ${error}`);
        }
    }
    /**
     * Retrieve message content from IPFS.
     */
    async retrieveMessageContent(hash) {
        return this.retrieveJSON(hash);
    }
    /**
     * Retrieve participant metadata from IPFS.
     */
    async retrieveParticipantMetadata(hash) {
        return this.retrieveJSON(hash);
    }
    /**
     * Retrieve raw file data from IPFS as a single Buffer.
     */
    async retrieveFile(hash) {
        try {
            await this.ensureIPFSInitialized();
            const cid = CID.parse(hash);
            const data = await this.fs.cat(cid);
            // fs.cat yields an async iterable of chunks; collect and join.
            const chunks = [];
            for await (const chunk of data) {
                chunks.push(chunk);
            }
            return Buffer.concat(chunks);
        }
        catch (error) {
            throw new Error(`Failed to retrieve file from IPFS: ${error}`);
        }
    }
    /**
     * Pin content to ensure it stays available.
     */
    async pinContent(hash) {
        try {
            await this.ensureIPFSInitialized();
            const cid = CID.parse(hash);
            await this.helia.pins.add(cid);
        }
        catch (error) {
            throw new Error(`Failed to pin content: ${error}`);
        }
    }
    /**
     * Unpin content to allow garbage collection.
     */
    async unpinContent(hash) {
        try {
            await this.ensureIPFSInitialized();
            const cid = CID.parse(hash);
            await this.helia.pins.rm(cid);
        }
        catch (error) {
            throw new Error(`Failed to unpin content: ${error}`);
        }
    }
    /**
     * Get IPFS node info (peer id plus static version strings).
     */
    async getNodeInfo() {
        try {
            await this.ensureIPFSInitialized();
            return {
                id: this.helia.libp2p.peerId,
                agentVersion: 'helia',
                protocolVersion: '1.0.0'
            };
        }
        catch (error) {
            throw new Error(`Failed to get IPFS node info: ${error}`);
        }
    }
    /**
     * Check if content exists on IPFS.
     * Best-effort: any failure (including init failure) reports `false`.
     */
    async contentExists(hash) {
        try {
            await this.ensureIPFSInitialized();
            const cid = CID.parse(hash);
            await this.fs.stat(cid);
            return true;
        }
        catch (error) {
            return false;
        }
    }
    /**
     * Store channel message content on IPFS.
     */
    async storeChannelMessageContent(content) {
        return this.storeJSON(content);
    }
    /**
     * Store participant extended metadata on IPFS.
     */
    async storeParticipantExtendedMetadata(metadata) {
        return this.storeJSON(metadata);
    }
    /**
     * Retrieve channel message content from IPFS.
     */
    async retrieveChannelMessageContent(hash) {
        return this.retrieveJSON(hash);
    }
    /**
     * Retrieve participant extended metadata from IPFS.
     */
    async retrieveParticipantExtendedMetadata(hash) {
        return this.retrieveJSON(hash);
    }
    /**
     * Cleanup resources: stop the Helia node and drop cached handles.
     */
    async stop() {
        if (this.helia) {
            await this.helia.stop();
            this.helia = null;
            this.fs = null;
            this.jsonStore = null;
        }
    }
    /**
     * Create a content hash for verification
     * Matches the Rust program's hash_to_bn254_field_size_be function
     */
    static createContentHash(content) {
        // Equivalent to `hash_to_bn254_field_size_be` in Rust:
        // https://github.com/Lightprotocol/light-protocol/blob/main/program-libs/hasher/src/hash_to_field_size.rs#L91
        // 1. Hash the UTF-8 bytes with Keccak256 and a bump seed (0xff).
        const keccakHash = keccak('keccak256')
            .update(Buffer.concat([Buffer.from(content, 'utf8'), Buffer.from([0xff])]))
            .digest();
        // 2. Zero the first byte so the result fits within the BN254 field.
        const fieldSizedHash = Buffer.from(keccakHash);
        fieldSizedHash[0] = 0;
        return fieldSizedHash.toString('hex');
    }
    /**
     * Create a metadata hash for participant data
     * Matches the Rust program's metadata hashing
     */
    static createMetadataHash(metadata) {
        const metadataString = JSON.stringify({
            displayName: metadata.displayName || '',
            avatar: metadata.avatar || '',
            permissions: metadata.permissions || [],
            lastUpdated: metadata.lastUpdated
        });
        return this.createContentHash(metadataString);
    }
    /**
     * Batch store multiple content items.
     * Strings/Buffers are stored as files; everything else as JSON.
     * Items are stored sequentially to keep results ordered.
     */
    async batchStore(items) {
        const results = [];
        for (const item of items) {
            if (typeof item.content === 'string' || Buffer.isBuffer(item.content)) {
                results.push(await this.storeFile(Buffer.isBuffer(item.content) ? item.content : Buffer.from(item.content), item.filename));
            }
            else {
                results.push(await this.storeJSON(item.content));
            }
        }
        return results;
    }
    /**
     * Get gateway URL for content.
     */
    getGatewayUrl(hash, gateway = 'https://ipfs.io/ipfs/') {
        return `${gateway}${hash}`;
    }
    /**
     * Validate IPFS hash format (any parseable CID string).
     */
    static isValidIPFSHash(hash) {
        try {
            CID.parse(hash);
            return true;
        }
        catch {
            return false;
        }
    }
    /**
     * Get content from IPFS (wrapper for retrieveJSON)
     * @param hash IPFS hash to retrieve
     * @returns Content data
     */
    async getContent(hash) {
        try {
            return await this.retrieveJSON(hash);
        }
        catch (error) {
            // If JSON retrieval fails, try to get as raw file
            try {
                const buffer = await this.retrieveFile(hash);
                return buffer.toString('utf8');
            }
            catch (fileError) {
                throw new Error(`Failed to get content from IPFS: ${error}`);
            }
        }
    }
    /**
     * Create a hash for content (wrapper for createContentHash)
     * @param content Content to hash
     * @returns Hash string
     */
    createHash(content) {
        return IPFSService.createContentHash(content);
    }
    /**
     * Get IPFS service information
     * @returns Service information
     */
    async getInfo() {
        try {
            const nodeInfo = await this.getNodeInfo();
            return {
                version: nodeInfo?.agentVersion || 'helia',
                status: this.config.disabled ? 'disabled' : 'active',
                nodes: 1,
                storage: {
                    used: 0, // Would need to implement actual storage calculation
                    available: -1 // Unlimited for now
                }
            };
        }
        catch (error) {
            return {
                version: 'unknown',
                status: this.config.disabled ? 'disabled' : 'error',
                nodes: 0,
                storage: {
                    used: 0,
                    available: 0
                }
            };
        }
    }
}
exports.IPFSService = IPFSService;
//# sourceMappingURL=ipfs.js.map