@web5/common
Version:
1,666 lines (1,646 loc) • 73.5 kB
JavaScript
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from2, except, desc) => {
if (from2 && typeof from2 === "object" || typeof from2 === "function") {
for (let key of __getOwnPropNames(from2))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from2[key], enumerable: !(desc = __getOwnPropDesc(from2, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts
// Public export table for the package. Each entry is a lazy getter (see
// __export), so names declared further down the file can be listed here.
var src_exports = {};
__export(src_exports, {
  Convert: () => Convert,
  LevelStore: () => LevelStore,
  MemoryStore: () => MemoryStore,
  Multicodec: () => Multicodec,
  NodeStream: () => NodeStream,
  Readable: () => import_readable_stream2.Readable,
  Stream: () => Stream,
  TtlCache: () => import_ttlcache.default,
  Web5LogLevel: () => Web5LogLevel,
  isArrayBufferSlice: () => isArrayBufferSlice,
  isAsyncIterable: () => isAsyncIterable,
  isDefined: () => isDefined,
  isEmptyObject: () => isEmptyObject,
  logger: () => logger,
  removeEmptyObjects: () => removeEmptyObjects,
  removeUndefinedProperties: () => removeUndefinedProperties,
  universalTypeOf: () => universalTypeOf
});
module.exports = __toCommonJS(src_exports);
// src/cache.ts
// Re-exported third-party TTL cache (surfaced above as `TtlCache`).
var import_ttlcache = __toESM(require("@isaacs/ttlcache"), 1);
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/bytes.js
var empty = new Uint8Array(0);
/**
 * Byte-wise equality check for two array-like byte sequences.
 * Returns true when both refer to the same object or hold identical bytes.
 */
function equals(aa, bb) {
  if (aa === bb) {
    return true;
  }
  if (aa.byteLength !== bb.byteLength) {
    return false;
  }
  let index = 0;
  while (index < aa.byteLength) {
    if (aa[index] !== bb[index]) {
      return false;
    }
    index += 1;
  }
  return true;
}
/**
 * Normalizes a binary value (Uint8Array, ArrayBuffer, or any ArrayBufferView)
 * into a Uint8Array without copying when the input is already a plain
 * Uint8Array. Throws for non-binary inputs.
 */
function coerce(o) {
  if (o instanceof Uint8Array && o.constructor.name === "Uint8Array") {
    return o;
  }
  if (o instanceof ArrayBuffer) {
    return new Uint8Array(o);
  }
  if (ArrayBuffer.isView(o)) {
    // Share the view's backing memory rather than copying.
    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
  }
  throw new Error("Unknown type, must be binary type");
}
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/vendor/base-x.js
// Vendored base-x codec generator. Given an alphabet string, produces an
// { encode, decodeUnsafe, decode } triple for that base using repeated
// big-number division/multiplication over byte buffers.
function base(ALPHABET, name) {
  if (ALPHABET.length >= 255) {
    throw new TypeError("Alphabet too long");
  }
  // Reverse lookup: char code -> digit value; 255 marks "not in alphabet".
  var BASE_MAP = new Uint8Array(256);
  for (var j = 0; j < BASE_MAP.length; j++) {
    BASE_MAP[j] = 255;
  }
  for (var i = 0; i < ALPHABET.length; i++) {
    var x = ALPHABET.charAt(i);
    var xc = x.charCodeAt(0);
    if (BASE_MAP[xc] !== 255) {
      throw new TypeError(x + " is ambiguous");
    }
    BASE_MAP[xc] = i;
  }
  var BASE = ALPHABET.length;
  // First alphabet character encodes a leading zero byte.
  var LEADER = ALPHABET.charAt(0);
  // FACTOR: bytes produced per input character (sizing decode buffers).
  var FACTOR = Math.log(BASE) / Math.log(256);
  // iFACTOR: characters produced per input byte (sizing encode buffers).
  var iFACTOR = Math.log(256) / Math.log(BASE);
  // Encodes bytes into a base-ALPHABET string.
  function encode3(source) {
    if (source instanceof Uint8Array)
      ;
    else if (ArrayBuffer.isView(source)) {
      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
    } else if (Array.isArray(source)) {
      source = Uint8Array.from(source);
    }
    if (!(source instanceof Uint8Array)) {
      throw new TypeError("Expected Uint8Array");
    }
    if (source.length === 0) {
      return "";
    }
    // Count leading zero bytes; they map 1:1 to LEADER characters.
    var zeroes = 0;
    var length2 = 0;
    var pbegin = 0;
    var pend = source.length;
    while (pbegin !== pend && source[pbegin] === 0) {
      pbegin++;
      zeroes++;
    }
    var size = (pend - pbegin) * iFACTOR + 1 >>> 0;
    var b58 = new Uint8Array(size);
    // Long-division style conversion: fold each input byte into the
    // base-BASE digit array, carrying right-to-left.
    while (pbegin !== pend) {
      var carry = source[pbegin];
      var i2 = 0;
      for (var it1 = size - 1; (carry !== 0 || i2 < length2) && it1 !== -1; it1--, i2++) {
        carry += 256 * b58[it1] >>> 0;
        b58[it1] = carry % BASE >>> 0;
        carry = carry / BASE >>> 0;
      }
      if (carry !== 0) {
        throw new Error("Non-zero carry");
      }
      length2 = i2;
      pbegin++;
    }
    // Skip leading zero digits in the work buffer.
    var it2 = size - length2;
    while (it2 !== size && b58[it2] === 0) {
      it2++;
    }
    var str = LEADER.repeat(zeroes);
    for (; it2 < size; ++it2) {
      str += ALPHABET.charAt(b58[it2]);
    }
    return str;
  }
  // Decodes a base-ALPHABET string into bytes; returns undefined (instead of
  // throwing) on invalid characters or leading/trailing spaces.
  function decodeUnsafe(source) {
    if (typeof source !== "string") {
      throw new TypeError("Expected String");
    }
    if (source.length === 0) {
      return new Uint8Array();
    }
    var psz = 0;
    if (source[psz] === " ") {
      return;
    }
    // Leading LEADER characters map 1:1 to zero bytes.
    var zeroes = 0;
    var length2 = 0;
    while (source[psz] === LEADER) {
      zeroes++;
      psz++;
    }
    var size = (source.length - psz) * FACTOR + 1 >>> 0;
    var b256 = new Uint8Array(size);
    // Inverse long division: fold each character's digit value into the
    // base-256 byte array.
    while (source[psz]) {
      var carry = BASE_MAP[source.charCodeAt(psz)];
      if (carry === 255) {
        return;
      }
      var i2 = 0;
      for (var it3 = size - 1; (carry !== 0 || i2 < length2) && it3 !== -1; it3--, i2++) {
        carry += BASE * b256[it3] >>> 0;
        b256[it3] = carry % 256 >>> 0;
        carry = carry / 256 >>> 0;
      }
      if (carry !== 0) {
        throw new Error("Non-zero carry");
      }
      length2 = i2;
      psz++;
    }
    if (source[psz] === " ") {
      return;
    }
    var it4 = size - length2;
    while (it4 !== size && b256[it4] === 0) {
      it4++;
    }
    var vch = new Uint8Array(zeroes + (size - it4));
    var j2 = zeroes;
    while (it4 !== size) {
      vch[j2++] = b256[it4++];
    }
    return vch;
  }
  // Strict decode: throws on any input decodeUnsafe rejects.
  function decode5(string) {
    var buffer = decodeUnsafe(string);
    if (buffer) {
      return buffer;
    }
    throw new Error(`Non-${name} character`);
  }
  return {
    encode: encode3,
    decodeUnsafe,
    decode: decode5
  };
}
var src = base;
var _brrp__multiformats_scope_baseX = src;
var base_x_default = _brrp__multiformats_scope_baseX;
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/bases/base.js
/**
 * Multibase encoder: emits the base's single-character prefix followed by
 * the base-encoded payload.
 */
var Encoder = class {
  name;
  prefix;
  baseEncode;
  /**
   * @param name - human-readable base name (e.g. "base32")
   * @param prefix - multibase prefix character
   * @param baseEncode - function turning bytes into the bare base string
   */
  constructor(name, prefix, baseEncode) {
    this.name = name;
    this.prefix = prefix;
    this.baseEncode = baseEncode;
  }
  /** Encodes `bytes`; rejects anything that is not a Uint8Array. */
  encode(bytes) {
    if (!(bytes instanceof Uint8Array)) {
      throw Error("Unknown type, must be binary type");
    }
    return `${this.prefix}${this.baseEncode(bytes)}`;
  }
};
/**
 * Multibase decoder for a single base: verifies the expected prefix
 * character, strips it, and delegates to the bare base decoder.
 */
var Decoder = class {
  name;
  prefix;
  baseDecode;
  prefixCodePoint;
  /**
   * @param name - human-readable base name
   * @param prefix - multibase prefix character (must be non-empty)
   * @param baseDecode - function turning the bare base string into bytes
   */
  constructor(name, prefix, baseDecode) {
    this.name = name;
    this.prefix = prefix;
    const codePoint = prefix.codePointAt(0);
    if (codePoint === void 0) {
      throw new Error("Invalid prefix character");
    }
    this.prefixCodePoint = codePoint;
    this.baseDecode = baseDecode;
  }
  /** Decodes a prefixed multibase string; throws on wrong prefix or type. */
  decode(text) {
    if (typeof text !== "string") {
      throw Error("Can only multibase decode strings");
    }
    if (text.codePointAt(0) !== this.prefixCodePoint) {
      throw Error(`Unable to decode multibase string ${JSON.stringify(text)}, ${this.name} decoder only supports inputs prefixed with ${this.prefix}`);
    }
    return this.baseDecode(text.slice(this.prefix.length));
  }
  /** Combines this decoder with another into a prefix-dispatching decoder. */
  or(decoder) {
    return or(this, decoder);
  }
};
/**
 * Decoder that dispatches on the input's first (multibase prefix) character
 * to one of several registered per-base decoders.
 */
var ComposedDecoder = class {
  decoders;
  /** @param decoders - map of prefix character -> decoder */
  constructor(decoders) {
    this.decoders = decoders;
  }
  /** Extends this composition with another decoder or composition. */
  or(decoder) {
    return or(this, decoder);
  }
  /** Decodes `input` with the decoder registered for its prefix character. */
  decode(input) {
    const decoder = this.decoders[input[0]];
    if (decoder == null) {
      throw RangeError(`Unable to decode multibase string ${JSON.stringify(input)}, only inputs prefixed with ${Object.keys(this.decoders)} are supported`);
    }
    return decoder.decode(input);
  }
};
/**
 * Merges two decoders (single or composed) into one ComposedDecoder keyed by
 * multibase prefix. Entries from `right` win on prefix collisions.
 */
function or(left, right) {
  const leftDecoders = left.decoders ?? { [left.prefix]: left };
  const rightDecoders = right.decoders ?? { [right.prefix]: right };
  return new ComposedDecoder({ ...leftDecoders, ...rightDecoders });
}
/**
 * Bundles a multibase encoder/decoder pair for one base encoding, exposing
 * both the raw base functions and prefixed multibase encode/decode.
 */
var Codec = class {
  name;
  prefix;
  baseEncode;
  baseDecode;
  encoder;
  decoder;
  /**
   * @param name - base name (e.g. "base58btc")
   * @param prefix - multibase prefix character
   * @param baseEncode - bytes -> bare base string
   * @param baseDecode - bare base string -> bytes
   */
  constructor(name, prefix, baseEncode, baseDecode) {
    Object.assign(this, { name, prefix, baseEncode, baseDecode });
    this.encoder = new Encoder(name, prefix, baseEncode);
    this.decoder = new Decoder(name, prefix, baseDecode);
  }
  /** Multibase-encodes `input` (delegates to the underlying Encoder). */
  encode(input) {
    return this.encoder.encode(input);
  }
  /** Multibase-decodes `input` (delegates to the underlying Decoder). */
  decode(input) {
    return this.decoder.decode(input);
  }
};
/** Builds a Codec from a { name, prefix, encode, decode } descriptor. */
function from({ name, prefix, encode: baseEncode, decode: baseDecode }) {
  return new Codec(name, prefix, baseEncode, baseDecode);
}
/**
 * Builds a Codec for an arbitrary-alphabet base (e.g. base58) on top of the
 * vendored base-x implementation; decode output is coerced to Uint8Array.
 */
function baseX({ name, prefix, alphabet }) {
  const codec = base_x_default(alphabet, name);
  return from({
    prefix,
    name,
    encode: codec.encode,
    decode: (text) => coerce(codec.decode(text))
  });
}
/**
 * Generic RFC 4648 decoder: converts a string over a power-of-two `alphabet`
 * (base2/8/16/32/64 families) into bytes, ignoring trailing "=" padding.
 * Throws SyntaxError on characters outside the alphabet or on leftover bits
 * that are too many or non-zero.
 */
function decode(string, alphabet, bitsPerChar, name) {
  // Reverse lookup: alphabet character -> numeric value.
  const lookup = {};
  for (let idx = 0; idx < alphabet.length; ++idx) {
    lookup[alphabet[idx]] = idx;
  }
  // Trailing "=" padding carries no data; trim it before decoding.
  let end = string.length;
  while (string[end - 1] === "=") {
    --end;
  }
  const bytes = new Uint8Array(end * bitsPerChar / 8 | 0);
  let bitCount = 0;
  let accumulator = 0;
  let byteIndex = 0;
  for (let pos = 0; pos < end; ++pos) {
    const value = lookup[string[pos]];
    if (value === void 0) {
      throw new SyntaxError(`Non-${name} character`);
    }
    // Shift in the next character's bits, emitting full bytes as they form.
    accumulator = accumulator << bitsPerChar | value;
    bitCount += bitsPerChar;
    if (bitCount >= 8) {
      bitCount -= 8;
      bytes[byteIndex++] = 255 & accumulator >> bitCount;
    }
  }
  // Remaining bits must be fewer than one character's worth and all zero.
  if (bitCount >= bitsPerChar || (255 & accumulator << 8 - bitCount) !== 0) {
    throw new SyntaxError("Unexpected end of data");
  }
  return bytes;
}
/**
 * Generic RFC 4648 encoder: converts bytes into a string over a
 * power-of-two `alphabet`. If the alphabet's final character is "=",
 * the output is padded to a whole number of 8-bit groups.
 */
function encode(data, alphabet, bitsPerChar) {
  const pad = alphabet[alphabet.length - 1] === "=";
  const mask = (1 << bitsPerChar) - 1;
  let result = "";
  let bitCount = 0;
  let accumulator = 0;
  for (let pos = 0; pos < data.length; ++pos) {
    // Shift in the next byte, emitting characters while enough bits remain.
    accumulator = accumulator << 8 | data[pos];
    bitCount += 8;
    while (bitCount > bitsPerChar) {
      bitCount -= bitsPerChar;
      result += alphabet[mask & accumulator >> bitCount];
    }
  }
  // Flush any leftover bits, left-aligned within a final character.
  if (bitCount !== 0) {
    result += alphabet[mask & accumulator << bitsPerChar - bitCount];
  }
  if (pad) {
    while ((result.length * bitsPerChar & 7) !== 0) {
      result += "=";
    }
  }
  return result;
}
/**
 * Builds a Codec for an RFC 4648-style base from its name, multibase prefix,
 * bits-per-character, and alphabet (trailing "=" in the alphabet enables
 * output padding).
 */
function rfc4648({ name, prefix, bitsPerChar, alphabet }) {
  const baseEncode = (input) => encode(input, alphabet, bitsPerChar);
  const baseDecode = (input) => decode(input, alphabet, bitsPerChar, name);
  return from({ prefix, name, encode: baseEncode, decode: baseDecode });
}
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/bases/base32.js
// RFC 4648 base32 family. Each codec pairs a multibase prefix character with
// its alphabet; alphabets ending in "=" enable padded output.
var base32 = rfc4648({
  prefix: "b",
  name: "base32",
  alphabet: "abcdefghijklmnopqrstuvwxyz234567",
  bitsPerChar: 5
});
var base32upper = rfc4648({
  prefix: "B",
  name: "base32upper",
  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
  bitsPerChar: 5
});
var base32pad = rfc4648({
  prefix: "c",
  name: "base32pad",
  alphabet: "abcdefghijklmnopqrstuvwxyz234567=",
  bitsPerChar: 5
});
var base32padupper = rfc4648({
  prefix: "C",
  name: "base32padupper",
  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=",
  bitsPerChar: 5
});
var base32hex = rfc4648({
  prefix: "v",
  name: "base32hex",
  alphabet: "0123456789abcdefghijklmnopqrstuv",
  bitsPerChar: 5
});
var base32hexupper = rfc4648({
  prefix: "V",
  name: "base32hexupper",
  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV",
  bitsPerChar: 5
});
var base32hexpad = rfc4648({
  prefix: "t",
  name: "base32hexpad",
  alphabet: "0123456789abcdefghijklmnopqrstuv=",
  bitsPerChar: 5
});
var base32hexpadupper = rfc4648({
  prefix: "T",
  name: "base32hexpadupper",
  alphabet: "0123456789ABCDEFGHIJKLMNOPQRSTUV=",
  bitsPerChar: 5
});
// z-base-32: human-oriented base32 variant with a permuted alphabet.
var base32z = rfc4648({
  prefix: "h",
  name: "base32z",
  alphabet: "ybndrfg8ejkmcpqxot1uwisza345h769",
  bitsPerChar: 5
});
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/bases/base58.js
// Base58 variants (non-power-of-two bases built on base-x).
var base58btc = baseX({
  name: "base58btc",
  prefix: "z",
  alphabet: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
});
var base58flickr = baseX({
  name: "base58flickr",
  prefix: "Z",
  alphabet: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ"
});
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/bases/base64.js
// RFC 4648 base64 family (standard and URL-safe, padded and unpadded).
var base64 = rfc4648({
  prefix: "m",
  name: "base64",
  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
  bitsPerChar: 6
});
var base64pad = rfc4648({
  prefix: "M",
  name: "base64pad",
  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
  bitsPerChar: 6
});
var base64url = rfc4648({
  prefix: "u",
  name: "base64url",
  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_",
  bitsPerChar: 6
});
var base64urlpad = rfc4648({
  prefix: "U",
  name: "base64urlpad",
  alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=",
  bitsPerChar: 6
});
// src/type-utils.ts
/**
 * Returns true when `arrayBufferView` covers only a portion of its backing
 * ArrayBuffer (non-zero offset or shorter length than the buffer).
 */
function isArrayBufferSlice(arrayBufferView) {
  const coversWholeBuffer =
    arrayBufferView.byteOffset === 0 &&
    arrayBufferView.byteLength === arrayBufferView.buffer.byteLength;
  return !coversWholeBuffer;
}
/**
 * Returns true when `obj` is an object implementing the async iterator
 * protocol (has a callable `Symbol.asyncIterator` member).
 */
function isAsyncIterable(obj) {
  if (obj === null || typeof obj !== "object") {
    return false;
  }
  return typeof obj[Symbol.asyncIterator] === "function";
}
/** Returns true when `arg` is neither null nor undefined. */
function isDefined(arg) {
  return arg !== null && arg !== void 0;
}
/**
 * Returns the precise built-in type name of `value` via its
 * `Object.prototype.toString` tag, e.g. "Uint8Array", "Array", "Null".
 */
function universalTypeOf(value) {
  // e.g. "[object Uint8Array]" -> "Uint8Array"
  const tag = Object.prototype.toString.call(value);
  const [, type] = tag.match(/\s([a-zA-Z0-9]+)/);
  return type;
}
// src/convert.ts
var textEncoder = new TextEncoder();
var textDecoder = new TextDecoder();
/**
 * Fluent converter between common data representations.
 *
 * Wrap a value with the static factory naming its *current* format
 * (e.g. `Convert.hex('0a')`, `Convert.string('hi')`), then call a `to*()`
 * method for the desired target format. Unsupported source/target pairings
 * throw a `TypeError`. `AsyncIterable` sources are only consumable through
 * the `*Async()` methods.
 */
var Convert = class _Convert {
  /**
   * @param data - the value to convert
   * @param format2 - name of the format `data` is currently in
   */
  constructor(data, format2) {
    this.data = data;
    this.format = format2;
  }
  /** Wraps an ArrayBuffer. */
  static arrayBuffer(data) {
    return new _Convert(data, "ArrayBuffer");
  }
  /** Wraps an AsyncIterable; throws if `data` is not async-iterable. */
  static asyncIterable(data) {
    if (!isAsyncIterable(data)) {
      throw new TypeError("Input must be of type AsyncIterable.");
    }
    return new _Convert(data, "AsyncIterable");
  }
  /** Wraps a z-base-32 encoded string. */
  static base32Z(data) {
    return new _Convert(data, "Base32Z");
  }
  /** Wraps a base58btc encoded string (no multibase prefix). */
  static base58Btc(data) {
    return new _Convert(data, "Base58Btc");
  }
  /** Wraps a base64url encoded string (unpadded). */
  static base64Url(data) {
    return new _Convert(data, "Base64Url");
  }
  /**
   * Reference:
   * The BufferSource type is a TypeScript type that represents an ArrayBuffer
   * or one of the ArrayBufferView types, such a TypedArray (e.g., Uint8Array)
   * or a DataView.
   */
  static bufferSource(data) {
    return new _Convert(data, "BufferSource");
  }
  /** Wraps a hex string; must be a string with an even character count. */
  static hex(data) {
    if (typeof data !== "string") {
      throw new TypeError("Hex input must be a string.");
    }
    if (data.length % 2 !== 0) {
      throw new TypeError("Hex input must have an even number of characters.");
    }
    return new _Convert(data, "Hex");
  }
  /** Wraps a multibase string (prefix character + base-encoded payload). */
  static multibase(data) {
    return new _Convert(data, "Multibase");
  }
  /** Wraps a JSON-serializable object. */
  static object(data) {
    return new _Convert(data, "Object");
  }
  /** Wraps a plain (UTF-8 encodable) string. */
  static string(data) {
    return new _Convert(data, "String");
  }
  /** Wraps a Uint8Array. */
  static uint8Array(data) {
    return new _Convert(data, "Uint8Array");
  }
  /** Converts the wrapped value to an ArrayBuffer. */
  toArrayBuffer() {
    switch (this.format) {
      case "Base58Btc": {
        return base58btc.baseDecode(this.data).buffer;
      }
      case "Base64Url": {
        return base64url.baseDecode(this.data).buffer;
      }
      case "BufferSource": {
        const dataType = universalTypeOf(this.data);
        if (dataType === "ArrayBuffer") {
          return this.data;
        } else if (ArrayBuffer.isView(this.data)) {
          // A view over part of a larger buffer is copied so the result
          // contains only the viewed bytes.
          if (isArrayBufferSlice(this.data)) {
            return this.data.buffer.slice(this.data.byteOffset, this.data.byteOffset + this.data.byteLength);
          } else {
            return this.data.buffer;
          }
        } else {
          throw new TypeError(`${this.format} value is not of type: ArrayBuffer, DataView, or TypedArray.`);
        }
      }
      case "Hex": {
        return this.toUint8Array().buffer;
      }
      case "String": {
        return this.toUint8Array().buffer;
      }
      case "Uint8Array": {
        // NOTE(review): returns the entire backing buffer even when this view
        // is a slice (non-zero byteOffset / partial byteLength). Use
        // `Convert.bufferSource()` when a slice-accurate copy is required —
        // confirm whether this asymmetry is intentional.
        return this.data.buffer;
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to ArrayBuffer is not supported.`);
    }
  }
  /** Asynchronously converts an AsyncIterable source to an ArrayBuffer. */
  async toArrayBufferAsync() {
    switch (this.format) {
      case "AsyncIterable": {
        const blob = await this.toBlobAsync();
        return await blob.arrayBuffer();
      }
      default:
        throw new TypeError(`Asynchronous conversion from ${this.format} to ArrayBuffer is not supported.`);
    }
  }
  /** Converts the wrapped value to a z-base-32 string. */
  toBase32Z() {
    switch (this.format) {
      case "Uint8Array": {
        return base32z.baseEncode(this.data);
      }
      default:
        // Fixed: message previously said "Base64Z" for the Base32Z target.
        throw new TypeError(`Conversion from ${this.format} to Base32Z is not supported.`);
    }
  }
  /** Converts the wrapped value to a base58btc string (no prefix). */
  toBase58Btc() {
    switch (this.format) {
      case "ArrayBuffer": {
        const u8a = new Uint8Array(this.data);
        return base58btc.baseEncode(u8a);
      }
      case "Multibase": {
        // Strip the single multibase prefix character.
        return this.data.substring(1);
      }
      case "Uint8Array": {
        return base58btc.baseEncode(this.data);
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to Base58Btc is not supported.`);
    }
  }
  /** Converts the wrapped value to an unpadded base64url string. */
  toBase64Url() {
    switch (this.format) {
      case "ArrayBuffer": {
        const u8a = new Uint8Array(this.data);
        return base64url.baseEncode(u8a);
      }
      case "BufferSource": {
        const u8a = this.toUint8Array();
        return base64url.baseEncode(u8a);
      }
      case "Object": {
        // Objects are JSON-serialized, then UTF-8 encoded.
        const string = JSON.stringify(this.data);
        const u8a = textEncoder.encode(string);
        return base64url.baseEncode(u8a);
      }
      case "String": {
        const u8a = textEncoder.encode(this.data);
        return base64url.baseEncode(u8a);
      }
      case "Uint8Array": {
        return base64url.baseEncode(this.data);
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to Base64Url is not supported.`);
    }
  }
  /** Asynchronously collects an AsyncIterable source into a Blob. */
  async toBlobAsync() {
    switch (this.format) {
      case "AsyncIterable": {
        const chunks = [];
        for await (const chunk of this.data) {
          chunks.push(chunk);
        }
        const blob = new Blob(chunks);
        return blob;
      }
      default:
        throw new TypeError(`Asynchronous conversion from ${this.format} to Blob is not supported.`);
    }
  }
  /** Converts the wrapped value to a lowercase hex string. */
  toHex() {
    // Precomputed byte -> two-character lookup table ("00".."ff").
    const hexes = Array.from({ length: 256 }, (v, i) => i.toString(16).padStart(2, "0"));
    switch (this.format) {
      case "ArrayBuffer": {
        const u8a = this.toUint8Array();
        return _Convert.uint8Array(u8a).toHex();
      }
      case "Base64Url": {
        const u8a = this.toUint8Array();
        return _Convert.uint8Array(u8a).toHex();
      }
      case "Uint8Array": {
        let hex = "";
        for (let i = 0; i < this.data.length; i++) {
          hex += hexes[this.data[i]];
        }
        return hex;
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to Hex is not supported.`);
    }
  }
  /** Converts the wrapped value to a multibase string. */
  toMultibase() {
    switch (this.format) {
      case "Base58Btc": {
        // "z" is the multibase prefix for base58btc.
        return `z${this.data}`;
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to Multibase is not supported.`);
    }
  }
  /** Converts the wrapped value to an object by JSON-parsing it. */
  toObject() {
    switch (this.format) {
      case "Base64Url": {
        const u8a = base64url.baseDecode(this.data);
        const text = textDecoder.decode(u8a);
        return JSON.parse(text);
      }
      case "String": {
        return JSON.parse(this.data);
      }
      case "Uint8Array": {
        const text = textDecoder.decode(this.data);
        return JSON.parse(text);
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to Object is not supported.`);
    }
  }
  /** Asynchronously converts an AsyncIterable source to a parsed object. */
  async toObjectAsync() {
    switch (this.format) {
      case "AsyncIterable": {
        const text = await this.toStringAsync();
        const json = JSON.parse(text);
        return json;
      }
      default:
        throw new TypeError(`Asynchronous conversion from ${this.format} to Object is not supported.`);
    }
  }
  /** Converts the wrapped value to a string (UTF-8 decode or JSON). */
  toString() {
    switch (this.format) {
      case "ArrayBuffer": {
        return textDecoder.decode(this.data);
      }
      case "Base64Url": {
        const u8a = base64url.baseDecode(this.data);
        return textDecoder.decode(u8a);
      }
      case "Object": {
        return JSON.stringify(this.data);
      }
      case "Uint8Array": {
        return textDecoder.decode(this.data);
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to String is not supported.`);
    }
  }
  /** Asynchronously converts an AsyncIterable source to a string. */
  async toStringAsync() {
    switch (this.format) {
      case "AsyncIterable": {
        let str = "";
        for await (const chunk of this.data) {
          if (typeof chunk === "string")
            str += chunk;
          else
            // Streaming decode so multi-byte characters split across chunk
            // boundaries are handled correctly.
            str += textDecoder.decode(chunk, { stream: true });
        }
        // Flush any bytes buffered by the streaming decoder.
        str += textDecoder.decode(void 0, { stream: false });
        return str;
      }
      default:
        throw new TypeError(`Asynchronous conversion from ${this.format} to String is not supported.`);
    }
  }
  /** Converts the wrapped value to a Uint8Array. */
  toUint8Array() {
    switch (this.format) {
      case "ArrayBuffer": {
        return new Uint8Array(this.data);
      }
      case "Base32Z": {
        return base32z.baseDecode(this.data);
      }
      case "Base58Btc": {
        return base58btc.baseDecode(this.data);
      }
      case "Base64Url": {
        return base64url.baseDecode(this.data);
      }
      case "BufferSource": {
        const dataType = universalTypeOf(this.data);
        if (dataType === "Uint8Array") {
          return this.data;
        } else if (dataType === "ArrayBuffer") {
          return new Uint8Array(this.data);
        } else if (ArrayBuffer.isView(this.data)) {
          return new Uint8Array(this.data.buffer, this.data.byteOffset, this.data.byteLength);
        } else {
          throw new TypeError(`${this.format} value is not of type: ArrayBuffer, DataView, or TypedArray.`);
        }
      }
      case "Hex": {
        const u8a = new Uint8Array(this.data.length / 2);
        for (let i = 0; i < this.data.length; i += 2) {
          const byteValue = parseInt(this.data.substring(i, i + 2), 16);
          if (isNaN(byteValue)) {
            throw new TypeError("Input is not a valid hexadecimal string.");
          }
          u8a[i / 2] = byteValue;
        }
        return u8a;
      }
      case "Object": {
        const string = JSON.stringify(this.data);
        return textEncoder.encode(string);
      }
      case "String": {
        return textEncoder.encode(this.data);
      }
      default:
        throw new TypeError(`Conversion from ${this.format} to Uint8Array is not supported.`);
    }
  }
  /** Asynchronously converts an AsyncIterable source to a Uint8Array. */
  async toUint8ArrayAsync() {
    switch (this.format) {
      case "AsyncIterable": {
        const arrayBuffer = await this.toArrayBufferAsync();
        return new Uint8Array(arrayBuffer);
      }
      default:
        throw new TypeError(`Asynchronous conversion from ${this.format} to Uint8Array is not supported.`);
    }
  }
};
// src/logger.ts
// String-valued log-level enum (TypeScript enum compiled to an object):
// Web5LogLevel.Debug === "debug", Web5LogLevel.Silent === "silent".
var Web5LogLevel = /* @__PURE__ */ ((levels) => {
  levels["Debug"] = "debug";
  levels["Silent"] = "silent";
  return levels;
})(Web5LogLevel || {});
/**
 * Minimal console-backed logger. Starts silent (no-op) so library consumers
 * must opt in to output via `setLogLevel`.
 */
var Web5Logger = class {
  constructor() {
    // Default to silent/no-op log level
    this.logLevel = "silent" /* Silent */;
  }
  /** Sets the active log level ("debug" enables output, "silent" disables). */
  setLogLevel(logLevel) {
    this.logLevel = logLevel;
  }
  /** Alias for `info`. */
  log(message) {
    this.info(message);
  }
  /** Writes an informational message unless the logger is silent. */
  info(message) {
    if (this.logLevel !== "silent" /* Silent */) {
      console.info(message);
    }
  }
  /** Writes an error message unless the logger is silent. */
  error(message) {
    if (this.logLevel !== "silent" /* Silent */) {
      console.error(message);
    }
  }
};
// Shared singleton logger for the package.
var logger = new Web5Logger();
if (typeof window !== "undefined") {
  // In browsers, expose the logger globally for interactive debugging.
  window.web5logger = logger;
}
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/varint.js
// Public varint API surface. The getters are lazy (see __export), so they
// may reference decode3/encodeTo/encodingLength declared later in the file.
var varint_exports = {};
__export(varint_exports, {
  decode: () => decode3,
  encodeTo: () => encodeTo,
  encodingLength: () => encodingLength
});
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/vendor/varint.js
var encode_1 = encode2;
var MSB = 128;
var REST = 127;
var MSBALL = ~REST;
var INT = Math.pow(2, 31);
/**
 * Encodes `num` as an unsigned LEB128-style varint into `out` (created if
 * omitted) starting at `offset`. Records the number of bytes written on
 * `encode2.bytes` and returns the output array.
 */
function encode2(num, out, offset) {
  const target = out || [];
  let position = offset || 0;
  const start = position;
  // Values at/above 2^31 overflow 32-bit bitwise ops, so divide instead.
  while (num >= INT) {
    target[position++] = num & 255 | MSB;
    num /= 128;
  }
  // Emit 7 bits at a time, setting the continuation (MSB) flag.
  while (num & MSBALL) {
    target[position++] = num & 255 | MSB;
    num >>>= 7;
  }
  // Final byte has the continuation flag clear.
  target[position] = num | 0;
  encode2.bytes = position - start + 1;
  return target;
}
var decode2 = read;
var MSB$1 = 128;
var REST$1 = 127;
/**
 * Decodes an unsigned LEB128-style varint from `buf` starting at `offset`.
 * Records the number of bytes consumed on `read.bytes`; throws RangeError
 * (with `read.bytes` reset to 0) if the buffer ends mid-varint.
 */
function read(buf, offset) {
  const start = offset || 0;
  const len = buf.length;
  let result = 0;
  let shift = 0;
  let position = start;
  let byte;
  do {
    if (position >= len) {
      read.bytes = 0;
      throw new RangeError("Could not decode varint");
    }
    byte = buf[position++];
    // Beyond 28 bits, (byte & 127) << shift would overflow 32-bit math,
    // so switch to floating-point multiplication.
    result += shift < 28 ? (byte & REST$1) << shift : (byte & REST$1) * Math.pow(2, shift);
    shift += 7;
  } while (byte >= MSB$1);
  read.bytes = position - start;
  return result;
}
// Thresholds 2^(7*k): an unsigned varint needs k+1 bytes below N(k+1).
var N1 = Math.pow(2, 7);
var N2 = Math.pow(2, 14);
var N3 = Math.pow(2, 21);
var N4 = Math.pow(2, 28);
var N5 = Math.pow(2, 35);
var N6 = Math.pow(2, 42);
var N7 = Math.pow(2, 49);
var N8 = Math.pow(2, 56);
var N9 = Math.pow(2, 63);
// Returns the number of bytes (1-10) needed to varint-encode `value`.
var length = function(value) {
  return value < N1 ? 1 : value < N2 ? 2 : value < N3 ? 3 : value < N4 ? 4 : value < N5 ? 5 : value < N6 ? 6 : value < N7 ? 7 : value < N8 ? 8 : value < N9 ? 9 : 10;
};
// Vendored module surface matching the npm `varint` package API.
var varint = {
  encode: encode_1,
  decode: decode2,
  encodingLength: length
};
var _brrp_varint = varint;
var varint_default = _brrp_varint;
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/varint.js
/**
 * Decodes a varint from `data` at `offset`; returns [value, bytesConsumed].
 */
function decode3(data, offset = 0) {
  const code = varint_default.decode(data, offset);
  const consumed = varint_default.decode.bytes;
  return [code, consumed];
}
/** Encodes `int` into `target` at `offset` and returns `target`. */
function encodeTo(int, target, offset = 0) {
  varint_default.encode(int, target, offset);
  return target;
}
/** Returns the number of bytes required to varint-encode `int`. */
function encodingLength(int) {
  return varint_default.encodingLength(int);
}
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/hashes/digest.js
/**
 * Builds a multihash Digest from a hash-function `code` and raw `digest`
 * bytes: serializes varint(code) + varint(size) + digest.
 */
function create(code, digest) {
  const size = digest.byteLength;
  const codeLength = encodingLength(code);
  const headerLength = codeLength + encodingLength(size);
  const bytes = new Uint8Array(headerLength + size);
  encodeTo(code, bytes, 0);
  encodeTo(size, bytes, codeLength);
  bytes.set(digest, headerLength);
  return new Digest(code, size, digest, bytes);
}
/**
 * Parses serialized multihash bytes (varint code + varint size + digest)
 * into a Digest; throws when the digest length disagrees with the size field.
 */
function decode4(multihash) {
  const bytes = coerce(multihash);
  const [code, codeLength] = decode3(bytes);
  const [size, sizeLength] = decode3(bytes.subarray(codeLength));
  const digest = bytes.subarray(codeLength + sizeLength);
  if (digest.byteLength !== size) {
    throw new Error("Incorrect length");
  }
  return new Digest(code, size, digest, bytes);
}
/**
 * Structural equality for multihash digests: same object, or matching
 * code/size plus byte-identical serialized form.
 */
function equals2(a, b) {
  if (a === b) {
    return true;
  }
  return a.code === b.code && a.size === b.size && b.bytes instanceof Uint8Array && equals(a.bytes, b.bytes);
}
/**
 * Immutable-by-convention value object holding a multihash:
 * hash-function code, digest size, raw digest bytes, and the full
 * serialized multihash bytes.
 */
var Digest = class {
  code;
  size;
  digest;
  bytes;
  /**
   * Creates a multihash digest.
   *
   * @param code - multicodec code of the hash function
   * @param size - digest length in bytes
   * @param digest - raw digest bytes
   * @param bytes - full serialized multihash (varint code + varint size + digest)
   */
  constructor(code, size, digest, bytes) {
    Object.assign(this, { code, size, digest, bytes });
  }
};
// ../../node_modules/.pnpm/multiformats@13.1.0/node_modules/multiformats/dist/src/cid.js
/**
 * Renders a CID link as a string, using (and populating) the per-CID string
 * cache. V0 defaults to bare base58btc; V1 defaults to multibase base32.
 */
function format(link, base2) {
  const { bytes, version } = link;
  if (version === 0) {
    return toStringV0(bytes, baseCache(link), base2 ?? base58btc.encoder);
  }
  return toStringV1(bytes, baseCache(link), base2 ?? base32.encoder);
}
// Weakly keyed by CID instance so cached strings are GC'd with the CID.
var cache = /* @__PURE__ */ new WeakMap();
/**
 * Returns the per-CID map of { multibase prefix -> cached string form },
 * creating and memoizing an empty map on first access.
 */
function baseCache(cid) {
  let perCid = cache.get(cid);
  if (perCid == null) {
    perCid = /* @__PURE__ */ new Map();
    cache.set(cid, perCid);
  }
  return perCid;
}
// Vendored multiformats CID implementation (content identifier: version +
// codec code + multihash).
var CID = class _CID {
  code;
  version;
  multihash;
  bytes;
  // Alias of `bytes`; the `value["/"] === value.bytes` check in `asCID`
  // lets independent CID implementations recognize each other.
  "/";
  /**
   * @param version - Version of the CID
   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
   * @param multihash - (Multi)hash of the of the content.
   */
  constructor(version, code, multihash, bytes) {
    this.code = code;
    this.version = version;
    this.multihash = multihash;
    this.bytes = bytes;
    this["/"] = bytes;
  }
  /**
   * Signalling `cid.asCID === cid` has been replaced with `cid['/'] === cid.bytes`
   * please either use `CID.asCID(cid)` or switch to new signalling mechanism
   *
   * @deprecated
   */
  get asCID() {
    return this;
  }
  // ArrayBufferView
  get byteOffset() {
    return this.bytes.byteOffset;
  }
  // ArrayBufferView
  get byteLength() {
    return this.bytes.byteLength;
  }
  // Converts to CIDv0; only valid for dag-pb content with a sha2-256 hash.
  toV0() {
    switch (this.version) {
      case 0: {
        return this;
      }
      case 1: {
        const { code, multihash } = this;
        if (code !== DAG_PB_CODE) {
          throw new Error("Cannot convert a non dag-pb CID to CIDv0");
        }
        if (multihash.code !== SHA_256_CODE) {
          throw new Error("Cannot convert non sha2-256 multihash CID to CIDv0");
        }
        return _CID.createV0(multihash);
      }
      default: {
        throw Error(`Can not convert CID version ${this.version} to version 0. This is a bug please report`);
      }
    }
  }
  // Converts to CIDv1 (always possible; re-derives the multihash bytes).
  toV1() {
    switch (this.version) {
      case 0: {
        const { code, digest } = this.multihash;
        const multihash = create(code, digest);
        return _CID.createV1(this.code, multihash);
      }
      case 1: {
        return this;
      }
      default: {
        throw Error(`Can not convert CID version ${this.version} to version 1. This is a bug please report`);
      }
    }
  }
  equals(other) {
    return _CID.equals(this, other);
  }
  // Structural equality: code, version, and multihash must all match.
  static equals(self, other) {
    const unknown = other;
    return unknown != null && self.code === unknown.code && self.version === unknown.version && equals2(self.multihash, unknown.multihash);
  }
  toString(base2) {
    return format(this, base2);
  }
  toJSON() {
    return { "/": format(this) };
  }
  link() {
    return this;
  }
  [Symbol.toStringTag] = "CID";
  // Legacy
  [Symbol.for("nodejs.util.inspect.custom")]() {
    return `CID(${this.toString()})`;
  }
  /**
   * Takes any input `value` and returns a `CID` instance if it was
   * a `CID` otherwise returns `null`. If `value` is instanceof `CID`
   * it will return value back. If `value` is not instance of this CID
   * class, but is compatible CID it will return new instance of this
   * `CID` class. Otherwise returns null.
   *
   * This allows two different incompatible versions of CID library to
   * co-exist and interop as long as binary interface is compatible.
   */
  static asCID(input) {
    if (input == null) {
      return null;
    }
    const value = input;
    if (value instanceof _CID) {
      return value;
    } else if (value["/"] != null && value["/"] === value.bytes || value.asCID === value) {
      // Duck-typed CID from a compatible library: rebuild as this class.
      const { version, code, multihash, bytes } = value;
      return new _CID(version, code, multihash, bytes ?? encodeCID(version, code, multihash.bytes));
    } else if (value[cidSymbol] === true) {
      // Legacy js-cid object: its multihash is still serialized bytes.
      const { version, multihash, code } = value;
      const digest = decode4(multihash);
      return _CID.create(version, code, digest);
    } else {
      return null;
    }
  }
  /**
   * @param version - Version of the CID
   * @param code - Code of the codec content is encoded in, see https://github.com/multiformats/multicodec/blob/master/table.csv
   * @param digest - (Multi)hash of the of the content.
   */
  static create(version, code, digest) {
    if (typeof code !== "number") {
      throw new Error("String codecs are no longer supported");
    }
    if (!(digest.bytes instanceof Uint8Array)) {
      throw new Error("Invalid digest");
    }
    switch (version) {
      case 0: {
        if (code !== DAG_PB_CODE) {
          throw new Error(`Version 0 CID must use dag-pb (code: ${DAG_PB_CODE}) block encoding`);
        } else {
          // CIDv0 bytes are just the multihash bytes (no version/codec prefix).
          return new _CID(version, code, digest, digest.bytes);
        }
      }
      case 1: {
        const bytes = encodeCID(version, code, digest.bytes);
        return new _CID(version, code, digest, bytes);
      }
      default: {
        throw new Error("Invalid version");
      }
    }
  }
  /**
   * Simplified version of `create` for CIDv0.
   */
  static createV0(digest) {
    return _CID.create(0, DAG_PB_CODE, digest);
  }
  /**
   * Simplified version of `create` for CIDv1.
   *
   * @param code - Content encoding format code.
   * @param digest - Multihash of the content.
   */
  static createV1(code, digest) {
    return _CID.create(1, code, digest);
  }
  /**
   * Decoded a CID from its binary representation. The byte array must contain
   * only the CID with no additional bytes.
   *
   * An error will be thrown if the bytes provided do not contain a valid
   * binary representation of a CID.
   */
  static decode(bytes) {
    const [cid, remainder] = _CID.decodeFirst(bytes);
    if (remainder.length !== 0) {
      throw new Error("Incorrect length");
    }
    return cid;
  }
  /**
   * Decoded a CID from its binary representation at the beginning of a byte
   * array.
   *
   * Returns an array with the first element containing the CID and the second
   * element containing the remainder of the original byte array. The remainder
   * will be a zero-length byte array if the provided bytes only contained a
   * binary CID representation.
   */
  static decodeFirst(bytes) {
    const specs = _CID.inspectBytes(bytes);
    const prefixSize = specs.size - specs.multihashSize;
    const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize));
    if (multihashBytes.byteLength !== specs.multihashSize) {
      throw new Error("Incorrect length");
    }
    const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize);
    const digest = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes);
    const cid = specs.version === 0 ? _CID.createV0(digest) : _CID.createV1(specs.codec, digest);
    return [cid, bytes.subarray(specs.size)];
  }
  /**
   * Inspect the initial bytes of a CID to determine its properties.
   *
   * Involves decoding up to 4 varints. Typically this will require only 4 to 6
   * bytes but for larger multicodec code values and larger multihash digest
   * lengths these varints can be quite large. It is recommended that at least
   * 10 bytes be made available in the `initialBytes` argument for a complete
   * inspection.
   */
  static inspectBytes(initialBytes) {
    let offset = 0;
    // Reads the next varint and advances `offset` past it.
    const next = () => {
      const [i, length2] = decode3(initialBytes.subarray(offset));
      offset += length2;
      return i;
    };
    let version = next();
    let codec = DAG_PB_CODE;
    // A leading varint of 18 matches SHA_256_CODE: the bytes start directly
    // with a multihash, i.e. an implicit-version CIDv0 — rewind and treat
    // the whole input as the multihash.
    if (version === 18) {
      version = 0;
      offset = 0;
    } else {
      codec = next();
    }
    if (version !== 0 && version !== 1) {
      throw new RangeError(`Invalid CID version ${version}`);
    }
    const prefixSize = offset;
    const multihashCode = next();
    const digestSize = next();
    const size = offset + digestSize;
    const multihashSize = size - prefixSize;
    return { version, codec, multihashCode, digestSize, multihashSize, size };
  }
  /**
   * Takes cid in a string representation and creates an instance. If `base`
   * decoder is not provided will use a default from the configuration. It will
   * throw an error if encoding of the CID is not compatible with supplied (or
   * a default decoder).
   */
  static parse(source, base2) {
    const [prefix, bytes] = parseCIDtoBytes(source, base2);
    const cid = _CID.decode(bytes);
    if (cid.version === 0 && source[0] !== "Q") {
      throw Error("Version 0 CID string must not include multibase prefix");
    }
    // Remember the original string so toString() round-trips without re-encoding.
    baseCache(cid).set(prefix, source);
    return cid;
  }
};
// Decodes a CID string into [multibase prefix, bytes], picking a default
// decoder (base58btc or base32) from the leading character when none is given.
function parseCIDtoBytes(source, base2) {
  const first = source[0];
  if (first === "Q") {
    // CIDv0: base58btc without a multibase prefix — prepend one before decoding.
    const decoder = base2 ?? base58btc;
    return [
      base58btc.prefix,
      decoder.decode(`${base58btc.prefix}${source}`)
    ];
  }
  if (first === base58btc.prefix) {
    const decoder = base2 ?? base58btc;
    return [base58btc.prefix, decoder.decode(source)];
  }
  if (first === base32.prefix) {
    const decoder = base2 ?? base32;
    return [base32.prefix, decoder.decode(source)];
  }
  // Any other multibase requires an explicit decoder from the caller.
  if (base2 == null) {
    throw Error("To parse non base32 or base58btc encoded CID multibase decoder must be provided");
  }
  return [first, base2.decode(source)];
}
// Encodes a CIDv0 to its string form, memoizing the result in `cache2` keyed
// by the base's multibase prefix. Only base58btc is valid for v0.
function toStringV0(bytes, cache2, base2) {
  const { prefix } = base2;
  if (prefix !== base58btc.prefix) {
    throw Error(`Cannot string encode V0 in ${base2.name} encoding`);
  }
  const cached = cache2.get(prefix);
  if (cached != null) {
    return cached;
  }
  // CIDv0 strings omit the leading multibase prefix character.
  const encoded = base2.encode(bytes).slice(1);
  cache2.set(prefix, encoded);
  return encoded;
}
// Encodes a CIDv1 to its string form, memoizing the result in `cache2` keyed
// by the base's multibase prefix.
function toStringV1(bytes, cache2, base2) {
  const { prefix } = base2;
  const cached = cache2.get(prefix);
  if (cached != null) {
    return cached;
  }
  const encoded = base2.encode(bytes);
  cache2.set(prefix, encoded);
  return encoded;
}
// Multicodec code for the dag-pb codec (0x70), the implicit codec of CIDv0.
var DAG_PB_CODE = 112;
// Multicodec code for the sha2-256 multihash (0x12); a leading 0x12 varint is
// how inspectBytes recognizes a CIDv0.
var SHA_256_CODE = 18;
// Serializes a CID as: varint(version) | varint(codec code) | multihash bytes.
function encodeCID(version, code, multihash) {
  const versionLength = encodingLength(version);
  const codecEnd = versionLength + encodingLength(code);
  const bytes = new Uint8Array(codecEnd + multihash.byteLength);
  encodeTo(version, bytes, 0);
  encodeTo(code, bytes, versionLength);
  bytes.set(multihash, codecEnd);
  return bytes;
}
var cidSymbol = Symbol.for("@ipld/js-cid/CID");
// src/multicodec.ts
var Multicodec = class _Multicodec {
  static {
    /**
     * A static field containing a map of codec codes to their corresponding names.
     */
    this.codeToName = /* @__PURE__ */ new Map();
  }
  static {
    /**
     * A static field containing a map of codec names to their corresponding codes.
     */
    this.nameToCode = /* @__PURE__ */ new Map();
  }
  /**
   * Adds a multicodec prefix to input data.
   *
   * @param options - The options for adding a prefix.
   * @param options.code - The codec code. Either the code or name must be provided.
   * @param options.name - The codec name. Either the code or name must be provided.
   * @param options.data - The data to be prefixed.
   * @returns The data with the added prefix as a Uint8Array.
   * @throws If both or neither of `name`/`code` are given, or the codec is not registered.
   */
  static addPrefix(options) {
    let { code, data, name } = options;
    // Exactly one of `name` or `code` must be supplied.
    if (!(name ? !code : code)) {
      throw new Error(`Either 'name' or 'code' must be defined, but not both.`);
    }
    // Resolve to a registered code: keep a known code, otherwise look the name up.
    code = _Multicodec.codeToName.has(code) ? code : _Multicodec.nameToCode.get(name);
    if (code === undefined) {
      throw new Error(`Unsupported multicodec: ${options.name ?? options.code}`);
    }
    const prefixLength = varint_exports.encodingLength(code);
    const dataWithPrefix = new Uint8Array(prefixLength + data.byteLength);
    dataWithPrefix.set(data, prefixLength);
    varint_exports.encodeTo(code, dataWithPrefix);
    return dataWithPrefix;
  }
  /**
   * Get the Multicodec code from given prefixed data.
   *
   * @param options - The options for getting the codec code.
   * @param options.prefixedData - The data to extract the codec code from.
   * @returns - The Multicodec code as a number.
   */
  static getCodeFromData(options) {
    const { prefixedData } = options;
    const [code] = varint_exports.decode(prefixedData);
    return code;
  }
  /**
   * Get the Multicodec code from given Multicodec name.
   *
   * @param options - The options for getting the codec code.
   * @param options.name - The name to lookup.
   * @returns - The Multicodec code as a number.
   * @throws If the name is not registered.
   */
  static getCodeFromName(options) {
    const { name } = options;
    const code = _Multicodec.nameToCode.get(name);
    if (code === undefined) {
      throw new Error(`Unsupported multicodec: ${name}`);
    }
    return code;
  }
  /**
   * Get the Multicodec name from given Multicodec code.
   *
   * @param options - The options for getting the codec name.
   * @param options.code - The code to lookup.
   * @returns - The Multicodec name as a string.
   * @throws If the code is not registered.
   */
  static getNameFromCode(options) {
    const { code } = options;
    const name = _Multicodec.codeToName.get(code);
    if (name === undefined) {
      throw new Error(`Unsupported multicodec: ${code}`);
    }
    return name;
  }
  /**
   * Registers a new codec in the Multicodec class.
   *
   * @param codec - The codec to be registered, with `code` and `name` properties.
   */
  static registerCodec(codec) {
    _Multicodec.codeToName.set(codec.code, codec.name);
    _Multicodec.nameToCode.set(codec.name, codec.code);
  }
  /**
   * Returns the data with the Multicodec prefix removed.
   *
   * @param options - The options for removing the prefix.
   * @param options.prefixedData - The prefixed data to process.
   * @returns An object containing the codec `code`, the codec `name`, and the
   * unprefixed `data` as a Uint8Array.
   * @throws If the prefix does not correspond to a registered codec.
   */
  static removePrefix(options) {
    const { prefixedData } = options;
    const [code, codeByteLength] = varint_exports.decode(prefixedData);
    const name = _Multicodec.codeToName.get(code);
    if (name === undefined) {
      throw new Error(`Unsupported multicodec: ${code}`);
    }
    return { code, data: prefixedData.slice(codeByteLength), name };
  }
};
// Register the default key-related codecs from the multicodec table.
[
  { code: 237, name: "ed25519-pub" },
  { code: 4864, name: "ed25519-priv" },
  { code: 236, name: "x25519-pub" },
  { code: 4866, name: "x25519-priv" },
  { code: 231, name: "secp256k1-pub" },
  { code: 4865, name: "secp256k1-priv" }
].forEach((codec) => Multicodec.registerCodec(codec));
// src/object.ts
/**
 * Determines whether a value is an object with no own enumerable string keys
 * and no own symbol properties. Returns `false` for `null` and non-objects.
 * Note: an empty array also satisfies this check.
 */
function isEmptyObject(obj) {
  if (typeof obj !== "object" || obj === null) {
    return false;
  }
  if (Object.getOwnPropertySymbols(obj).length > 0) {
    return false;
  }
  return Object.keys(obj).length === 0;
}
/**
 * Recursively deletes properties of `obj` (in place) whose values are empty
 * objects, including objects that become empty after their own empty children
 * are removed.
 */
function removeEmptyObjects(obj) {
  Object.keys(obj).forEach((key) => {
    // Guard against null: `typeof null === "object"`, and recursing into it
    // would make `Object.keys(null)` throw a TypeError.
    if (typeof obj[key] === "object" && obj[key] !== null) {
      removeEmptyObjects(obj[key]);
    }
    if (isEmptyObject(obj[key])) {
      delete obj[key];
    }
  });
}
/**
 * Recursively deletes properties of `obj` (in place) whose values are
 * `undefined`. `null` values are preserved.
 */
function removeUndefinedProperties(obj) {
  Object.keys(obj).forEach((key) => {
    if (obj[key] === undefined) {
      delete obj[key];
    } else if (typeof obj[key] === "object" && obj[key] !== null) {
      // Guard against null: `typeof null === "object"`, and recursing into it
      // would make `Object.keys(null)` throw a TypeError.
      removeUndefinedProperties(obj[key]);
    }
  });
}
// src/stores.ts
var import_level = require("level");
var LevelStore = class {
  /**
   * Key-value store backed by a Level database.
   *
   * @param options.db - An existing Level instance to use; when omitted, a new
   * database is opened at `location`.
   * @param options.location - Filesystem location for the database; defaults to "DATASTORE".
   */
  constructor({ db, location = "DATASTORE" } = {}) {
    this.store = db ?? new import_level.Level(location);
  }
  /** Removes every entry from the underlying database. */
  async clear() {
    await this.store.clear();
  }
  /** Closes the underlying database. */
  async close() {
    await this.store.close();
  }
  /** Deletes the entry stored under `key`, if any. */
  async delete(key) {
    await this.store.del(key);
  }
  /**
   * Retrieves the value stored under `key`.
   *
   * @returns The stored value, or `undefined` when the key is not present.
   */
  async get(key) {
    try {
      return await this.store.get(key);
    } catch (err) {
      // Level signals a missing key with an error flagged `notFound`.
      if (err.notFound) {
        return undefined;
      }
      throw err;
    }
  }
  /** Stores `value` under `key`, overwriting any existing entry. */
  async set(key, value) {
    await this.store.put(key, value);
  }
};
var MemoryStore = class {
  /**
   * In-memory key-value store backed by a `Map`.
   */
  constructor() {
    /**
     * The Map used as the backing key-value store.
     */
    this.store = /* @__PURE__ */ new Map();
  }
  /**
   * Clears all entries in the key-value store.
   *
   * @returns A Promise that resolves when the operation is complete.
   */
  async clear() {
    this.store.clear();
  }
  /**
   * No-op for `MemoryStore`; there is no underlying resource to release.
   */
  async close() {
  }
  /**
   * Deletes an entry from the key-value store by its key.
   *
   * @param id - The key of the entry to delete.
   * @returns A Promise that resolves to `true` if the entry existed and was removed.
   */
  async delete(id) {
    return this.store.delete(id);
  }
  /**
   * Retrieves the value of an entry by its key.
   *
   * @param id - The key of the entry to retrieve.
   * @returns A Promise that resolves to the stored value, or `undefined` if absent.
   */
  async get(id) {
    return this.store.get(id);
  }
  /**
   * Checks for the presence of an entry by key.
   *
   * @param id - The key to check for.
   * @returns A Promise that resolves to whether an entry with that key exists.
   */
  async has(id) {
    return this.store.has(id);
  }
  /**
   * Retrieves all values in the key-value store.
   *
   * @returns A Promise that resolves to an array of every stored value.
   */
  async list() {
    return [...this.store.values()];
  }
  /**
   * Sets the value of an entry in the key-value store.
   *
   * @param id - The key of the entry to set.
   * @param key - The new value for the entry.
   * @returns A Promise that resolves when the operation is complete.
   */
  async set(id, key) {
    this.store.set(id, key);
  }
};
// src/stream.ts
var Stream = class _Stream {
/**
* Transforms a `ReadableStream` into an `AsyncIterable`. This allows for the asynchronous
* iteration over the stream's data chunks.
*
* This method creates an async iterator from a `ReadableStream`, enabling the use of
* `for await...of` loops to process stream data. It reads from the stream until it's closed or
* errored, yielding each chunk as it becomes available.
*
* @example
* ```ts
* const readableStream = new ReadableStream({ ... });
* for await (const chunk of Stream.asAsyncIterator(readableStream)) {
* // process each chunk
* }
* ```
*
* @remarks
* - The method ensures proper cleanup by releasing the reader lock when iteration is completed or
* if an error occurs.
*
* @param readableStream - The Web `ReadableStream` to be transformed into an `AsyncIterable`.
* @returns An `AsyncIterable` that yields data chunks from the `ReadableStream`.
*/
static async *asAsyncIterator(readableStream) {
const reader = readableStream.getReader();
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
yield value;
}
} finally {
reader.releaseLock();
}
}
/**
* Consumes a `ReadableStream` and returns its contents as an `ArrayBuffer`.
*
* This method reads all data from a `ReadableStream`, collects it, and converts it into an
* `ArrayBuffer`.
*
* @example
* ```ts
* const readableStream = new ReadableStream({ ... });
* const arrayBuffer = await Stream.consumeToArrayBuffer({ readableStream });
* ```
*
* @param readableStream - The Web `ReadableStream` whose data will be consumed.
* @returns A Promise that resolves to an `ArrayBuffer` containing all the data from the stream.
*/
static async consumeToArrayBuffer({ readableStream }) {
const iterableStream = _Stream.asAsyncIterator(readableStream);
const arrayBuffer = await Convert.asyncIterable(iterableStream).toArrayBufferAsync();
return arrayBuffer;
}
/**
* Consumes a `ReadableStream` and returns its contents as a `Blob`.
*
* This method reads all data from a `ReadableStream`, collects it, and converts it into a `Blob`.
*
* @example
* ```ts
* const readableStream = new ReadableStream({ ... });
* const blob = await Stream.consumeToBlob({ readableStream });
* ```
*
* @param readableStream - The Web `ReadableStream` whose data will be consumed.
* @returns A Promise that resolves to a `Blob` containing all the data from the stream.
*/
static async consumeToBlob({ readableStream }) {
const iterableStream = _Stream.asAsyncIterator(readableStream);
const blob = await Convert.asyncIterable(iterableStream).toBlobAsync();
return blob;
}
/**
* Consumes a `ReadableStream` and returns its contents as a `Uint8Array`.
*
* This method reads all data from a `ReadableStream`, collects it, and converts it into a
* `Uint8Array`.
*
* @example
* ```ts
* const readableStream = new ReadableStream({ ... });
* const bytes = await Stream.consumeToBytes({ readableStream });
* ```
*
* @param readableStream - The Web `ReadableStream` whose data will be consumed.
* @returns A Promise that resolves to a `Uint8Array` containing all the data from the stream.
*/
static async consumeToBytes({ readableStream }) {
const iterableStream = _Stream.asAsyncIterator(readableStream);
const bytes = await Convert.asyncIterable(iterableStream).toUint8ArrayAsync();
return bytes;
}
/**
* Consumes a `ReadableStream` and parses its contents as JSON.
*
* This method reads all the data from the stream, converts it to a text string, and then parses
* it as JSON, returning the resulting object.
*
* @example
* ```ts
* const readableStream = new ReadableStream({ ... });
* const jsonData = await Stream.consumeToJson({ readableStream });
* ```
*
* @param readableStream - The Web `ReadableStream` whose JSON content will be consumed.
* @returns A Promise that resolves to the parsed JSON object from the stream's data.
*/
static async consumeToJson({ readableStream }) {
const iterableStream = _Stream.asAsyncIterator(readableStream);
const object =