// @ethersphere/bee-js - JavaScript client for Bee
"use strict";
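// TypeScript-emitted interop helpers: __importStar re-exports a CommonJS module as a
// namespace object so `import * as ns from '...'` works at runtime.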
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.makeFeedWriter = exports.makeFeedReader = exports.downloadFeedUpdateAsCAC = exports.downloadFeedUpdate = exports.getFeedUpdateChunkReference = exports.updateFeedWithPayload = exports.updateFeedWithReference = exports.findNextIndex = void 0;
const cafe_utility_1 = require("cafe-utility");
const cac_1 = require("../chunk/cac");
const soc_1 = require("../chunk/soc");
const bytes = __importStar(require("../modules/bytes"));
const chunkAPI = __importStar(require("../modules/chunk"));
const feed_1 = require("../modules/feed");
const bytes_1 = require("../utils/bytes");
const error_1 = require("../utils/error");
const resource_locator_1 = require("../utils/resource-locator");
const typed_bytes_1 = require("../utils/typed-bytes");
const identifier_1 = require("./identifier");
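// Layout of a timestamped feed update payload inside the single-owner chunk:
// an 8-byte big-endian Unix timestamp followed by the referenced content.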
const TIMESTAMP_PAYLOAD_OFFSET = 0;
const TIMESTAMP_PAYLOAD_SIZE = 8;
const REFERENCE_PAYLOAD_OFFSET = TIMESTAMP_PAYLOAD_SIZE;
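// Fetches the latest update for (owner, topic) and returns the next writable feed index.
// Falls back to index 0 when the Bee node responds with an error, e.g. for a feed that
// has no updates yet.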
async function findNextIndex(requestOptions, owner, topic) {
try {
const feedUpdate = await (0, feed_1.fetchLatestFeedUpdate)(requestOptions, owner, topic);
if (!feedUpdate.feedIndexNext) {
throw Error('Feed index next is not defined. This should only happen when fetching an exact index.');
}
return feedUpdate.feedIndexNext;
}
catch (e) {
if (e instanceof error_1.BeeResponseError) {
return typed_bytes_1.FeedIndex.fromBigInt(0n);
}
throw e;
}
}
exports.findNextIndex = findNextIndex;
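// Writes a feed update whose payload is the 8-byte timestamp followed by the given
// reference, uploaded as a single-owner chunk under the feed identifier for the chosen
// index (options.index, or the next index looked up on the node).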
async function updateFeedWithReference(requestOptions, signer, topic, reference, postageBatchId, options) {
reference = new typed_bytes_1.Reference(reference);
const nextIndex = options?.index ?? (await findNextIndex(requestOptions, signer.publicKey().address(), topic));
const identifier = (0, identifier_1.makeFeedIdentifier)(topic, nextIndex);
const at = options?.at ?? Date.now() / 1000.0;
const timestamp = cafe_utility_1.Binary.numberToUint64(BigInt(Math.floor(at)), 'BE');
const payloadBytes = cafe_utility_1.Binary.concatBytes(timestamp, reference.toUint8Array());
return (0, soc_1.uploadSingleOwnerChunkData)(requestOptions, signer, postageBatchId, identifier, payloadBytes, options);
}
exports.updateFeedWithReference = updateFeedWithReference;
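// Writes a feed update carrying an arbitrary payload (string or bytes) instead of a
// bare reference.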
async function updateFeedWithPayload(requestOptions, signer, topic, data, postageBatchId, options) {
const nextIndex = options?.index ?? (await findNextIndex(requestOptions, signer.publicKey().address(), topic));
const identifier = (0, identifier_1.makeFeedIdentifier)(topic, nextIndex);
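// Payloads larger than a single chunk (4096 bytes) are uploaded via the bytes API and
// their root chunk is wrapped into the single-owner chunk instead.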
if (data.length > 4096) {
const uploadResult = await bytes.upload(requestOptions, data, postageBatchId, options);
const rootChunk = await chunkAPI.download(requestOptions, uploadResult.reference);
return (0, soc_1.uploadSingleOwnerChunkWithWrappedChunk)(requestOptions, signer, postageBatchId, identifier, rootChunk, options);
}
return (0, soc_1.uploadSingleOwnerChunkData)(requestOptions, signer, postageBatchId, identifier, cafe_utility_1.Types.isString(data) ? bytes_1.Bytes.fromUtf8(data).toUint8Array() : data, options);
}
exports.updateFeedWithPayload = updateFeedWithPayload;
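// Derives the address of a feed update chunk: keccak256(identifier || owner), where the
// identifier is itself derived from the topic and index.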
function getFeedUpdateChunkReference(owner, topic, index) {
const identifier = (0, identifier_1.makeFeedIdentifier)(topic, index);
return new typed_bytes_1.Reference(cafe_utility_1.Binary.keccak256(cafe_utility_1.Binary.concatBytes(identifier.toUint8Array(), owner.toUint8Array())));
}
exports.getFeedUpdateChunkReference = getFeedUpdateChunkReference;
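// Downloads the single-owner chunk for (owner, topic, index) and returns its payload,
// optionally splitting off the leading 8-byte timestamp.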
async function downloadFeedUpdate(requestOptions, owner, topic, index, hasTimestamp = false) {
index = typeof index === 'number' ? typed_bytes_1.FeedIndex.fromBigInt(BigInt(index)) : index;
const address = getFeedUpdateChunkReference(owner, topic, index);
const data = await chunkAPI.download(requestOptions, address.toHex());
const soc = (0, soc_1.makeSingleOwnerChunkFromData)(data, address);
let timestamp = cafe_utility_1.Optional.empty();
if (hasTimestamp) {
const timestampBytes = bytes_1.Bytes.fromSlice(soc.payload.toUint8Array(), TIMESTAMP_PAYLOAD_OFFSET, TIMESTAMP_PAYLOAD_SIZE);
timestamp = cafe_utility_1.Optional.of(Number(cafe_utility_1.Binary.uint64ToNumber(timestampBytes.toUint8Array(), 'BE')));
}
return {
timestamp,
payload: new bytes_1.Bytes(soc.payload.offset(hasTimestamp ? REFERENCE_PAYLOAD_OFFSET : 0)),
};
}
exports.downloadFeedUpdate = downloadFeedUpdate;
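// Downloads the feed update chunk and reinterprets the data after the identifier and
// signature as the content-addressed chunk wrapped inside the single-owner chunk.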
async function downloadFeedUpdateAsCAC(requestOptions, owner, topic, index) {
index = typeof index === 'number' ? typed_bytes_1.FeedIndex.fromBigInt(BigInt(index)) : index;
const address = getFeedUpdateChunkReference(owner, topic, index);
const data = await chunkAPI.download(requestOptions, address);
return (0, cac_1.asContentAddressedChunk)(data.slice(typed_bytes_1.Identifier.LENGTH + typed_bytes_1.Signature.LENGTH));
}
exports.downloadFeedUpdateAsCAC = downloadFeedUpdateAsCAC;
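// Creates a reader bound to (owner, topic) exposing download, downloadPayload and
// downloadReference.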
function makeFeedReader(requestOptions, topic, owner) {
// TODO: remove once the deprecated behaviour has been phased out
const download = async (options) => {
if (options?.index === undefined) {
return (0, feed_1.fetchLatestFeedUpdate)(requestOptions, owner, topic);
}
const update = await downloadFeedUpdate(requestOptions, owner, topic, options.index, options.hasTimestamp ?? true);
const feedIndex = typeof options.index === 'number' ? typed_bytes_1.FeedIndex.fromBigInt(BigInt(options.index)) : options.index;
return {
payload: update.payload,
feedIndex,
feedIndexNext: feedIndex.next(),
};
};
const downloadPayload = async (options) => {
if (options?.index === undefined) {
return (0, feed_1.fetchLatestFeedUpdate)(requestOptions, owner, topic);
}
const cac = await downloadFeedUpdateAsCAC(requestOptions, owner, topic, options.index);
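// If the wrapped chunk's span fits in a single chunk, its payload is the data itself;
// otherwise resolve the full data through the bytes API using the chunk's address.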
const payload = cac.span.toBigInt() <= 4096n
? cac.payload
: await bytes.download(requestOptions, new resource_locator_1.ResourceLocator(cac.address));
const feedIndex = typeof options.index === 'number' ? typed_bytes_1.FeedIndex.fromBigInt(BigInt(options.index)) : options.index;
return {
payload,
feedIndex,
feedIndexNext: feedIndex.next(),
};
};
const downloadReference = async (options) => {
let index = options?.index;
if (index === undefined) {
index = (await (0, feed_1.probeFeed)(requestOptions, owner, topic)).feedIndex;
}
const payload = await download({ ...options, index: index });
return {
reference: new typed_bytes_1.Reference(payload.payload.toUint8Array()),
feedIndex: payload.feedIndex,
feedIndexNext: payload.feedIndexNext ?? payload.feedIndex.next(),
};
};
return {
download,
downloadPayload,
downloadReference,
owner,
topic,
};
}
exports.makeFeedReader = makeFeedReader;
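// Creates a writer bound to (signer, topic); it also exposes the reader methods for the
// signer's address. upload/uploadReference write a reference update, uploadPayload
// writes arbitrary data.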
function makeFeedWriter(requestOptions, topic, signer) {
const upload = async (postageBatchId, reference, options) => {
return updateFeedWithReference(requestOptions, signer, topic, reference, postageBatchId, options);
};
const uploadPayload = async (postageBatchId, data, options) => {
return updateFeedWithPayload(requestOptions, signer, topic, data, postageBatchId, options);
};
return {
...makeFeedReader(requestOptions, topic, signer.publicKey().address()),
upload,
uploadReference: upload,
uploadPayload,
};
}
exports.makeFeedWriter = makeFeedWriter;
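// Usage sketch (illustrative only; not executed as part of this module). The values
// requestOptions, topic, ownerAddress, signer and postageBatchId are placeholders the
// caller is assumed to provide; in bee-js these helpers are normally reached through
// the public Bee class rather than called directly.
//
//   const reader = makeFeedReader(requestOptions, topic, ownerAddress);
//   const latest = await reader.downloadPayload();
//
//   const writer = makeFeedWriter(requestOptions, topic, signer);
//   await writer.uploadPayload(postageBatchId, 'hello feed');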