UNPKG

@apillon/sdk

Version:

▶◀ Apillon SDK for NodeJS ▶◀

238 lines 10.9 kB
"use strict";
/* eslint-disable security/detect-non-literal-fs-filename */
/**
 * File upload utilities for the Apillon SDK.
 *
 * Reads files from a local folder (or accepts in-memory file descriptors),
 * registers them with the Apillon API in chunks of FILE_CHUNK_SIZE, pushes the
 * raw content to the pre-signed S3 URLs the API returns, and finally closes
 * the upload session. Also exposes `calculateCID`, which computes the IPFS
 * CID of a piece of content without writing it to any block store.
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.calculateCID = exports.uploadFiles = void 0;

const fs = require("fs");
const path = require("path");
const axios_1 = require("axios");
const { importer } = require("ipfs-unixfs-importer");
const { ApillonLogger } = require("../lib/apillon-logger");
const { ApillonApi } = require("../lib/apillon-api");
const { LogLevel } = require("../types/apillon");
const { randomBytes } = require("crypto");

// axios is consumed through its default export regardless of whether the
// installed build flags itself as an ES module.
const axios = axios_1.default ?? axios_1;

// Number of files registered and uploaded per API round-trip.
const FILE_CHUNK_SIZE = 200;

/**
 * Upload files to Apillon storage.
 *
 * @param {object} uploadParams
 * @param {string} [uploadParams.folderPath] - Local folder to read files from.
 * @param {string} uploadParams.apiPrefix - API route prefix for this storage target.
 * @param {object} [uploadParams.params] - Upload options (`ignoreFiles`,
 *   `wrapWithDirectory`, ...); forwarded to the session-end call.
 * @param {object[]} [uploadParams.files] - In-memory file descriptors; used
 *   instead of `folderPath` when non-empty.
 * @returns {Promise<{sessionUuid: string, files: object[]}>} The session uuid
 *   and the flat list of uploaded file records.
 * @throws {Error} When neither `folderPath` nor `files` is provided, or the
 *   folder cannot be read.
 */
async function uploadFiles(uploadParams) {
    const { folderPath, apiPrefix, params } = uploadParams;
    let files = uploadParams.files;

    if (folderPath) {
        ApillonLogger.log(`Preparing to upload files from ${folderPath}...`);
    }
    else if (files?.length) {
        ApillonLogger.log(`Preparing to upload ${files.length} files...`);
    }
    else {
        throw new Error('Invalid upload parameters received');
    }

    // If only a folder path was given, enumerate the files on disk.
    if (folderPath && !files?.length) {
        try {
            files = readFilesFromFolder(folderPath, params?.ignoreFiles);
        }
        catch (err) {
            ApillonLogger.log(err.message, LogLevel.ERROR);
            throw new Error(`Error reading files in ${folderPath}`);
        }
    }
    ApillonLogger.log(`Total files to upload: ${files.length}`);

    const sessionUuid = uuidv4();
    const uploadedFiles = [];

    for (const fileGroup of chunkify(files, FILE_CHUNK_SIZE)) {
        if (params?.wrapWithDirectory) {
            uploadedFiles.push(await uploadChunkWrapped(apiPrefix, sessionUuid, fileGroup));
        }
        else {
            uploadedFiles.push(await uploadChunkWithCids(apiPrefix, sessionUuid, fileGroup));
        }
    }

    ApillonLogger.logWithTime('File upload complete.');
    ApillonLogger.log('Closing upload session...');
    await ApillonApi.post(`${apiPrefix}/upload/${sessionUuid}/end`, params);
    ApillonLogger.logWithTime('Upload session ended.');

    return { sessionUuid, files: uploadedFiles.flat() };
}
exports.uploadFiles = uploadFiles;

/**
 * Register one chunk of files with the API (the session wraps them in a
 * directory server-side) and push their content to S3.
 *
 * @returns {Promise<object[]>} API file records with the single-use S3 `url`
 *   removed so it is not leaked to callers.
 */
async function uploadChunkWrapped(apiPrefix, sessionUuid, fileGroup) {
    // Files referenced by a disk path (`index`) are read lazily here.
    for (const fg of fileGroup) {
        fg.content = fg.index ? fs.readFileSync(fg.index) : fg.content;
    }
    const { files } = await ApillonApi.post(`${apiPrefix}/upload`, {
        // Strip the (potentially huge) content before sending metadata.
        files: fileGroup.map(({ content, ...rest }) => rest),
        sessionUuid,
    });
    await uploadFilesToS3(files, fileGroup);
    files.forEach((file) => delete file.url);
    return files;
}

/**
 * Register one chunk of files, pre-computing each file's IPFS CID locally so
 * gateway links can be requested up front, then push the content to S3.
 *
 * @returns {Promise<object[]>} API file records augmented with `CID` and the
 *   corresponding IPFS gateway `url`.
 */
async function uploadChunkWithCids(apiPrefix, sessionUuid, fileGroup) {
    const metadata = { files: [], urls: [], cids: [] };
    for (const fg of fileGroup) {
        const { content, ...rest } = fg;
        metadata.files.push(rest);
        const readContent = fg.index ? fs.readFileSync(fg.index) : fg.content;
        fg.content = readContent;
        // CID is computed locally (onlyHash) — nothing is imported anywhere.
        metadata.cids.push(await calculateCID(readContent, { cidVersion: 1 }));
    }
    const { links } = await ApillonApi.post(`/storage/link-on-ipfs-multiple`, {
        cids: metadata.cids,
    });
    metadata.urls = links;
    const { files } = await ApillonApi.post(`${apiPrefix}/upload`, {
        files: metadata.files,
        sessionUuid,
    });
    await uploadFilesToS3(files, fileGroup);
    return files.map((file, index) => ({
        ...file,
        CID: metadata.cids[index],
        url: metadata.urls[index],
    }));
}

/**
 * Enumerate the files in `folderPath`, optionally filtering out entries that
 * match `.gitignore` patterns plus a default ignore list (.git, .gitignore,
 * node_modules, .env).
 *
 * NOTE(review): patterns are applied as JavaScript regexes, not true gitignore
 * globs — pre-existing limitation, kept for compatibility.
 *
 * @param {string} folderPath - Root folder to scan.
 * @param {boolean} [ignoreFiles=true] - Whether to apply ignore patterns.
 * @returns {{fileName: string, path: string, index: string}[]}
 */
function readFilesFromFolder(folderPath, ignoreFiles = true) {
    const ignorePatterns = [];
    if (ignoreFiles) {
        ApillonLogger.log('Ignoring files from .gitignore during upload.');
        const gitignorePath = path.join(folderPath, '.gitignore');
        if (fs.existsSync(gitignorePath)) {
            // Trim CR/whitespace and drop blank + comment lines: an empty
            // pattern would compile to a regex matching EVERY file, which
            // previously caused all files to be filtered out when the
            // .gitignore ended with a newline.
            ignorePatterns.push(...fs
                .readFileSync(gitignorePath, 'utf-8')
                .split('\n')
                .map((line) => line.trim())
                .filter((line) => line.length > 0 && !line.startsWith('#')));
        }
        // Always ignored when ignoreFiles = true.
        ignorePatterns.push('\\.git/?$', '\\.gitignore$', 'node_modules/?', '\\.env$');
    }
    return listFilesRecursive(folderPath).filter((file) => !ignorePatterns.some((pattern) => new RegExp(pattern).test(file.fileName) ||
        new RegExp(pattern).test(file.path)));
}

/**
 * Recursively collect every file under `folderPath`.
 *
 * @param {string} folderPath - Directory to walk.
 * @param {object[]} [fileList] - Accumulator shared across recursive calls.
 * @param {string} [relativePath] - Current directory relative to the root.
 * @returns {{fileName: string, path: string, index: string}[]} Entries sorted
 *   by file name; `index` is the absolute/joined path used to read content.
 */
function listFilesRecursive(folderPath, fileList = [], relativePath = '') {
    for (const entry of fs.readdirSync(folderPath)) {
        const fullPath = path.join(folderPath, entry);
        const relativeFilePath = path.join(relativePath, entry);
        if (fs.statSync(fullPath).isDirectory()) {
            listFilesRecursive(fullPath, fileList, `${relativeFilePath}/`);
        }
        else {
            fileList.push({ fileName: entry, path: relativePath, index: fullPath });
        }
    }
    return fileList.sort((a, b) => a.fileName.localeCompare(b.fileName));
}

/**
 * PUT each file's content to its pre-signed S3 URL, in parallel.
 *
 * Fix: the previous implementation wrapped an async callback in
 * `new Promise(...)` without a reject path, so a failed PUT left the promise
 * pending forever and the error became an unhandled rejection. Rejections now
 * propagate through `Promise.all`.
 *
 * @param {object[]} uploadLinks - API records carrying `fileName`, `path`, `url`.
 * @param {object[]} files - Matching descriptors carrying `content`.
 * @throws {Error} When an upload link has no matching file, or a PUT fails.
 */
async function uploadFilesToS3(uploadLinks, files) {
    const s3Api = axios.create();
    await Promise.all(uploadLinks.map(async (link) => {
        const file = files.find((x) => x.fileName === link.fileName && (!x.path || x.path === link.path));
        if (!file) {
            throw new Error(`Can't find file ${link.path}${link.fileName}!`);
        }
        await s3Api.put(link.url, file.content);
        ApillonLogger.log(`File uploaded: ${file.fileName}`);
    }));
}

/**
 * Split `files` into consecutive groups of at most `chunkSize` for batched
 * registration and parallel uploading.
 *
 * @param {object[]} files
 * @param {number} [chunkSize=10]
 * @returns {object[][]}
 */
function chunkify(files, chunkSize = 10) {
    const fileChunks = [];
    for (let i = 0; i < files.length; i += chunkSize) {
        fileChunks.push(files.slice(i, i + chunkSize));
    }
    return fileChunks;
}

/**
 * Generate an RFC 4122 version 4 UUID from cryptographically secure random
 * bytes.
 *
 * @returns {string} Lower-case UUID string (8-4-4-4-12 hex groups).
 */
function uuidv4() {
    const bytes = randomBytes(16);
    bytes[6] = (bytes[6] & 0x0f) | 0x40; // version nibble -> 4
    bytes[8] = (bytes[8] & 0x3f) | 0x80; // RFC 4122 variant (8, 9, a, b)
    const hex = bytes.toString('hex');
    return [
        hex.substring(0, 8),
        hex.substring(8, 12),
        hex.substring(12, 16),
        hex.substring(16, 20),
        hex.substring(20),
    ].join('-');
}

/**
 * Calculate the IPFS CID of `content` without importing it into a block store.
 *
 * @param {string|Uint8Array} content - Data to hash; strings are UTF-8 encoded.
 * @param {object} options - ipfs-unixfs-importer options (e.g. `cidVersion`).
 * @returns {Promise<string>} String form of the root CID.
 */
const calculateCID = async (content, options) => {
    // Copy instead of mutating the caller's options object (the previous
    // implementation set `options.onlyHash = true` in place).
    const importOptions = { ...options, onlyHash: true };
    const data = typeof content === 'string' ? new TextEncoder().encode(content) : content;
    // With onlyHash set the importer must never touch the block API; throwing
    // here turns an unexpected read/write into a loud failure.
    const blockApi = {
        get: async (cid) => {
            throw new Error(`unexpected block API get for ${cid}`);
        },
        put: async () => {
            throw new Error('unexpected block API put');
        },
    };
    let lastCid;
    for await (const { cid } of importer([{ content: data }], blockApi, importOptions)) {
        // The last entry emitted by the importer is the root of the DAG.
        lastCid = cid;
    }
    return `${lastCid}`;
};
exports.calculateCID = calculateCID;