// balena-sdk: release asset model (compiled CommonJS output)
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const tslib_1 = require("tslib"); const util_1 = require("../util"); const balena_errors_1 = require("balena-errors"); const once_1 = tslib_1.__importDefault(require("lodash/once")); class ReleaseAssetAlreadyExists extends balena_errors_1.BalenaError { constructor(release, asset_key) { super(new Error(`Release asset combination of ${release} and ${asset_key} already exists`)); } } const MINIMUM_MULTIPART_SIZE = 5 * 1024 * 1024; // 5MiB const DEFAULT_MULTIPART_CHUNK_SIZE = 5 * 1024 * 1024; // 5MiB const DEFAULT_MULTIPART_PARALLEL_UPLOAD = 5; const getFileBasename = (filePath) => { var _a, _b; return (_b = (_a = filePath.split('/').pop()) !== null && _a !== void 0 ? _a : filePath.split('\\').pop()) !== null && _b !== void 0 ? _b : filePath; }; const getReleaseAssetModel = function (deps, opts, getRelease) { const assetHelpers = (0, once_1.default)(() => (opts.isBrowser ? // eslint-disable-next-line @typescript-eslint/no-require-imports require('../util/asset-helpers.browser') : // eslint-disable-next-line @typescript-eslint/no-require-imports require('../util/asset-helpers')).assetHelpers); const { pine, request } = deps; const parseUrlForRequest = (href) => { const url = new URL(href); const baseUrl = `${url.protocol}//${url.host}`; const pathname = url.pathname; const qs = {}; url.searchParams.forEach((value, key) => { qs[key] = value; }); return { baseUrl, url: pathname, qs, sendToken: false, }; }; const uploadPart = async (asset, part, requestedPartSize) => { var _a; let body; const offset = (part.partNumber - 1) * requestedPartSize; if (typeof asset === 'string') { const buffer = await assetHelpers().readFileChunk(asset, offset, part.chunkSize); body = buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); } else { const end = offset + part.chunkSize; body = asset.slice(offset, end); } // We directly use fetch API rather than balena-request because // pre-signed S3 URLs contain authentication in query parameters // and must be used exactly as provided without modification (encoding/decoding for balena-request) const res = await fetch(part.url, { method: 'PUT', body, }); const ETag = (_a = res.headers.get('ETag')) === null || _a === void 0 ? 
    const uploadParts = async (asset, parts, uploadParams, totalSize, onUploadProgress) => {
        let totalUploaded = 0;
        return await (0, util_1.limitedMap)(parts, async (part) => {
            const result = await uploadPart(asset, part, uploadParams.chunkSize);
            totalUploaded += part.chunkSize;
            if (onUploadProgress != null) {
                await onUploadProgress({
                    total: totalSize,
                    // The final chunk may be smaller than chunkSize, so clamp to the real total.
                    uploaded: Math.min(totalUploaded, totalSize),
                });
            }
            return result;
        }, { concurrency: uploadParams.parallelUploads });
    };
    const uploadMultipartReleaseAsset = async (releaseAssetId, asset, uploadParams, onUploadProgress) => {
        let metadata;
        if (typeof asset === 'string') {
            const size = await assetHelpers().getFileSize(asset);
            metadata = {
                filename: getFileBasename(asset),
                content_type: assetHelpers().getMimeType(asset),
                size,
            };
        }
        else {
            metadata = {
                filename: asset.name,
                content_type: asset.type || 'application/octet-stream',
                size: asset.size,
            };
        }
        const beginUploadResponse = (await pine.post({
            resource: 'release_asset',
            id: releaseAssetId,
            action: 'beginUpload',
            body: {
                asset: Object.assign(Object.assign({}, metadata), { chunk_size: uploadParams.chunkSize }),
            },
        }));
        await pine.post({
            resource: 'release_asset',
            id: releaseAssetId,
            action: 'commitUpload',
            body: {
                uuid: beginUploadResponse.asset.uuid,
                providerCommitData: {
                    Parts: await uploadParts(asset, beginUploadResponse.asset.uploadParts, uploadParams, metadata.size, onUploadProgress),
                },
            },
        });
    };
    const createMultipartReleaseAsset = async ({ asset, asset_key, release }, uploadParams, onUploadProgress) => {
        const releaseAsset = await pine.post({
            resource: 'release_asset',
            body: {
                asset_key,
                release,
                asset: null,
            },
        });
        if (asset != null) {
            await uploadMultipartReleaseAsset(releaseAsset.id, asset, uploadParams, onUploadProgress);
        }
        return await exports.get(releaseAsset.id);
    };
    const getId = async (id) => {
        return typeof id === 'number' || typeof id.release === 'number'
            ? // @ts-expect-error - typescript should be able to infer this
                id
            : {
                release: (await getRelease(id.release, { $select: 'id' })).id,
                asset_key: id.asset_key,
            };
    };
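    // Assumed wire shapes (inferred from the property accesses above, not
    // documented in this file): `beginUpload` is expected to respond with
    //
    //   { asset: { uuid: '<upload-uuid>', uploadParts: [{ partNumber, chunkSize, url }, ...] } }
    //
    // and `commitUpload` is sent the S3-style part list produced by `uploadParts`:
    //
    //   { uuid: '<upload-uuid>', providerCommitData: { Parts: [{ PartNumber, ETag }, ...] } }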
    const exports = {
        async getAllByRelease(commitOrIdOrRawVersion, options) {
            const release = await getRelease(commitOrIdOrRawVersion, {
                $select: 'id',
                $expand: {
                    release_asset: (0, util_1.mergePineOptions)({ $orderby: { id: 'asc' } }, options),
                },
            });
            return release.release_asset;
        },
        async get(id, options) {
            const releaseAssetId = await getId(id);
            const releaseAsset = await pine.get({
                resource: 'release_asset',
                id: releaseAssetId,
                options,
            });
            if (releaseAsset == null) {
                throw new Error(`Release asset not found: '${JSON.stringify(releaseAssetId)}'`);
            }
            return releaseAsset;
        },
        async download(id) {
            const { asset } = await exports.get(id, { $select: 'asset' });
            if (asset == null) {
                throw new Error('Release asset does not contain any uploaded file');
            }
            return await request.stream(Object.assign({ method: 'GET' }, parseUrlForRequest(asset.href)));
        },
        async upload(uploadParams, { chunkSize = DEFAULT_MULTIPART_CHUNK_SIZE, parallelUploads = DEFAULT_MULTIPART_PARALLEL_UPLOAD, overwrite = false, onUploadProgress, } = {}) {
            const { asset } = uploadParams, restParams = tslib_1.__rest(uploadParams, ["asset"]);
            let size;
            let normalizedParams;
            if (typeof asset === 'string') {
                size = await assetHelpers().getFileSize(asset);
                if (size <= MINIMUM_MULTIPART_SIZE) {
                    const buffer = await assetHelpers().readFileChunk(asset, 0, size);
                    normalizedParams = Object.assign(Object.assign({}, restParams), { asset: new File([buffer], getFileBasename(asset), {
                            type: assetHelpers().getMimeType(asset),
                        }) });
                }
            }
            else {
                size = asset.size;
                if (size <= MINIMUM_MULTIPART_SIZE) {
                    normalizedParams = uploadParams;
                }
            }
            // The pattern here of first getting the release_asset and then issuing a
            // post/patch differs slightly from most places, where we would use upsert.
            // The main reason is to avoid posting a file (higher bandwidth usage) that
            // we know would fail, so we first do a get and, based on that result and
            // the overwrite flag, either patch or post.
            const existingReleaseAsset = await pine.get({
                resource: 'release_asset',
                id: { asset_key: restParams.asset_key, release: restParams.release },
                options: { $select: 'id' },
            });
            if (existingReleaseAsset != null && !overwrite) {
                throw new ReleaseAssetAlreadyExists(restParams.release, restParams.asset_key);
            }
            if (size <= MINIMUM_MULTIPART_SIZE) {
                // Small asset: a single multipart request (file on the wire)
                if (onUploadProgress != null) {
                    await onUploadProgress({
                        total: size,
                        uploaded: 0,
                    });
                }
                let result;
                if (existingReleaseAsset != null) {
                    await pine.patch({
                        id: existingReleaseAsset.id,
                        resource: 'release_asset',
                        body: { asset: normalizedParams.asset },
                    });
                    result = await exports.get(existingReleaseAsset.id);
                }
                else {
                    result = await pine.post({
                        resource: 'release_asset',
                        body: normalizedParams,
                    });
                }
                if (onUploadProgress != null) {
                    await onUploadProgress({
                        total: size,
                        uploaded: size,
                    });
                }
                return result;
            }
            // Large asset: chunked multipart upload
            if (existingReleaseAsset != null) {
                await uploadMultipartReleaseAsset(existingReleaseAsset.id, asset, {
                    chunkSize,
                    parallelUploads,
                }, onUploadProgress);
                return await exports.get(existingReleaseAsset.id);
            }
            return await createMultipartReleaseAsset(uploadParams, {
                chunkSize,
                parallelUploads,
            }, onUploadProgress);
        },
        async remove(id) {
            await pine.delete({
                resource: 'release_asset',
                id: await getId(id),
            });
        },
    };
    return exports;
};
exports.default = getReleaseAssetModel;
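// Usage sketch (illustrative only): the SDK core supplies `deps`, `opts` and
// `getRelease` when calling this factory; assuming `releaseAsset` is the object
// it returns:
//
//   const releaseAsset = getReleaseAssetModel(deps, opts, getRelease);
//   await releaseAsset.upload(
//       { release: 123, asset_key: 'notes', asset: new File(['hello'], 'notes.txt', { type: 'text/plain' }) },
//       { overwrite: true, onUploadProgress: ({ uploaded, total }) => console.log(uploaded, total) },
//   );
//   const assets = await releaseAsset.getAllByRelease(123);
//   const stream = await releaseAsset.download({ release: 123, asset_key: 'notes' });
//   await releaseAsset.remove({ release: 123, asset_key: 'notes' });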