UNPKG

@occupop/lib-s3-storage

Version:

Tiny S3 storage helper (AWS & LocalStack) with injectable bucket, signed URLs and public URLs.

147 lines (145 loc) 6.08 kB
"use strict";
// ---- esbuild CommonJS interop prelude (generated helpers, kept verbatim in behavior) ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (
  target = mod != null ? __create(__getProtoOf(mod)) : {},
  __copyProps(
    // If the importer is in node compatibility mode or this is not an ESM
    // file that has been converted to a CommonJS file using a Babel-
    // compatible transform (i.e. "__esModule" has not been set), then set
    // "default" to the CommonJS "module.exports" for node compatibility.
    isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
    mod
  )
);
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var index_exports = {};
__export(index_exports, {
  createS3Storage: () => createS3Storage
});
module.exports = __toCommonJS(index_exports);

// src/s3-storage.ts
var import_client_s3 = require("@aws-sdk/client-s3");
var import_s3_request_presigner = require("@aws-sdk/s3-request-presigner");
var import_crc_32 = __toESM(require("crc-32"), 1);

/**
 * Create a tiny S3 storage helper bound to a single S3Client.
 *
 * @param {object} cfg
 * @param {string} cfg.region - AWS region for the client and raw URLs.
 * @param {object} [cfg.credentials] - Passed through to the S3Client.
 * @param {string} [cfg.endpoint] - Custom endpoint; a value matching /localstack/i
 *   switches the helper into LocalStack mode (path-style raw URLs, no accelerate).
 * @param {boolean} [cfg.forcePathStyle] - Forwarded to the S3Client when defined.
 * @param {boolean} [cfg.accelerate] - Use S3 Transfer Acceleration (ignored for LocalStack).
 * @param {string} [cfg.defaultBucket] - Bucket used when a call omits one.
 * @param {number} [cfg.defaultExpiresInSeconds=180] - Default presigned-URL TTL.
 * @param {"raw"|"signed"} [cfg.publicUrlMode="raw"] - How putObject/getPublicUrl build URLs.
 * @param {boolean} [cfg.publicAcl] - Attach ACL "public-read" on putObject.
 * @param {string} [cfg.cacheControl="private, max-age=0, no-cache"] - Default Cache-Control.
 * @returns {{getSignedPutUrl: Function, putObject: Function, getPublicUrl: Function, getSignedGetUrl: Function}}
 */
var createS3Storage = (cfg) => {
  // LocalStack is detected purely from the endpoint string; acceleration is
  // never meaningful against LocalStack, so it is masked out here.
  const isLocal = !!cfg.endpoint && /localstack/i.test(cfg.endpoint);
  const accelerate = !isLocal && !!cfg.accelerate;
  const region = cfg.region;
  const defaultBucket = cfg.defaultBucket ?? "";
  const defaultExpires = cfg.defaultExpiresInSeconds ?? 180;
  const publicMode = cfg.publicUrlMode ?? "raw";
  const publicAcl = !!cfg.publicAcl;
  const cacheControl = cfg.cacheControl ?? "private, max-age=0, no-cache";

  const client = new import_client_s3.S3Client({
    region: cfg.region,
    credentials: cfg.credentials,
    ...cfg.endpoint && { endpoint: cfg.endpoint },
    ...typeof cfg.forcePathStyle !== "undefined" && { forcePathStyle: cfg.forcePathStyle },
    // FIX: use the computed `accelerate` flag (which excludes LocalStack) so the
    // client agrees with buildRawUrl. Previously this spread read `cfg.accelerate`
    // directly, enabling the accelerate endpoint even in LocalStack mode.
    ...accelerate && { useAccelerateEndpoint: true }
  });

  // Resolve the bucket for a call: explicit override wins, else the default.
  // Throws rather than silently targeting an empty bucket name.
  const pickBucket = (override) => {
    const b = (override ?? defaultBucket).trim();
    if (!b) throw new Error("Bucket is required");
    return b;
  };

  // Strip leading slashes, collapse duplicate slashes, and drop a redundant
  // "<bucket>/" prefix when callers pass bucket-qualified paths.
  // FIX: the prefix is only removed when it is at the START of the key; the
  // previous `raw.replace(`${bucket}/`, "")` deleted the first occurrence
  // anywhere, corrupting keys like "uploads/<bucket>/file.png".
  const normalizeKey = (bucket, raw) => {
    const k = raw.replace(/^\/+/, "").replace(/\/{2,}/g, "/");
    return k.startsWith(`${bucket}/`) ? k.slice(bucket.length + 1) : k;
  };

  // Percent-encode each path segment while preserving "/" separators.
  const encodeKeyPath = (k) => k.split("/").map(encodeURIComponent).join("/");

  // Build an unsigned HTTPS URL for an object: path-style for LocalStack,
  // accelerate endpoint when enabled, otherwise the regional virtual-hosted form.
  const buildRawUrl = (bucket, key) => {
    const encoded = encodeKeyPath(key);
    if (isLocal && cfg.endpoint) {
      return `${cfg.endpoint.replace(/\/$/, "")}/${bucket}/${encoded}`;
    }
    if (accelerate) {
      return `https://${bucket}.s3-accelerate.amazonaws.com/${encoded}`;
    }
    return `https://${bucket}.s3.${region}.amazonaws.com/${encoded}`;
  };

  // CRC32 of a buffer as the base64 big-endian form S3 expects in ChecksumCRC32.
  // crc-32 returns a signed 32-bit int; `>>> 0` reinterprets it as unsigned
  // before serializing.
  const crc32Base64 = (buf) => {
    const unsigned = import_crc_32.default.buf(buf) >>> 0;
    const b = Buffer.alloc(4);
    b.writeUInt32BE(unsigned, 0);
    return b.toString("base64");
  };

  // Inline Content-Disposition header value, or undefined when no name given.
  // NOTE(review): currently only referenced from commented-out call sites below.
  const contentDisposition = (fileName) => fileName ? `inline; filename="${encodeURIComponent(fileName)}"` : void 0;

  /**
   * Presign a PUT so a client can upload directly to S3.
   * @returns {Promise<{url: string, bucket: string, key: string, expiresIn: number}>}
   */
  const getSignedPutUrl = async (input) => {
    const bucket = pickBucket(input.bucket);
    const key = normalizeKey(bucket, input.path);
    const expiresIn = input.expiresInSeconds ?? defaultExpires;
    const cmd = new import_client_s3.PutObjectCommand({
      Bucket: bucket,
      Key: key,
      ContentType: input.contentType,
      // ContentDisposition: contentDisposition(input.fileName),
      ...input.checksumCRC32Base64 ? { ChecksumCRC32: input.checksumCRC32Base64 } : {}
    });
    const url = await (0, import_s3_request_presigner.getSignedUrl)(client, cmd, { expiresIn });
    return { url, bucket, key, expiresIn };
  };

  /**
   * Upload a base64-encoded body server-side, with an integrity CRC32 checksum.
   * Returns a URL whose form follows `publicUrlMode` ("signed" or "raw").
   * @returns {Promise<{url: string, bucket: string, key: string}>}
   */
  const putObject = async (input) => {
    const bucket = pickBucket(input.bucket);
    const key = normalizeKey(bucket, input.path);
    const body = Buffer.from(input.bodyBase64, "base64");
    const checksum = crc32Base64(body);
    const commandInput = {
      Bucket: bucket,
      Key: key,
      Body: body,
      ContentType: input.contentType,
      // ContentDisposition: contentDisposition(input.fileName),
      CacheControl: input.cacheControl ?? cacheControl,
      ChecksumCRC32: checksum,
      ...publicAcl ? { ACL: "public-read" } : {}
    };
    const command = new import_client_s3.PutObjectCommand(commandInput);
    await client.send(command);
    const url = publicMode === "signed" ? (await getSignedGetUrl({ path: key, bucket })).url : buildRawUrl(bucket, key);
    return { url, bucket, key };
  };

  /**
   * Presign a GET for time-limited read access.
   * @returns {Promise<{url: string, bucket: string, key: string, expiresIn: number}>}
   */
  const getSignedGetUrl = async ({ path, bucket, expiresInSeconds }) => {
    const b = pickBucket(bucket);
    const key = normalizeKey(b, path);
    const expiresIn = expiresInSeconds ?? defaultExpires;
    const cmd = new import_client_s3.GetObjectCommand({ Bucket: b, Key: key });
    const url = await (0, import_s3_request_presigner.getSignedUrl)(client, cmd, { expiresIn });
    return { url, bucket: b, key, expiresIn };
  };

  /**
   * Build a read URL for an object. `mode` overrides the configured
   * publicUrlMode; "signed" presigns (default TTL 300s here), "raw" builds an
   * unsigned URL.
   * @returns {Promise<string>}
   */
  const getPublicUrl = async (input) => {
    const bucket = pickBucket(input.bucket);
    const key = normalizeKey(bucket, input.path);
    const mode = input.mode ?? publicMode;
    if (mode === "signed") {
      return (await getSignedGetUrl({ path: key, bucket, expiresInSeconds: input.expiresInSeconds ?? 300 })).url;
    }
    return buildRawUrl(bucket, key);
  };

  return { getSignedPutUrl, putObject, getPublicUrl, getSignedGetUrl };
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = { createS3Storage });
//# sourceMappingURL=index.cjs.map