/**
 * @nuvix/storage
 *
 * S3-compatible storage library for the Nuvix BaaS platform, with support
 * for AWS S3, Wasabi, MinIO, and local filesystem storage.
 */
;
var promises = require('fs/promises');
var fs = require('fs');
var path = require('path');
var crypto = require('crypto');
/**
 * Abstract base class for all storage devices (Local, S3, Wasabi, MinIO).
 *
 * Concrete subclasses implement `transfer`, `delete`, `read`, `write`, etc.;
 * this base provides chunk-size configuration, path normalization and
 * extension-based MIME type resolution.
 */
class Device {
    constructor() {
        /**
         * Max chunk size while transferring file from one device to another.
         */
        this.transferChunkSize = 20000000; // 20 MB
    }
    /**
     * Set Transfer Chunk Size
     * @param {number} chunkSize - Chunk size in bytes.
     */
    setTransferChunkSize(chunkSize) {
        this.transferChunkSize = chunkSize;
    }
    /**
     * Get Transfer Chunk Size
     * @returns {number} Chunk size in bytes.
     */
    getTransferChunkSize() {
        return this.transferChunkSize;
    }
    /**
     * Move file from given source to given path, return true on success and
     * false on failure.
     *
     * Implemented as transfer-then-delete; `transfer` and `delete` are
     * supplied by concrete subclasses.
     * @param {string} source
     * @param {string} target
     * @returns {Promise<boolean>}
     */
    async move(source, target) {
        if (source === target) {
            return false;
        }
        if (await this.transfer(source, target, this)) {
            return await this.delete(source);
        }
        return false;
    }
    /**
     * Get the absolute path by resolving strings like ../, .., //, /\ and so on.
     *
     * Works like the realpath function but works on files that do not exist.
     * Always returns a path with forward slashes and a leading "/".
     * @param {string} path
     * @returns {string}
     */
    getAbsolutePath(path) {
        const normalizedPath = path.replace(/[/\\]/g, "/");
        const parts = normalizedPath.split("/").filter((part) => part.length > 0);
        const absolutes = [];
        for (const part of parts) {
            if (part === ".") {
                continue;
            }
            if (part === "..") {
                // Popping on an empty stack is a no-op, so "/../a" resolves to "/a".
                absolutes.pop();
            }
            else {
                absolutes.push(part);
            }
        }
        return "/" + absolutes.join("/");
    }
    /**
     * Resolve a MIME type from the file extension.
     *
     * Node.js has no built-in MIME type module, so a static mapping is used.
     * Unknown extensions fall back to "application/octet-stream".
     * @param {string} filePath
     * @returns {Promise<string>}
     */
    async getMimeType(filePath) {
        // Use the module-level `path` require; the original re-imported the
        // module with `await import('path')` on every call, which is redundant.
        const ext = path.extname(filePath).toLowerCase().slice(1);
        const mimeTypes = {
            // Text
            txt: "text/plain",
            html: "text/html",
            htm: "text/html",
            css: "text/css",
            csv: "text/csv",
            xml: "text/xml",
            // JavaScript
            js: "application/javascript",
            mjs: "application/javascript",
            json: "application/json",
            // Images
            png: "image/png",
            jpg: "image/jpeg",
            jpeg: "image/jpeg",
            gif: "image/gif",
            svg: "image/svg+xml",
            webp: "image/webp",
            ico: "image/x-icon",
            bmp: "image/bmp",
            tiff: "image/tiff",
            tif: "image/tiff",
            // Documents
            pdf: "application/pdf",
            doc: "application/msword",
            docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
            xls: "application/vnd.ms-excel",
            xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            ppt: "application/vnd.ms-powerpoint",
            pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
            // Audio
            mp3: "audio/mpeg",
            wav: "audio/wav",
            ogg: "audio/ogg",
            m4a: "audio/mp4",
            // Video
            mp4: "video/mp4",
            avi: "video/x-msvideo",
            mov: "video/quicktime",
            wmv: "video/x-ms-wmv",
            flv: "video/x-flv",
            webm: "video/webm",
            // Archives
            zip: "application/zip",
            rar: "application/x-rar-compressed",
            tar: "application/x-tar",
            gz: "application/gzip",
            "7z": "application/x-7z-compressed",
            // Other
            bin: "application/octet-stream",
            exe: "application/octet-stream",
            dmg: "application/octet-stream",
        };
        return mimeTypes[ext] || "application/octet-stream";
    }
}
/**
 * Sets the maximum number of keys returned to the response. By default, the action returns up to 1,000 key names.
 */
Device.MAX_PAGE_SIZE = Number.MAX_SAFE_INTEGER;
/**
 * Registry of named storage devices plus misc. helpers.
 */
class Storage {
    /**
     * Set Device.
     *
     * Register a device under the given name (overwrites any existing entry).
     *
     * @param name
     * @param device
     */
    static setDevice(name, device) {
        Storage.devices.set(name, device);
    }
    /**
     * Get Device.
     *
     * Look up a previously registered device by name.
     *
     * @param name
     * @returns Device
     * @throws Error when no device was registered under `name`
     */
    static getDevice(name) {
        const device = Storage.devices.get(name);
        if (device === undefined && !Storage.devices.has(name)) {
            throw new Error(`The device "${name}" is not listed`);
        }
        return device;
    }
    /**
     * Exists.
     *
     * Check whether a device is registered under the given name.
     *
     * @param name
     * @returns boolean
     */
    static exists(name) {
        return Storage.devices.has(name);
    }
    /**
     * Human readable data size format from bytes input.
     *
     * Based on: https://stackoverflow.com/a/38659168/2299554
     *
     * The unit is chosen from the number of decimal digits of `bytes`.
     *
     * @param bytes
     * @param decimals
     * @param system "metric" (powers of 1000) or "binary" (powers of 1024)
     * @returns string
     */
    static human(bytes, decimals = 2, system = "metric") {
        const mod = system === "binary" ? 1024 : 1000;
        const units = {
            binary: ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"],
            metric: ["B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"],
        };
        const factor = Math.floor((String(bytes).length - 1) / 3);
        const value = (bytes / mod ** factor).toFixed(decimals);
        return `${value}${units[system][factor]}`;
    }
}
/**
 * Supported devices
 */
Storage.DEVICE_LOCAL = "local";
Storage.DEVICE_S3 = "s3";
// static readonly DEVICE_DO_SPACES = 'dospaces';
Storage.DEVICE_WASABI = "wasabi";
Storage.DEVICE_MINIO = "minio";
// static readonly DEVICE_BACKBLAZE = 'backblaze';
// static readonly DEVICE_LINODE = 'linode';
/**
 * Devices.
 *
 * List of all available storage devices
 */
Storage.devices = new Map();
/**
 * Base class for all input validators; exposes the validator type constants.
 */
class Validator {
    static TYPE_STRING = "string";
    static TYPE_ARRAY = "array";
    static TYPE_INTEGER = "integer";
    static TYPE_BOOLEAN = "boolean";
}
/**
 * Placeholder validator for file parameters.
 */
class File extends Validator {
    /**
     * Get Description
     */
    getDescription() {
        return "File is not valid";
    }
    /**
     * NOT MUCH RIGHT NOW.
     *
     * TODO think what to do here, currently only used for parameter to be present in SDKs
     *
     * Accepts every value unconditionally for now.
     *
     * @param name
     * @return boolean
     */
    isValid(name) {
        return true;
    }
}
/**
 * Validates that a filename carries one of the allowed extensions
 * (case-insensitive).
 */
class FileExt extends Validator {
    /**
     * @param allowed - list of permitted lowercase extensions (no dot)
     */
    constructor(allowed) {
        super();
        this.allowed = allowed;
    }
    /**
     * Get Description
     */
    getDescription() {
        return "File extension is not valid";
    }
    /**
     * Check whether the filename's extension is in the allowed list.
     */
    isValid(filename) {
        return this.allowed.includes(this.getFileExtension(filename).toLowerCase());
    }
    /**
     * Return the substring after the last "." or "" when there is no dot.
     */
    getFileExtension(filename) {
        const dot = filename.lastIndexOf(".");
        return dot === -1 ? "" : filename.slice(dot + 1);
    }
}
FileExt.TYPE_JPEG = "jpeg";
FileExt.TYPE_JPG = "jpg";
FileExt.TYPE_GIF = "gif";
FileExt.TYPE_PNG = "png";
FileExt.TYPE_GZIP = "gz";
FileExt.TYPE_ZIP = "zip";
/**
 * Validates filenames against a conservative character whitelist.
 */
class FileName extends Validator {
    /**
     * Get Description
     */
    getDescription() {
        return "Filename is not valid";
    }
    /**
     * The file name can only contain "a-z", "A-Z", "0-9" and "." and not empty.
     *
     * @param name - The filename to validate
     * @returns boolean indicating if the filename is valid
     */
    isValid(name) {
        return (
            typeof name === "string" &&
            name.length > 0 &&
            /^[a-zA-Z0-9.]+$/.test(name)
        );
    }
}
/**
 * Validates that a file size is a non-negative integer within a limit.
 */
class FileSize extends Validator {
    /**
     * @param max - Max size in bytes
     */
    constructor(max) {
        super();
        this.max = max;
    }
    /**
     * Get Description
     */
    getDescription() {
        return `File size can't be bigger than ${this.max}`;
    }
    /**
     * Finds whether a file size is smaller than required limit.
     *
     * Rejects non-integers (including NaN), negatives, and values above max.
     */
    isValid(fileSize) {
        return (
            Number.isInteger(fileSize) &&
            fileSize >= 0 &&
            fileSize <= this.max
        );
    }
}
/**
 * Validates a file's type by inspecting its leading magic bytes.
 */
class FileType extends Validator {
    /**
     * @param allowed - list of FileType.FILE_TYPE_* keys to accept
     * @throws Error when an unknown type key is passed
     */
    constructor(allowed) {
        super();
        /**
         * File Type Binaries.
         *
         * Magic-byte signatures matched against the start of the file.
         */
        this.types = {
            [FileType.FILE_TYPE_JPEG]: "\xFF\xD8\xFF",
            [FileType.FILE_TYPE_GIF]: "GIF",
            [FileType.FILE_TYPE_PNG]: "\x89\x50\x4e\x47\x0d\x0a",
            // FIX: was the MIME string "application/x-gzip", which can never
            // match file content; gzip's magic number is 0x1f 0x8b (RFC 1952).
            [FileType.FILE_TYPE_GZIP]: "\x1f\x8b",
        };
        for (const key of allowed) {
            if (!(key in this.types)) {
                throw new Error("Unknown file mime type");
            }
        }
        this.allowed = allowed;
    }
    /**
     * Get Description
     */
    getDescription() {
        return "File mime-type is not allowed ";
    }
    /**
     * Is Valid.
     *
     * Binary check to finds whether a file is of valid type
     *
     * Reads the file and compares its first bytes with the signatures of the
     * allowed types. Returns false for unreadable files.
     */
    async isValid(path) {
        try {
            const buffer = await promises.readFile(path);
            // Calculate the maximum signature length needed
            const maxSignatureLength = Math.max(...Object.values(this.types).map((sig) => sig.length));
            const bytesToRead = Math.min(buffer.length, Math.max(8, maxSignatureLength));
            const bytes = buffer.toString("binary", 0, bytesToRead);
            for (const key of this.allowed) {
                // Signature must appear at offset 0 (prefix match).
                if (bytes.indexOf(this.types[key]) === 0) {
                    return true;
                }
            }
            return false;
        }
        catch {
            return false;
        }
    }
}
/**
 * File Types Constants.
 */
FileType.FILE_TYPE_JPEG = "jpeg";
FileType.FILE_TYPE_GIF = "gif";
FileType.FILE_TYPE_PNG = "png";
FileType.FILE_TYPE_GZIP = "gz";
/**
 * Validates that a path points at an existing regular file on disk.
 */
class Upload extends Validator {
    /**
     * Get Description
     */
    getDescription() {
        return "Not a valid upload file";
    }
    /**
     * Check if a file is a valid upload file
     *
     * @param path - The file path to validate
     * @returns Promise that resolves to true if valid upload file, false otherwise
     */
    async isValid(path) {
        if (typeof path !== "string") {
            return false;
        }
        try {
            // stat throws for missing paths; isFile() rejects dirs/sockets/etc.
            return (await fs.promises.stat(path)).isFile();
        }
        catch {
            return false;
        }
    }
}
/**
 * Local filesystem storage adapter.
 *
 * Chunked uploads are staged in a sibling `tmp_<basename>` directory holding
 * one `.part.<n>` file per chunk and a `<basename>_chunks.log` file that
 * records which chunk numbers have arrived; once all chunks are present they
 * are concatenated into the final file and the staging dir is removed.
 */
class Local extends Device {
    constructor(root = "") {
        super();
        this.root = "temp"; // default placeholder, immediately overwritten below
        this.MAX_PAGE_SIZE = 1000;
        this.root = root;
    }
    getName() {
        return "Local Storage";
    }
    getType() {
        return Storage.DEVICE_LOCAL;
    }
    getDescription() {
        return "Adapter for Local storage that is in the physical or virtual machine or mounted to it.";
    }
    getRoot() {
        return this.root;
    }
    // Join filename onto the root and normalize; `prefix` is currently
    // unused here (kept for interface parity with other devices).
    getPath(filename, prefix) {
        return this.getAbsolutePath(path.join(this.getRoot(), filename));
    }
    /**
     * Upload a file (or one chunk of it) from a local temp path `source` to
     * `filePath`. Returns the number of chunks received so far.
     * For single-chunk uploads the source is simply renamed into place.
     */
    async upload(source, filePath, chunk = 1, chunks = 1, metadata = {}) {
        await this.createDirectory(path.dirname(filePath));
        if (chunks === 1) {
            try {
                await fs.promises.rename(source, filePath);
                return chunks;
            }
            catch {
                throw new Error(`Can't upload file ${filePath}`);
            }
        }
        // Staging area: tmp_<basename>/<basename>_chunks.log
        const tmp = path.join(path.dirname(filePath), `tmp_${path.basename(filePath)}`, `${path.basename(filePath)}_chunks.log`);
        await this.createDirectory(path.dirname(tmp));
        const chunkFilePath = path.join(path.dirname(tmp), `${path.parse(filePath).name}.part.${chunk}`);
        // Log the chunk number only once, so retried chunks are not double-counted.
        if (!(await this.exists(chunkFilePath))) {
            try {
                await fs.promises.appendFile(tmp, `${chunk}\n`);
            }
            catch {
                throw new Error(`Can't write chunk log ${tmp}`);
            }
        }
        try {
            const chunkLogs = await fs.promises.readFile(tmp, "utf8");
            const chunksReceived = chunkLogs.trim().split("\n").length;
            await fs.promises.rename(source, chunkFilePath);
            // Assemble the final file once every expected chunk has arrived.
            if (chunks === chunksReceived) {
                await this.joinChunks(filePath, chunks);
                return chunksReceived;
            }
            return chunksReceived;
        }
        catch {
            throw new Error(`Failed to write chunk ${chunk}`);
        }
    }
    /**
     * Same as upload() but the chunk content is passed in memory instead of
     * as a temp file path. NOTE: unlike upload(), the chunk log is appended
     * unconditionally, so a retried chunk inflates the received count —
     * TODO confirm whether retries are expected on this path.
     */
    async uploadData(data, filePath, contentType, chunk = 1, chunks = 1, metadata = {}) {
        await this.createDirectory(path.dirname(filePath));
        if (chunks === 1) {
            try {
                await fs.promises.writeFile(filePath, data);
                return chunks;
            }
            catch {
                throw new Error(`Can't write file ${filePath}`);
            }
        }
        const tmp = path.join(path.dirname(filePath), `tmp_${path.basename(filePath)}`, `${path.basename(filePath)}_chunks.log`);
        await this.createDirectory(path.dirname(tmp));
        try {
            await fs.promises.appendFile(tmp, `${chunk}\n`);
            const chunkLogs = await fs.promises.readFile(tmp, "utf8");
            const chunksReceived = chunkLogs.trim().split("\n").length;
            const chunkFilePath = path.join(path.dirname(tmp), `${path.parse(filePath).name}.part.${chunk}`);
            await fs.promises.writeFile(chunkFilePath, data);
            if (chunks === chunksReceived) {
                await this.joinChunks(filePath, chunks);
                return chunksReceived;
            }
            return chunksReceived;
        }
        catch {
            throw new Error(`Failed to write chunk ${chunk}`);
        }
    }
    /**
     * Concatenate chunks 1..chunks into `filePath` (in order), deleting each
     * part file as it is consumed, then remove the chunk log and staging dir.
     */
    async joinChunks(filePath, chunks) {
        const tmp = path.join(path.dirname(filePath), `tmp_${path.basename(filePath)}`, `${path.basename(filePath)}_chunks.log`);
        for (let i = 1; i <= chunks; i++) {
            const part = path.join(path.dirname(tmp), `${path.parse(filePath).name}.part.${i}`);
            try {
                const data = await fs.promises.readFile(part);
                await fs.promises.appendFile(filePath, data);
                await fs.promises.unlink(part);
            }
            catch {
                throw new Error(`Failed to read/append chunk ${part}`);
            }
        }
        await fs.promises.unlink(tmp);
        await fs.promises.rmdir(path.dirname(tmp));
    }
    /**
     * Copy `filePath` to `destination` on another device, streaming in
     * transferChunkSize pieces when the file is larger than one chunk.
     */
    async transfer(filePath, destination, device) {
        if (!(await this.exists(filePath))) {
            throw new Error("File Not Found");
        }
        const size = await this.getFileSize(filePath);
        const contentType = await this.getFileMimeType(filePath);
        if (size <= this.transferChunkSize) {
            const source = await this.read(filePath);
            return await device.write(destination, source, contentType);
        }
        const totalChunks = Math.ceil(size / this.transferChunkSize);
        const metadata = { content_type: contentType };
        for (let counter = 0; counter < totalChunks; counter++) {
            const start = counter * this.transferChunkSize;
            const data = await this.read(filePath, start, this.transferChunkSize);
            await device.uploadData(data, destination, contentType, counter + 1, totalChunks, metadata);
        }
        return true;
    }
    /**
     * Abort an in-progress chunked upload: delete the partial target file and
     * every staged chunk, then try to remove the staging directory.
     */
    async abort(filePath, extra = "") {
        if (await this.exists(filePath)) {
            await fs.promises.unlink(filePath);
        }
        const tmp = path.join(path.dirname(filePath), `tmp_${path.basename(filePath)}`);
        // dirname(tmp) === dirname(filePath): require the parent dir to exist.
        if (!(await this.exists(path.dirname(tmp)))) {
            throw new Error(`File doesn't exist: ${path.dirname(filePath)}`);
        }
        const files = await this.getFiles(tmp);
        for (const file of files) {
            await this.delete(file, true);
        }
        return fs.promises
            .rmdir(tmp)
            .then(() => true)
            .catch(() => false);
    }
    /**
     * Read `length` bytes starting at `offset` (defaults to the whole rest
     * of the file) and return them as a Buffer.
     */
    async read(filePath, offset = 0, length) {
        if (!(await this.exists(filePath))) {
            throw new Error("File Not Found");
        }
        const fileHandle = await fs.promises.open(filePath, "r");
        try {
            const size = length ?? (await fileHandle.stat()).size - offset;
            const buffer = Buffer.alloc(size);
            await fileHandle.read(buffer, 0, size, offset);
            return buffer;
        }
        finally {
            // Always release the handle, even when read() throws.
            await fileHandle.close();
        }
    }
    /**
     * Write `data` to `filePath`, creating parent directories as needed.
     * `contentType` is accepted for interface parity and ignored locally.
     */
    async write(filePath, data, contentType = "") {
        try {
            await this.createDirectory(path.dirname(filePath));
            await fs.promises.writeFile(filePath, data);
            return true;
        }
        catch {
            throw new Error(`Can't write to path ${filePath}`);
        }
    }
    /**
     * Rename `source` to `target` (same-device fast path); false on failure
     * or when source and target are identical.
     */
    async move(source, target) {
        if (source === target) {
            return false;
        }
        try {
            await this.createDirectory(path.dirname(target));
            await fs.promises.rename(source, target);
            return true;
        }
        catch {
            return false;
        }
    }
    /**
     * Delete a file or (when `recursive`) a directory tree. Returns false on
     * any error, including a missing path.
     */
    async delete(filePath, recursive = false) {
        try {
            const stats = await fs.promises.stat(filePath);
            if (stats.isDirectory() && recursive) {
                const files = await this.getFiles(filePath);
                for (const file of files) {
                    await this.delete(file, true);
                }
                await fs.promises.rmdir(filePath);
            }
            else if (stats.isFile() || stats.isSymbolicLink()) {
                await fs.promises.unlink(filePath);
            }
            return true;
        }
        catch {
            return false;
        }
    }
    /**
     * Recursively delete a directory given relative to the device root.
     * Returns false when the path is missing or not a directory.
     */
    async deletePath(filePath) {
        const fullPath = path.resolve(path.join(this.getRoot(), filePath));
        try {
            if (!(await this.exists(fullPath))) {
                return false;
            }
            const stats = await fs.promises.stat(fullPath);
            if (!stats.isDirectory()) {
                return false;
            }
            const files = await this.getFiles(fullPath);
            for (const file of files) {
                const stats = await fs.promises.stat(file);
                if (stats.isDirectory()) {
                    // Recurse with a root-relative path, matching this method's contract.
                    const relativePath = file.replace(this.getRoot() + path.sep, "");
                    await this.deletePath(relativePath);
                }
                else {
                    await this.delete(file, true);
                }
            }
            await fs.promises.rmdir(fullPath);
            return true;
        }
        catch {
            return false;
        }
    }
    // True when the path exists and is accessible to this process.
    async exists(filePath) {
        try {
            await fs.promises.access(filePath);
            return true;
        }
        catch {
            return false;
        }
    }
    // Size of the file in bytes (throws if the path is missing).
    async getFileSize(filePath) {
        const stats = await fs.promises.stat(filePath);
        return stats.size;
    }
    // Extension-based MIME lookup inherited from Device.
    async getFileMimeType(filePath) {
        return this.getMimeType(filePath);
    }
    // MD5 hex digest of the entire file content.
    async getFileHash(filePath) {
        const data = await fs.promises.readFile(filePath);
        return crypto.createHash("md5").update(data).digest("hex");
    }
    // mkdir -p; returns false instead of throwing on failure.
    async createDirectory(dirPath) {
        try {
            await fs.promises.mkdir(dirPath, { recursive: true, mode: 0o755 });
            return true;
        }
        catch {
            return false;
        }
    }
    /**
     * Recursive byte total of a directory, skipping dot-files; -1 on error.
     */
    async getDirectorySize(dirPath) {
        try {
            let size = 0;
            const files = await fs.promises.readdir(dirPath, { withFileTypes: true });
            for (const file of files) {
                if (file.name.startsWith("."))
                    continue;
                const fullPath = path.join(dirPath, file.name);
                if (file.isDirectory()) {
                    size += await this.getDirectorySize(fullPath);
                }
                else {
                    const stats = await fs.promises.stat(fullPath);
                    size += stats.size;
                }
            }
            return size;
        }
        catch {
            return -1;
        }
    }
    // Free bytes on the partition holding the root (statfs: bavail * bsize).
    async getPartitionFreeSpace() {
        const stats = await fs.promises.statfs(this.getRoot());
        return stats.bavail * stats.bsize;
    }
    // Total bytes on the partition holding the root.
    async getPartitionTotalSpace() {
        const stats = await fs.promises.statfs(this.getRoot());
        return stats.blocks * stats.bsize;
    }
    /**
     * List entry paths directly inside `dir` (non-recursive); [] on error.
     * NOTE(review): `max` and `continuationToken` are currently ignored —
     * all entries are returned in one page.
     */
    async getFiles(dir, max = this.MAX_PAGE_SIZE, continuationToken = "") {
        try {
            const entries = await fs.promises.readdir(dir, { withFileTypes: true });
            return entries.map((entry) => path.join(dir, entry.name));
        }
        catch {
            return [];
        }
    }
}
/**
 * S3 object storage adapter using AWS Signature Version 4 and `fetch`.
 *
 * Uses virtual-hosted-style requests (`<bucket>.<endpoint>`); multipart
 * uploads are driven through uploadData() with shared `metadata` state.
 */
class S3 extends Device {
    /**
     * @param root        key prefix inside the bucket
     * @param accessKey   AWS access key id
     * @param secretKey   AWS secret access key
     * @param bucket      bucket name
     * @param region      AWS region (default us-east-1)
     * @param acl         canned ACL applied on writes
     * @param endpointUrl optional custom endpoint (host without scheme)
     */
    constructor(root, accessKey, secretKey, bucket, region = S3.US_EAST_1, acl = S3.ACL_PRIVATE, endpointUrl = "") {
        super();
        this.acl = S3.ACL_PRIVATE;
        this.root = "temp"; // placeholder defaults, overwritten below
        // Mutable header state reused across requests; call() copies the
        // non-empty entries into each outgoing request.
        this.headers = {
            host: "",
            date: "",
            "content-md5": "",
            "content-type": "",
        };
        this.accessKey = accessKey;
        this.secretKey = secretKey;
        this.bucket = bucket;
        this.region = region;
        this.root = root;
        this.acl = acl;
        this.amzHeaders = {};
        let host;
        if (endpointUrl) {
            host = `${bucket}.${endpointUrl}`;
        }
        else {
            switch (region) {
                case S3.CN_NORTH_1:
                case S3.CN_NORTH_4:
                case S3.CN_NORTHWEST_1:
                    // China regions live under the amazonaws.cn domain.
                    host = `${bucket}.s3.${region}.amazonaws.cn`;
                    break;
                default:
                    host = `${bucket}.s3.${region}.amazonaws.com`;
            }
        }
        this.headers["host"] = host;
    }
    getName() {
        return "S3 Storage";
    }
    getType() {
        return Storage.DEVICE_S3;
    }
    getDescription() {
        return "S3 Bucket Storage drive for AWS or on premise solution";
    }
    getRoot() {
        return this.root;
    }
    /**
     * Build the object key for a filename under this device's root.
     * FIX: the body had been corrupted to `$(unknown)`; restored the
     * `${filename}` interpolation (mirrors Local.getPath).
     * `prefix` is accepted for interface parity and currently unused.
     */
    getPath(filename, prefix) {
        return `${this.getRoot()}/${filename}`;
    }
    getPartitionTotalSpace() {
        throw new Error("Method not implemented.");
    }
    static setRetryAttempts(attempts) {
        S3.retryAttempts = attempts;
    }
    static setRetryDelay(delay) {
        S3.retryDelay = delay;
    }
    /**
     * Upload a local file (path) or an in-memory Buffer to `path` in the bucket.
     */
    async upload(source, path, chunk = 1, chunks = 1, metadata = {}) {
        let data;
        let contentType;
        if (typeof source === "string") {
            const fs = await import('fs');
            data = await fs.promises.readFile(source);
            contentType = await this.getMimeType(source);
        }
        else {
            data = source;
            contentType = metadata.contentType || "application/octet-stream";
        }
        return this.uploadData(data, path, contentType, chunk, chunks, metadata);
    }
    /**
     * Upload one chunk. Single-chunk uploads are a plain PUT; otherwise a
     * multipart upload is created lazily, part ETags accumulate in
     * `metadata.parts`, and the upload is completed when all chunks arrived.
     * Returns the number of chunks uploaded so far.
     */
    async uploadData(data, path, contentType, chunk = 1, chunks = 1, metadata = {}) {
        if (chunk === 1 && chunks === 1) {
            await this.write(path, data, contentType);
            return 1;
        }
        let uploadId = metadata["uploadId"];
        if (!uploadId) {
            uploadId = await this.createMultipartUpload(path, contentType);
            metadata["uploadId"] = uploadId;
        }
        metadata["parts"] = metadata["parts"] || {};
        metadata["chunks"] = metadata["chunks"] || 0;
        const etag = await this.uploadPart(data, path, contentType, chunk, uploadId);
        const cleanETag = etag.replace(/^"|"$/g, "");
        // Count each chunk number once, even if it is re-uploaded.
        if (!(chunk in metadata["parts"])) {
            metadata["chunks"]++;
        }
        metadata["parts"][chunk] = cleanETag;
        if (metadata["chunks"] === chunks) {
            await this.completeMultipartUpload(path, uploadId, metadata["parts"]);
        }
        return metadata["chunks"];
    }
    /**
     * Copy an object to another device, chunking when it exceeds
     * transferChunkSize. Throws "File not found" on any failure.
     */
    async transfer(path, destination, device) {
        try {
            const response = await this.getInfo(path);
            const size = parseInt(response["content-length"] || "0");
            const contentType = response["content-type"] || "";
            if (size <= this.transferChunkSize) {
                const source = await this.read(path);
                return device.write(destination, source, contentType);
            }
            const totalChunks = Math.ceil(size / this.transferChunkSize);
            const metadata = { content_type: contentType };
            for (let counter = 0; counter < totalChunks; counter++) {
                const start = counter * this.transferChunkSize;
                const data = await this.read(path, start, this.transferChunkSize);
                await device.uploadData(data, destination, contentType, counter + 1, totalChunks, metadata);
            }
            return true;
        }
        catch (e) {
            throw new Error("File not found");
        }
    }
    /**
     * POST ?uploads to initiate a multipart upload; returns the UploadId
     * parsed from the XML response.
     */
    async createMultipartUpload(path, contentType) {
        const uri = path !== ""
            ? `/${encodeURIComponent(path).replace(/%2F/g, "/").replace(/%3F/g, "?")}`
            : "/";
        this.headers["content-md5"] = Buffer.from(this.md5("")).toString("base64");
        delete this.amzHeaders["x-amz-content-sha256"];
        this.headers["content-type"] = contentType;
        this.amzHeaders["x-amz-acl"] = this.acl;
        const response = await this.call(S3.METHOD_POST, uri, "", { uploads: "" });
        const uploadId = response.body?.InitiateMultipartUploadResult?.UploadId?.[0];
        return uploadId;
    }
    /**
     * PUT one part; returns the ETag header of the stored part.
     */
    async uploadPart(data, path, contentType, chunk, uploadId) {
        const uri = path !== ""
            ? `/${encodeURIComponent(path).replace(/%2F/g, "/").replace(/%3F/g, "?")}`
            : "/";
        this.headers["content-type"] = contentType;
        this.headers["content-md5"] = Buffer.from(this.md5(data)).toString("base64");
        this.amzHeaders["x-amz-content-sha256"] = this.sha256(data);
        delete this.amzHeaders["x-amz-acl"];
        const response = await this.call(S3.METHOD_PUT, uri, data, {
            partNumber: chunk.toString(),
            uploadId: uploadId,
        });
        return response.headers["etag"];
    }
    /**
     * POST the CompleteMultipartUpload XML listing every part's ETag.
     */
    async completeMultipartUpload(path, uploadId, parts) {
        const uri = path !== ""
            ? `/${encodeURIComponent(path).replace(/%2F/g, "/").replace(/%3F/g, "?")}`
            : "/";
        let body = "<CompleteMultipartUpload>";
        for (const [key, etag] of Object.entries(parts)) {
            body += `<Part><ETag>${etag}</ETag><PartNumber>${key}</PartNumber></Part>`;
        }
        body += "</CompleteMultipartUpload>";
        this.amzHeaders["x-amz-content-sha256"] = this.sha256(body);
        this.headers["content-md5"] = Buffer.from(this.md5(body)).toString("base64");
        await this.call(S3.METHOD_POST, uri, body, { uploadId });
        return true;
    }
    /**
     * Abort a multipart upload (`extra` carries the uploadId).
     */
    async abort(path, extra = "") {
        const uri = path !== "" ? `/${encodeURIComponent(path).replace(/%2F/g, "/")}` : "/";
        delete this.headers["content-type"];
        this.headers["content-md5"] = Buffer.from(this.md5("")).toString("base64");
        await this.call(S3.METHOD_DELETE, uri, "", { uploadId: extra });
        return true;
    }
    /**
     * GET an object (optionally a byte range) and return it as a Buffer.
     */
    async read(path, offset = 0, length) {
        delete this.amzHeaders["x-amz-acl"];
        delete this.amzHeaders["x-amz-content-sha256"];
        delete this.headers["content-type"];
        this.headers["content-md5"] = Buffer.from(this.md5("")).toString("base64");
        const uri = path !== "" ? `/${encodeURIComponent(path).replace(/%2F/g, "/")}` : "/";
        if (length !== undefined) {
            const end = offset + length - 1;
            this.headers["range"] = `bytes=${offset}-${end}`;
        }
        else {
            delete this.headers["range"];
        }
        // decode=false: keep the raw bytes, do not attempt XML parsing.
        const response = await this.call(S3.METHOD_GET, uri, "", {}, false);
        return Buffer.from(response.buffer);
    }
    /**
     * PUT an object with the configured ACL.
     */
    async write(path, data, contentType = "") {
        const uri = path !== ""
            ? `/${encodeURIComponent(path).replace(/%2F/g, "/").replace(/%3F/g, "?")}`
            : "/";
        this.headers["content-type"] = contentType;
        this.headers["content-md5"] = Buffer.from(this.md5(data)).toString("base64");
        this.amzHeaders["x-amz-content-sha256"] = this.sha256(data);
        this.amzHeaders["x-amz-acl"] = this.acl;
        await this.call(S3.METHOD_PUT, uri, data);
        return true;
    }
    /**
     * DELETE an object. `recursive` is accepted for interface parity.
     */
    async delete(path, recursive = false) {
        const uri = path !== "" ? `/${encodeURIComponent(path).replace(/%2F/g, "/")}` : "/";
        delete this.headers["content-type"];
        delete this.amzHeaders["x-amz-acl"];
        delete this.amzHeaders["x-amz-content-sha256"];
        this.headers["content-md5"] = Buffer.from(this.md5("")).toString("base64");
        await this.call(S3.METHOD_DELETE, uri);
        return true;
    }
    /**
     * ListObjectsV2 under a prefix; returns the parsed XML body.
     */
    async listObjects(prefix = "", maxKeys = S3.MAX_PAGE_SIZE, continuationToken = "") {
        if (maxKeys > S3.MAX_PAGE_SIZE) {
            throw new Error(`Cannot list more than ${S3.MAX_PAGE_SIZE} objects`);
        }
        const uri = "/";
        prefix = prefix.replace(/^\//, "");
        this.headers["content-type"] = "text/plain";
        this.headers["content-md5"] = Buffer.from(this.md5("")).toString("base64");
        delete this.amzHeaders["x-amz-content-sha256"];
        delete this.amzHeaders["x-amz-acl"];
        const parameters = {
            "list-type": "2",
            prefix: prefix,
            "max-keys": maxKeys.toString(),
        };
        if (continuationToken) {
            parameters["continuation-token"] = continuationToken;
        }
        const response = await this.call(S3.METHOD_GET, uri, "", parameters);
        return response.body;
    }
    /**
     * Batch-delete every object under `<root>/<path>` by paging through
     * ListObjectsV2 and issuing DeleteObjects (?delete) requests.
     */
    async deletePath(path) {
        path = `${this.getRoot()}/${path}`;
        const uri = "/";
        let continuationToken = "";
        do {
            const objects = await this.listObjects(path, S3.MAX_PAGE_SIZE, continuationToken);
            const count = parseInt(objects["KeyCount"] || "1");
            if (count < 1)
                break;
            continuationToken = objects["NextContinuationToken"] || "";
            let body = '<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">';
            if (count > 1) {
                for (const object of objects["Contents"]) {
                    body += `<Object><Key>${object["Key"]}</Key></Object>`;
                }
            }
            else {
                body += `<Object><Key>${objects["Contents"]["Key"]}</Key></Object>`;
            }
            body += "<Quiet>true</Quiet></Delete>";
            this.amzHeaders["x-amz-content-sha256"] = this.sha256(body);
            this.headers["content-md5"] = Buffer.from(this.md5(body)).toString("base64");
            await this.call(S3.METHOD_POST, uri, body, { delete: "" });
        } while (continuationToken);
        return true;
    }
    // HEAD-based existence check.
    async exists(path) {
        try {
            await this.getInfo(path);
            return true;
        }
        catch {
            return false;
        }
    }
    async getFileSize(path) {
        const response = await this.getInfo(path);
        return parseInt(response["content-length"] || "0");
    }
    async getFileMimeType(path) {
        const response = await this.getInfo(path);
        return response["content-type"] || "";
    }
    // ETag with surrounding quotes stripped (MD5 for non-multipart objects).
    async getFileHash(path) {
        const etag = (await this.getInfo(path))["etag"] || "";
        return etag ? etag.slice(1, -1) : etag;
    }
    async createDirectory(path) {
        return true; // S3 doesn't have directories
    }
    async getDirectorySize(path) {
        return -1;
    }
    async getPartitionFreeSpace() {
        return -1;
    }
    /**
     * List objects under `dir`; coerces IsTruncated/KeyCount/MaxKeys to
     * native types in the returned listing.
     */
    async getFiles(dir, max = S3.MAX_PAGE_SIZE, continuationToken = "") {
        const data = await this.listObjects(dir, max, continuationToken);
        data["IsTruncated"] = data["IsTruncated"] === "true";
        data["KeyCount"] = parseInt(data["KeyCount"]);
        data["MaxKeys"] = parseInt(data["MaxKeys"]);
        return data;
    }
    /**
     * HEAD an object and return its response headers.
     */
    async getInfo(path) {
        delete this.headers["content-type"];
        delete this.amzHeaders["x-amz-acl"];
        delete this.amzHeaders["x-amz-content-sha256"];
        this.headers["content-md5"] = Buffer.from(this.md5("")).toString("base64");
        const uri = path !== "" ? `/${encodeURIComponent(path).replace(/%2F/g, "/")}` : "/";
        const response = await this.call(S3.METHOD_HEAD, uri);
        return response.headers;
    }
    /**
     * Build the AWS Signature Version 4 Authorization header value for the
     * current `headers`/`amzHeaders` state.
     */
    getSignatureV4(method, uri, parameters = {}) {
        const service = "s3";
        const region = this.region;
        const algorithm = "AWS4-HMAC-SHA256";
        const combinedHeaders = {};
        const amzDateStamp = this.amzHeaders["x-amz-date"].substring(0, 8);
        // Combine headers (lowercased names, trimmed values per SigV4 rules)
        for (const [k, v] of Object.entries(this.headers)) {
            combinedHeaders[k.toLowerCase()] = v.trim();
        }
        for (const [k, v] of Object.entries(this.amzHeaders)) {
            combinedHeaders[k.toLowerCase()] = v.trim();
        }
        // Sort headers
        const sortedHeaders = Object.keys(combinedHeaders).sort();
        const sortedCombinedHeaders = {};
        for (const key of sortedHeaders) {
            sortedCombinedHeaders[key] = combinedHeaders[key];
        }
        // Sort parameters
        const sortedParams = Object.keys(parameters).sort();
        const queryString = sortedParams
            .map((key) => `${encodeURIComponent(key)}=${encodeURIComponent(parameters[key])}`)
            .join("&");
        // Create canonical request
        const canonicalRequest = [
            method,
            uri.split("?")[0],
            queryString,
            ...Object.entries(sortedCombinedHeaders).map(([k, v]) => `${k}:${v}`),
            "",
            Object.keys(sortedCombinedHeaders).join(";"),
            this.amzHeaders["x-amz-content-sha256"],
        ].join("\n");
        // Create string to sign
        const credentialScope = [
            amzDateStamp,
            region,
            service,
            "aws4_request",
        ].join("/");
        const stringToSign = [
            algorithm,
            this.amzHeaders["x-amz-date"],
            credentialScope,
            this.sha256(canonicalRequest),
        ].join("\n");
        // Calculate signature via the chained HMAC key derivation
        const kSecret = `AWS4${this.secretKey}`;
        const kDate = this.hmacSha256(amzDateStamp, kSecret);
        const kRegion = this.hmacSha256(region, kDate);
        const kService = this.hmacSha256(service, kRegion);
        const kSigning = this.hmacSha256("aws4_request", kService);
        const signature = this.hmacSha256(stringToSign, kSigning, "hex");
        return `${algorithm} Credential=${this.accessKey}/${credentialScope},SignedHeaders=${Object.keys(sortedCombinedHeaders).join(";")},Signature=${signature}`;
    }
    /**
     * Perform a signed HTTP request; throws on non-2xx. When `decode` is true
     * and the response looks like XML, the body is parsed via xml2js.
     * Returns { body, buffer, headers, code }.
     */
    async call(method, uri, data = "", parameters = {}, decode = true) {
        uri = this.getAbsolutePath(uri);
        const queryString = Object.keys(parameters).length
            ? "?" +
                Object.entries(parameters)
                    .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`)
                    .join("&")
            : "";
        const url = `https://${this.headers["host"]}${uri}${queryString}`;
        // ISO 8601 basic format timestamp required by SigV4 (YYYYMMDDTHHMMSSZ)
        this.amzHeaders["x-amz-date"] = new Date()
            .toISOString()
            .replace(/[:-]|\.\d{3}/g, "");
        if (!this.amzHeaders["x-amz-content-sha256"]) {
            this.amzHeaders["x-amz-content-sha256"] = this.sha256(data);
        }
        const headers = {};
        for (const [header, value] of Object.entries(this.amzHeaders)) {
            if (value.length > 0) {
                headers[header] = value;
            }
        }
        this.headers["date"] = new Date().toUTCString();
        for (const [header, value] of Object.entries(this.headers)) {
            if (value.length > 0) {
                headers[header] = value;
            }
        }
        headers["Authorization"] = this.getSignatureV4(method, uri, parameters);
        const fetchOptions = {
            method,
            headers,
        };
        if (method === S3.METHOD_PUT || method === S3.METHOD_POST) {
            fetchOptions.body = data;
        }
        const response = await fetch(url, fetchOptions);
        if (!response.ok) {
            const errorBody = await response.text();
            throw new Error(`HTTP ${response.status}: ${errorBody}`);
        }
        const responseHeaders = {};
        response.headers.forEach((value, key) => {
            responseHeaders[key.toLowerCase()] = value;
        });
        const buffer = await response.arrayBuffer();
        let body = new TextDecoder().decode(buffer);
        if (decode &&
            (responseHeaders["content-type"] === "application/xml" ||
                (body.startsWith("<?xml") &&
                    responseHeaders["content-type"] !== "image/svg+xml"))) {
            const xml2js = await import('xml2js');
            const parser = new xml2js.Parser();
            body = await parser.parseStringPromise(body);
        }
        return {
            body,
            buffer,
            headers: responseHeaders,
            code: response.status,
        };
    }
    // Raw MD5 digest (Buffer); base64-encoded by callers for Content-MD5.
    md5(data) {
        const buffer = typeof data === "string" ? Buffer.from(data, "utf8") : data;
        return crypto.createHash("md5").update(buffer).digest();
    }
    // Hex-encoded SHA-256 digest, as required for x-amz-content-sha256.
    sha256(data) {
        const buffer = typeof data === "string" ? Buffer.from(data, "utf8") : data;
        return crypto.createHash("sha256").update(buffer).digest("hex");
    }
    // HMAC-SHA256; returns a Buffer by default, hex string when requested.
    hmacSha256(data, key, encoding = "binary") {
        const dataBuffer = typeof data === "string" ? Buffer.from(data, "utf8") : data;
        const hmac = crypto.createHmac("sha256", key);
        hmac.update(dataBuffer);
        return encoding === "hex" ? hmac.digest("hex") : hmac.digest();
    }
}
// HTTP Methods
S3.METHOD_GET = "GET";
S3.METHOD_POST = "POST";
S3.METHOD_PUT = "PUT";
S3.METHOD_PATCH = "PATCH";
S3.METHOD_DELETE = "DELETE";
S3.METHOD_HEAD = "HEAD";
S3.METHOD_OPTIONS = "OPTIONS";
S3.METHOD_CONNECT = "CONNECT";
S3.METHOD_TRACE = "TRACE";
// AWS Regions
S3.US_EAST_1 = "us-east-1";
S3.US_EAST_2 = "us-east-2";
S3.US_WEST_1 = "us-west-1";
S3.US_WEST_2 = "us-west-2";
S3.AF_SOUTH_1 = "af-south-1";
S3.AP_EAST_1 = "ap-east-1";
S3.AP_SOUTH_1 = "ap-south-1";
S3.AP_NORTHEAST_3 = "ap-northeast-3";
S3.AP_NORTHEAST_2 = "ap-northeast-2";
S3.AP_NORTHEAST_1 = "ap-northeast-1";
S3.AP_SOUTHEAST_1 = "ap-southeast-1";
S3.AP_SOUTHEAST_2 = "ap-southeast-2";
S3.CA_CENTRAL_1 = "ca-central-1";
S3.EU_CENTRAL_1 = "eu-central-1";
S3.EU_WEST_1 = "eu-west-1";
S3.EU_SOUTH_1 = "eu-south-1";
S3.EU_WEST_2 = "eu-west-2";
S3.EU_WEST_3 = "eu-west-3";
S3.EU_NORTH_1 = "eu-north-1";
// FIX: was "eu-north-1" (copy-paste from the line above); São Paulo's
// region code is "sa-east-1".
S3.SA_EAST_1 = "sa-east-1";
S3.CN_NORTH_1 = "cn-north-1";
S3.CN_NORTH_4 = "cn-north-4";
S3.CN_NORTHWEST_1 = "cn-northwest-1";
S3.ME_SOUTH_1 = "me-south-1";
S3.US_GOV_EAST_1 = "us-gov-east-1";
S3.US_GOV_WEST_1 = "us-gov-west-1";
// ACL Flags
S3.ACL_PRIVATE = "private";
S3.ACL_PUBLIC_READ = "public-read";
S3.ACL_PUBLIC_READ_WRITE = "public-read-write";
S3.ACL_AUTHENTICATED_READ = "authenticated-read";
S3.MAX_PAGE_SIZE = 1000;
S3.retryAttempts = 3;
S3.retryDelay = 500;
/**
 * Wasabi hot cloud storage adapter (S3-compatible API).
 */
class Wasabi extends S3 {
    /**
     * Wasabi Constructor
     */
    constructor(root, accessKey, secretKey, bucket, region = Wasabi.EU_CENTRAL_1, acl = S3.ACL_PRIVATE) {
        super(root, accessKey, secretKey, bucket, region, acl);
        // Point the virtual-hosted-style host at Wasabi instead of AWS.
        this.headers["host"] = `${bucket}.s3.${region}.wasabisys.com`;
    }
    getName() {
        return "Wasabi Storage";
    }
    getDescription() {
        return "Wasabi Storage";
    }
    getType() {
        return Storage.DEVICE_WASABI;
    }
}
/**
 * Regions constants
 */
Wasabi.US_WEST_1 = "us-west-1";
Wasabi.AP_NORTHEAST_1 = "ap-northeast-1";
Wasabi.AP_NORTHEAST_2 = "ap-northeast-2";
Wasabi.EU_CENTRAL_1 = "eu-central-1";
Wasabi.EU_CENTRAL_2 = "eu-central-2";
Wasabi.EU_WEST_1 = "eu-west-1";
Wasabi.EU_WEST_2 = "eu-west-2";
Wasabi.US_CENTRAL_1 = "us-central-1";
Wasabi.US_EAST_1 = "us-east-1";
Wasabi.US_EAST_2 = "us-east-2";
/**
 * MinIO adapter: S3-compatible storage using path-style URLs against a
 * custom endpoint (default localhost:9000).
 */
class MinIO extends S3 {
    /**
     * MinIO Constructor
     */
    constructor(root, accessKey, secretKey, bucket, endpoint = "localhost:9000", acl = S3.ACL_PRIVATE, useSSL = false) {
        const protocol = useSSL ? "https" : "http";
        // Remove protocol from endpoint if it exists
        const cleanEndpoint = endpoint.replace(/^https?:\/\//, "");
        super(root, accessKey, secretKey, bucket, "us-east-1", acl, cleanEndpoint);
        // Override the host for MinIO
        this.headers["host"] = `${bucket}.${cleanEndpoint}`;
        // Store the full endpoint URL for MinIO-specific operations
        this.endpointUrl = `${protocol}://${cleanEndpoint}`;
    }
    getName() {
        return "MinIO Storage";
    }
    getDescription() {
        return "MinIO S3-compatible object storage server";
    }
    getType() {
        return Storage.DEVICE_MINIO;
    }
    /**
     * Override the call method to use the custom endpoint.
     *
     * Fixes over the previous version:
     * - the `host` header is restored in a `finally`, so a thrown fetch no
     *   longer leaves the instance with a bucket-less host;
     * - the response is read via arrayBuffer() and returned as `buffer`,
     *   matching S3.call's contract — the inherited S3.read() depends on
     *   `response.buffer` and previously crashed for MinIO.
     */
    async call(method, uri, data = "", parameters = {}, decode = true) {
        // Temporarily modify the host header to not include the bucket for MinIO
        const originalHost = this.headers["host"];
        // For MinIO, we need to handle path-style URLs differently
        const bucketName = originalHost.split(".")[0];
        const endpoint = originalHost.split(".").slice(1).join(".");
        // Use path-style URLs for MinIO
        this.headers["host"] = endpoint;
        try {
            const queryString = Object.keys(parameters).length
                ? "?" +
                    Object.entries(parameters)
                        .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`)
                        .join("&")
                : "";
            const pathStyleUri = `/${bucketName}${this.getAbsolutePath(uri)}`;
            const url = `${this.endpointUrl}${pathStyleUri}${queryString}`;
            this.amzHeaders["x-amz-date"] = new Date()
                .toISOString()
                .replace(/[:-]|\.\d{3}/g, "");
            if (!this.amzHeaders["x-amz-content-sha256"]) {
                this.amzHeaders["x-amz-content-sha256"] = this.sha256(data);
            }
            const headers = {};
            for (const [header, value] of Object.entries(this.amzHeaders)) {
                if (value.length > 0) {
                    headers[header] = value;
                }
            }
            this.headers["date"] = new Date().toUTCString();
            for (const [header, value] of Object.entries(this.headers)) {
                if (value.length > 0) {
                    headers[header] = value;
                }
            }
            // Sign against the path-style URI, not the virtual-hosted one.
            headers["Authorization"] = this.getSignatureV4(method, pathStyleUri, parameters);
            const fetchOptions = {
                method,
                headers,
            };
            if (method === S3.METHOD_PUT || method === S3.METHOD_POST) {
                fetchOptions.body = data;
            }
            const response = await fetch(url, fetchOptions);
            if (!response.ok) {
                const errorBody = await response.text();
                throw new Error(`HTTP ${response.status}: ${errorBody}`);
            }
            const responseHeaders = {};
            response.headers.forEach((value, key) => {
                responseHeaders[key.toLowerCase()] = value;
            });
            const buffer = await response.arrayBuffer();
            let body = new TextDecoder().decode(buffer);
            if (decode &&
                (responseHeaders["content-type"] === "application/xml" ||
                    (body.startsWith("<?xml") &&
                        responseHeaders["content-type"] !== "image/svg+xml"))) {
                const xml2js = await import('xml2js');
                const parser = new xml2js.Parser();
                body = await parser.parseStringPromise(body);
            }
            return {
                body,
                buffer,
                headers: responseHeaders,
                code: response.status,
            };
        }
        finally {
            // Restore original host even when fetch or parsing throws.
            this.headers["host"] = originalHost;
        }
    }
}
// Public API of the @nuvix/storage CommonJS bundle.
exports.Device = Device;
exports.File = File;
exports.FileExt = FileExt;
exports.FileName = FileName;
exports.FileSize = FileSize;
exports.FileType = FileType;
exports.Local = Local;
exports.MinIO = MinIO;
exports.S3 = S3;
exports.Storage = Storage;
exports.Upload = Upload;
exports.Wasabi = Wasabi;
//# sourceMappingURL=index.cjs.js.map