// s3mini — Tiny & fast S3 client for node and edge computing platforms
// ── AWS Signature V4 / S3 protocol constants ──
const AWS_ALGORITHM = 'AWS4-HMAC-SHA256';
const AWS_REQUEST_TYPE = 'aws4_request';
const S3_SERVICE = 's3';
const LIST_TYPE = '2'; // ListObjectsV2
const UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD'; // SigV4: request body is not hashed
const DEFAULT_STREAM_CONTENT_TYPE = 'application/octet-stream';
const XML_CONTENT_TYPE = 'application/xml';
// List of keys that might contain sensitive information; values under these
// keys are replaced with '[REDACTED]' before logging (see _sanitize).
const SENSITIVE_KEYS_REDACTED = ['accessKeyId', 'secretAccessKey', 'sessionToken', 'password', 'token'];
// Default single-request/part size: 8 MiB (matches AWS S3 multipart default).
const DEFAULT_REQUEST_SIZE_IN_BYTES = 8 * 1024 * 1024;
// ── HTTP header names (lowercase, as used in canonical signing) ──
const HEADER_AMZ_CONTENT_SHA256 = 'x-amz-content-sha256';
const HEADER_AMZ_CHECKSUM_SHA256 = 'x-amz-checksum-sha256';
const HEADER_AMZ_DATE = 'x-amz-date';
const HEADER_HOST = 'host';
const HEADER_AUTHORIZATION = 'authorization';
const HEADER_CONTENT_TYPE = 'content-type';
const HEADER_CONTENT_LENGTH = 'content-length';
const HEADER_ETAG = 'etag';
// ── Error messages (all prefixed for easy grepping in logs) ──
const ERROR_PREFIX = '[s3mini] ';
const ERROR_ACCESS_KEY_REQUIRED = `${ERROR_PREFIX}accessKeyId must be a non-empty string`;
const ERROR_SECRET_KEY_REQUIRED = `${ERROR_PREFIX}secretAccessKey must be a non-empty string`;
const ERROR_ENDPOINT_REQUIRED = `${ERROR_PREFIX}endpoint must be a non-empty string`;
const ERROR_ENDPOINT_FORMAT = `${ERROR_PREFIX}endpoint must be a valid URL. Expected format: https://<host>[:port][/base-path]`;
const ERROR_KEY_REQUIRED = `${ERROR_PREFIX}key must be a non-empty string`;
const ERROR_UPLOAD_ID_REQUIRED = `${ERROR_PREFIX}uploadId must be a non-empty string`;
const ERROR_DATA_BUFFER_REQUIRED = `${ERROR_PREFIX}data must be a Buffer or string`;
const ERROR_PREFIX_TYPE = `${ERROR_PREFIX}prefix must be a string`;
const ERROR_DELIMITER_REQUIRED = `${ERROR_PREFIX}delimiter must be a string`;
// Shared UTF-8 encoder used by the hashing/signing helpers below.
const ENCODR = new TextEncoder();
const chunkSize = 0x8000; // 32KB chunks
const HEXS = '0123456789abcdef'; // lookup table for hexFromBuffer
/**
 * Byte length of a request body in any supported representation.
 * Strings are measured as UTF-8; ArrayBuffers and any ArrayBuffer view
 * (TypedArrays, DataView, Node Buffers) report byteLength; Blobs report size.
 * @param {string|ArrayBuffer|ArrayBufferView|Blob} data Payload to measure.
 * @returns {number} Size in bytes.
 * @throws {Error} When the value is none of the supported types.
 */
const getByteSize = (data) => {
    if (typeof data === 'string') {
        return new TextEncoder().encode(data).byteLength;
    }
    // ArrayBuffer.isView covers Uint8Array, every other TypedArray, DataView,
    // and Node's Buffer (a Uint8Array subclass) — not just Uint8Array.
    if (data instanceof ArrayBuffer || ArrayBuffer.isView(data)) {
        return data.byteLength;
    }
    // Guard the Blob check: Blob is missing on older runtimes, and an
    // unguarded `instanceof Blob` would throw a ReferenceError there.
    if (typeof Blob !== 'undefined' && data instanceof Blob) {
        return data.size;
    }
    throw new Error('Unsupported data type');
};
/**
 * Turn a raw ArrayBuffer into its hexadecimal representation.
 * @param {ArrayBuffer} buffer The raw bytes.
 * @returns {string} Lowercase hexadecimal string.
 */
const hexFromBuffer = (buffer) => {
    let hex = '';
    for (const byte of new Uint8Array(buffer)) {
        hex += byte.toString(16).padStart(2, '0');
    }
    return hex;
};
/**
 * Turn a raw ArrayBuffer into its base64 representation.
 * The byte-to-binary-string conversion is chunked so String.fromCharCode
 * never receives an oversized argument list, but btoa runs exactly once on
 * the complete string: calling btoa per chunk inserts '=' padding in the
 * middle of the output whenever a chunk length is not a multiple of 3
 * (0x8000 is not), which corrupted the encoding for buffers over 32 KiB.
 * @param {ArrayBuffer} buffer The raw bytes.
 * @returns {string} Base64 string
 */
const base64FromBuffer = (buffer) => {
    const CHUNK = 0x8000; // 32 KiB per String.fromCharCode call
    const bytes = new Uint8Array(buffer);
    let binary = '';
    for (let i = 0; i < bytes.length; i += CHUNK) {
        binary += String.fromCharCode.apply(null, bytes.subarray(i, i + CHUNK));
    }
    return btoa(binary);
};
/**
 * Compute the SHA-256 digest of a UTF-8 string via WebCrypto.
 * @param {string} content The content to be hashed.
 * @returns {Promise<ArrayBuffer>} The raw 32-byte digest.
 */
const sha256 = async (content) => globalThis.crypto.subtle.digest('SHA-256', ENCODR.encode(content));
/**
 * Compute HMAC-SHA-256 of a UTF-8 string with the given key.
 * @param {string|ArrayBuffer} key Signing key (string keys are UTF-8 encoded).
 * @param {string} content The content to be signed.
 * @returns {Promise<ArrayBuffer>} The raw signature bytes.
 */
const hmac = async (key, content) => {
    const rawKey = typeof key === 'string' ? ENCODR.encode(key) : key;
    const cryptoKey = await globalThis.crypto.subtle.importKey('raw', rawKey, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']);
    return globalThis.crypto.subtle.sign('HMAC', cryptoKey, ENCODR.encode(content));
};
/**
 * Sanitize an ETag value by stripping a single leading and/or trailing
 * double quote, whether it appears literally (") or as an XML/HTML entity
 * (&quot; or &#34;) left over from XML-encoded responses.
 * @param {string} etag ETag value to sanitize.
 * @returns {string} Sanitized ETag.
 */
const sanitizeETag = (etag) => {
    // All quote spellings map to removal; keyed by the exact regex match.
    const replaceChars = {
        '"': '',
        '&quot;': '',
        '&#34;': '',
    };
    return etag.replace(/^("|&quot;|&#34;)|("|&quot;|&#34;)$/g, m => replaceChars[m]);
};
// Map of XML character entities to the literal characters they encode.
// Keys must be the FULL entity text (e.g. '&quot;') because unescapeXml
// looks up the entire regex match, entity delimiters included.
const entityMap = {
    '&quot;': '"',
    '&apos;': "'",
    '&lt;': '<',
    '&gt;': '>',
    '&amp;': '&',
};
/**
 * Escape the five XML special characters in a text value.
 * '&' is replaced first so entities produced by the later replacements are
 * not themselves re-escaped.
 * @param {string} value String to escape.
 * @returns {string} XML-escaped string.
 */
const escapeXml = (value) => {
    return value
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&apos;');
};
// Decode the five standard XML entities; unknown entities pass through
// unchanged. The entity table is inlined (keyed by the full match, e.g.
// '&quot;') so this helper is self-contained and correct on its own.
const unescapeXml = (value) => value.replace(/&(quot|apos|lt|gt|amp);/g, (m) => ({ '&quot;': '"', '&apos;': "'", '&lt;': '<', '&gt;': '>', '&amp;': '&' })[m] ?? m);
/**
 * Parse a very small subset of XML into a JS structure.
 *
 * Leaf elements become entity-decoded strings; elements with child tags
 * become plain objects keyed by tag name; repeated sibling tags are
 * promoted to arrays on their second occurrence.
 *
 * NOTE(review): the tag regex pairs a backreference (<\/\1>) with a
 * non-greedy body, so same-name NESTED tags are not matched correctly;
 * acceptable for the flat S3 list/error responses parsed here.
 *
 * @param input raw XML string
 * @returns string for leaf nodes, otherwise a map of children
 */
const parseXml = (input) => {
// Strip the optional <?xml ... ?> declaration before matching elements.
const xmlContent = input.replace(/<\?xml[^?]*\?>\s*/, '');
// One element per match: <Name attrs>body</Name>; \1 closes the same tag.
const RE_TAG = /<([A-Za-z_][\w\-.]*)[^>]*>([\s\S]*?)<\/\1>/gm;
const result = {}; // strong type, no `any`
let match;
while ((match = RE_TAG.exec(xmlContent)) !== null) {
const tagName = match[1];
const innerContent = match[2];
// Non-empty bodies recurse (text-only bodies come back decoded from the
// recursive call's fallback return); empty elements decode to ''.
const node = innerContent ? parseXml(innerContent) : unescapeXml(innerContent?.trim() || '');
if (!tagName) {
continue;
}
const current = result[tagName];
if (current === undefined) {
// First occurrence
result[tagName] = node;
}
else if (Array.isArray(current)) {
// Already an array
current.push(node);
}
else {
// Promote to array on the second occurrence
result[tagName] = [current, node];
}
}
// No child tags? — return the text, after entity decode
return Object.keys(result).length > 0 ? result : unescapeXml(xmlContent.trim());
};
/**
 * Encode a single character as an uppercase percent-encoded byte.
 * Padded to two hex digits so code points below 0x10 encode as valid
 * percent-escapes (the current callers only pass "!'()*", all >= 0x21,
 * for which the output is unchanged).
 * @param {string} c Character to encode.
 * @returns {string} Percent-encoded character.
 */
const encodeAsHex = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase().padStart(2, '0')}`;
/**
 * Percent-encode a string RFC 3986-style for S3 signing: additionally
 * escapes the characters encodeURIComponent leaves alone (!'()*).
 * @param {string} uriStr URI string to escape.
 * @returns {string} Escaped URI string.
 */
const uriEscape = (uriStr) => {
    return encodeURIComponent(uriStr).replace(/[!'()*]/g, encodeAsHex);
};
/**
 * Escape a URI resource path while preserving forward slashes, so object
 * keys keep their path structure in the request URL.
 * @param {string} string URI path to escape.
 * @returns {string} Escaped URI path.
 */
const uriResourceEscape = (string) => {
    return uriEscape(string).replace(/%2F/g, '/');
};
/**
 * Best-effort extraction of a string error code from an unknown error value.
 * Checks `err.code` first, then `err.cause.code` (wrapped-error convention).
 * @param {unknown} e Candidate error value.
 * @returns {string|undefined} The code, or undefined when none is present.
 */
const extractErrCode = (e) => {
    if (typeof e !== 'object' || e === null) {
        return undefined;
    }
    const { code, cause } = e;
    if (typeof code === 'string') {
        return code;
    }
    const nested = cause?.code;
    return typeof nested === 'string' ? nested : undefined;
};
/**
 * Base error type for this library. `code` carries a machine-readable
 * identifier; `cause` preserves the underlying error, if any.
 */
class S3Error extends Error {
code; // machine-readable error identifier
constructor(msg, code, cause) {
super(msg);
this.name = new.target.name; // keeps instanceof usable
this.code = code;
this.cause = cause; // original error, when wrapping
}
}
// Subtype for network-level failures (raised by transport code elsewhere
// in this file); carries no additional fields beyond S3Error.
class S3NetworkError extends S3Error {
}
/**
 * Failure reported by the S3 service itself.
 * `status` is the HTTP status code, `serviceCode` the service's error code
 * (also stored as `code` via the base class), `body` the raw error payload.
 */
class S3ServiceError extends S3Error {
status;
serviceCode;
body;
constructor(msg, status, serviceCode, body) {
super(msg, serviceCode);
this.status = status;
this.serviceCode = serviceCode;
this.body = body;
}
}
/**
 * Run promise-returning task functions in fixed-size batches, optionally
 * enforcing a minimum spacing (minIntervalMs) between the start times of
 * successive batches.
 *
 * @param {Iterable<() => Promise<unknown>>} tasks - Functions returning Promises.
 * @param {number} [batchSize=30] - Maximum number of tasks started concurrently.
 * @param {number} [minIntervalMs=0] - >= 0; 0 means "no pacing".
 * @returns {Promise<Array<PromiseSettledResult<unknown>>>} Settled results in task order.
 */
const runInBatches = async (tasks, batchSize = 30, minIntervalMs = 0) => {
    const results = [];
    let pending = [];
    // Start every queued task, collect its settled result, then pace if asked.
    const flush = async () => {
        const startedAt = Date.now();
        const settled = await Promise.allSettled(pending.map((run) => run()));
        results.push(...settled);
        pending = [];
        if (minIntervalMs > 0) {
            const waitMs = minIntervalMs - (Date.now() - startedAt);
            if (waitMs > 0) {
                await new Promise((resolve) => setTimeout(resolve, waitMs));
            }
        }
    };
    for (const task of tasks) {
        pending.push(task);
        if (pending.length === batchSize) {
            await flush();
        }
    }
    if (pending.length > 0) {
        await flush();
    }
    return results;
};
/**
* S3 class for interacting with S3-compatible object storage services.
* This class provides methods for common S3 operations such as uploading, downloading,
* and deleting objects, as well as multipart uploads.
*
* @class
* @example
* const s3 = new S3mini({
* accessKeyId: 'your-access-key',
* secretAccessKey: 'your-secret-key',
* endpoint: 'https://your-s3-endpoint.com/bucket-name',
* region: 'auto' // by default is auto
* });
*
* // Upload a file
* await s3.putObject('example.txt', 'Hello, World!');
*
* // Download a file
* const content = await s3.getObject('example.txt');
*
* // Delete a file
* await s3.deleteObject('example.txt');
*/
class S3mini {
/**
 * Creates an instance of the S3 class.
 *
 * @constructor
 * @param {Object} config - Configuration options for the S3 instance.
 * @param {string} config.accessKeyId - The access key ID for authentication.
 * @param {string} config.secretAccessKey - The secret access key for authentication.
 * @param {string} config.endpoint - The endpoint URL of the S3-compatible service.
 * @param {string} [config.region='auto'] - The region of the S3 service.
 * @param {number} [config.requestSizeInBytes=8388608] - The request size of a single request in bytes (AWS S3 is 8MB).
 * @param {number} [config.requestAbortTimeout=undefined] - The timeout in milliseconds after which a request should be aborted (careful on streamed requests).
 * @param {Object} [config.logger=null] - A logger object with methods like info, warn, error.
 * @throws {TypeError} Will throw an error if required parameters are missing or of incorrect type.
 */
accessKeyId; // credential: access key id (truncated when logged, see _log)
secretAccessKey; // credential: secret key (redacted from logs via _sanitize)
endpoint; // URL instance built from the validated endpoint string
region; // signing region; 'auto' by default
bucketName; // derived from endpoint path or subdomain (_extractBucketName)
requestSizeInBytes; // single request / part size budget
requestAbortTimeout; // optional ms timeout for aborting requests
logger; // optional logger with level methods (info/warn/error)
signingKeyDate; // yyyymmdd for which signingKey was derived (see _sign)
signingKey; // cached SigV4 signing key for signingKeyDate
constructor({ accessKeyId, secretAccessKey, endpoint, region = 'auto', requestSizeInBytes = DEFAULT_REQUEST_SIZE_IN_BYTES, requestAbortTimeout = undefined, logger = undefined, }) {
this._validateConstructorParams(accessKeyId, secretAccessKey, endpoint);
this.accessKeyId = accessKeyId;
this.secretAccessKey = secretAccessKey;
// _ensureValidUrl defaults the scheme to https and strips trailing slashes.
this.endpoint = new URL(this._ensureValidUrl(endpoint));
this.region = region;
this.bucketName = this._extractBucketName();
this.requestSizeInBytes = requestSizeInBytes;
this.requestAbortTimeout = requestAbortTimeout;
this.logger = logger;
}
_sanitize(obj) {
if (typeof obj !== 'object' || obj === null) {
return obj;
}
return Object.keys(obj).reduce((acc, key) => {
if (SENSITIVE_KEYS_REDACTED.includes(key.toLowerCase())) {
acc[key] = '[REDACTED]';
}
else if (typeof obj[key] === 'object' &&
obj[key] !== null) {
acc[key] = this._sanitize(obj[key]);
}
else {
acc[key] = obj[key];
}
return acc;
}, Array.isArray(obj) ? [] : {});
}
_log(level, message, additionalData = {}) {
if (this.logger && typeof this.logger[level] === 'function') {
// Function to recursively sanitize an object
// Sanitize the additional data
const sanitizedData = this._sanitize(additionalData);
// Prepare the log entry
const logEntry = {
timestamp: new Date().toISOString(),
level,
message,
details: sanitizedData,
// Include some general context, but sanitize sensitive parts
context: this._sanitize({
region: this.region,
endpoint: this.endpoint.toString(),
// Only include the first few characters of the access key, if it exists
accessKeyId: this.accessKeyId ? `${this.accessKeyId.substring(0, 4)}...` : undefined,
}),
};
// Log the sanitized entry
this.logger[level](JSON.stringify(logEntry));
}
}
_validateConstructorParams(accessKeyId, secretAccessKey, endpoint) {
if (typeof accessKeyId !== 'string' || accessKeyId.trim().length === 0) {
throw new TypeError(ERROR_ACCESS_KEY_REQUIRED);
}
if (typeof secretAccessKey !== 'string' || secretAccessKey.trim().length === 0) {
throw new TypeError(ERROR_SECRET_KEY_REQUIRED);
}
if (typeof endpoint !== 'string' || endpoint.trim().length === 0) {
throw new TypeError(ERROR_ENDPOINT_REQUIRED);
}
}
_ensureValidUrl(raw) {
const candidate = /^(https?:)?\/\//i.test(raw) ? raw : `https://${raw}`;
try {
new URL(candidate);
// Find the last non-slash character
let endIndex = candidate.length;
while (endIndex > 0 && candidate[endIndex - 1] === '/') {
endIndex--;
}
return endIndex === candidate.length ? candidate : candidate.substring(0, endIndex);
}
catch {
const msg = `${ERROR_ENDPOINT_FORMAT} But provided: "${raw}"`;
this._log('error', msg);
throw new TypeError(msg);
}
}
_validateMethodIsGetOrHead(method) {
if (method !== 'GET' && method !== 'HEAD') {
this._log('error', `${ERROR_PREFIX}method must be either GET or HEAD`);
throw new Error(`${ERROR_PREFIX}method must be either GET or HEAD`);
}
}
_checkKey(key) {
if (typeof key !== 'string' || key.trim().length === 0) {
this._log('error', ERROR_KEY_REQUIRED);
throw new TypeError(ERROR_KEY_REQUIRED);
}
}
_checkDelimiter(delimiter) {
if (typeof delimiter !== 'string' || delimiter.trim().length === 0) {
this._log('error', ERROR_DELIMITER_REQUIRED);
throw new TypeError(ERROR_DELIMITER_REQUIRED);
}
}
_checkPrefix(prefix) {
if (typeof prefix !== 'string') {
this._log('error', ERROR_PREFIX_TYPE);
throw new TypeError(ERROR_PREFIX_TYPE);
}
}
// private _checkMaxKeys(maxKeys: number): void {
// if (typeof maxKeys !== 'number' || maxKeys <= 0) {
// this._log('error', C.ERROR_MAX_KEYS_TYPE);
// throw new TypeError(C.ERROR_MAX_KEYS_TYPE);
// }
// }
_checkOpts(opts) {
if (typeof opts !== 'object') {
this._log('error', `${ERROR_PREFIX}opts must be an object`);
throw new TypeError(`${ERROR_PREFIX}opts must be an object`);
}
}
_filterIfHeaders(opts) {
const filteredOpts = {};
const conditionalHeaders = {};
const ifHeaders = ['if-match', 'if-none-match', 'if-modified-since', 'if-unmodified-since'];
for (const [key, value] of Object.entries(opts)) {
if (ifHeaders.includes(key.toLowerCase())) {
// Convert to lowercase for consistency
conditionalHeaders[key] = value;
}
else {
filteredOpts[key] = value;
}
}
return { filteredOpts, conditionalHeaders };
}
_validateData(data) {
if (!((globalThis.Buffer && data instanceof globalThis.Buffer) || typeof data === 'string')) {
this._log('error', ERROR_DATA_BUFFER_REQUIRED);
throw new TypeError(ERROR_DATA_BUFFER_REQUIRED);
}
return data;
}
_validateUploadPartParams(key, uploadId, data, partNumber, opts) {
this._checkKey(key);
if (typeof uploadId !== 'string' || uploadId.trim().length === 0) {
this._log('error', ERROR_UPLOAD_ID_REQUIRED);
throw new TypeError(ERROR_UPLOAD_ID_REQUIRED);
}
if (!Number.isInteger(partNumber) || partNumber <= 0) {
this._log('error', `${ERROR_PREFIX}partNumber must be a positive integer`);
throw new TypeError(`${ERROR_PREFIX}partNumber must be a positive integer`);
}
this._checkOpts(opts);
return this._validateData(data);
}
/**
 * Sign one request with AWS Signature Version 4 using UNSIGNED-PAYLOAD.
 * Mutates `headers` in place (adds x-amz-content-sha256, x-amz-date, host,
 * authorization) and returns the final URL plus those headers.
 * @param {string} method HTTP method being signed.
 * @param {string} keyPath Already URI-escaped object key ('' for bucket ops).
 * @param {object} [query={}] Query params folded into the canonical request.
 * @param {object} [headers={}] Base headers; extended in place.
 * @returns {Promise<{url: string, headers: object}>}
 */
async _sign(method, keyPath, query = {}, headers = {}) {
// Create URL without appending keyPath first
const url = new URL(this.endpoint);
// Properly format the pathname to avoid double slashes
if (keyPath && keyPath.length > 0) {
url.pathname =
url.pathname === '/' ? `/${keyPath.replace(/^\/+/, '')}` : `${url.pathname}/${keyPath.replace(/^\/+/, '')}`;
}
// SigV4 timestamps (UTC): yyyymmdd and yyyymmdd'T'HHMMSS'Z'.
const d = new Date();
const year = d.getUTCFullYear();
const month = String(d.getUTCMonth() + 1).padStart(2, '0');
const day = String(d.getUTCDate()).padStart(2, '0');
const shortDatetime = `${year}${month}${day}`;
const fullDatetime = `${shortDatetime}T${String(d.getUTCHours()).padStart(2, '0')}${String(d.getUTCMinutes()).padStart(2, '0')}${String(d.getUTCSeconds()).padStart(2, '0')}Z`;
const credentialScope = `${shortDatetime}/${this.region}/${S3_SERVICE}/${AWS_REQUEST_TYPE}`;
// The body is never hashed: SigV4 permits the UNSIGNED-PAYLOAD sentinel.
headers[HEADER_AMZ_CONTENT_SHA256] = UNSIGNED_PAYLOAD;
headers[HEADER_AMZ_DATE] = fullDatetime;
headers[HEADER_HOST] = url.host;
// These headers are deliberately excluded from the canonical header list.
const ignoredHeaders = new Set(['authorization', 'content-length', 'content-type', 'user-agent']);
let canonicalHeaders = '';
let signedHeaders = '';
// Canonical headers: sorted by name, lowercased, 'name:value' per line;
// signedHeaders collects the matching ';'-joined name list.
for (const [key, value] of Object.entries(headers).sort(([a], [b]) => a.localeCompare(b))) {
const lowerKey = key.toLowerCase();
if (!ignoredHeaders.has(lowerKey)) {
if (canonicalHeaders) {
canonicalHeaders += '\n';
signedHeaders += ';';
}
canonicalHeaders += `${lowerKey}:${String(value).trim()}`;
signedHeaders += lowerKey;
}
}
// _buildCanonicalQueryString is defined elsewhere in this file.
const canonicalRequest = `${method}\n${url.pathname}\n${this._buildCanonicalQueryString(query)}\n${canonicalHeaders}\n\n${signedHeaders}\n${UNSIGNED_PAYLOAD}`;
const stringToSign = `${AWS_ALGORITHM}\n${fullDatetime}\n${credentialScope}\n${hexFromBuffer(await sha256(canonicalRequest))}`;
// Derive the signing key at most once per UTC day and cache it.
if (shortDatetime !== this.signingKeyDate) {
this.signingKeyDate = shortDatetime;
this.signingKey = await this._getSignatureKey(shortDatetime);
}
const signature = hexFromBuffer(await hmac(this.signingKey, stringToSign));
headers[HEADER_AUTHORIZATION] =
`${AWS_ALGORITHM} Credential=${this.accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
return { url: url.toString(), headers };
}
/**
 * Sign and dispatch one HTTP request.
 * For GET/HEAD, conditional (If-*) entries in `query` are moved into the
 * request headers; everything else is signed as query parameters. The
 * actual fetch (and `tolerated` status handling) happens in _sendRequest,
 * which is defined elsewhere in this file.
 * @returns {Promise<Response>} The response from _sendRequest.
 */
async _signedRequest(method, // 'GET' | 'HEAD' | 'PUT' | 'POST' | 'DELETE'
key, // '' allowed for bucket-level ops
{ query = {}, // ?query=string
body = '', // BodyInit | undefined
headers = {}, // extra/override headers
tolerated = [], // [200, 404] etc.
withQuery = false, // append query string to signed URL
} = {}) {
// Basic validation
// if (!['GET', 'HEAD', 'PUT', 'POST', 'DELETE'].includes(method)) {
//     throw new Error(`${C.ERROR_PREFIX}Unsupported HTTP method ${method as string}`);
// }
// Only GET/HEAD support conditional headers; other verbs pass query through.
const { filteredOpts, conditionalHeaders } = ['GET', 'HEAD'].includes(method)
? this._filterIfHeaders(query)
: { filteredOpts: query, conditionalHeaders: {} };
const baseHeaders = {
[HEADER_AMZ_CONTENT_SHA256]: UNSIGNED_PAYLOAD,
// ...(['GET', 'HEAD'].includes(method) ? { [C.HEADER_CONTENT_TYPE]: C.JSON_CONTENT_TYPE } : {}),
...headers,
...conditionalHeaders,
};
const encodedKey = key ? uriResourceEscape(key) : '';
const { url, headers: signedHeaders } = await this._sign(method, encodedKey, filteredOpts, baseHeaders);
if (Object.keys(query).length > 0) {
withQuery = true; // append query string to signed URL
}
// URLSearchParams requires string values, so stringify everything first.
const filteredOptsStrings = Object.fromEntries(Object.entries(filteredOpts).map(([k, v]) => [k, String(v)]));
const finalUrl = withQuery && Object.keys(filteredOpts).length ? `${url}?${new URLSearchParams(filteredOptsStrings)}` : url;
const signedHeadersString = Object.fromEntries(Object.entries(signedHeaders).map(([k, v]) => [k, String(v)]));
return this._sendRequest(finalUrl, method, signedHeadersString, body, tolerated);
}
/**
 * Sanitizes an ETag value by removing surrounding quotes and whitespace.
 * Still returns RFC compliant ETag. https://www.rfc-editor.org/rfc/rfc9110#section-8.8.3
 * Thin instance-method wrapper around the module-level sanitizeETag helper.
 * @param {string} etag - The ETag value to sanitize.
 * @returns {string} The sanitized ETag value.
 * @example
 * const cleanEtag = s3.sanitizeETag('"abc123"'); // Returns: 'abc123'
 */
sanitizeETag(etag) {
return sanitizeETag(etag);
}
/**
 * Creates a new bucket.
 * Sends a PUT with a CreateBucketConfiguration payload (LocationConstraint
 * set to the configured region) to the endpoint given at construction time.
 * @returns A promise that resolves to true if the bucket was created
 * successfully (HTTP 200); false otherwise — 403, 404 and 409 ("bucket
 * already exists") are tolerated and yield false instead of throwing.
 */
async createBucket() {
const xmlBody = `
<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<LocationConstraint>${this.region}</LocationConstraint>
</CreateBucketConfiguration>
`;
// Content-Length is the UTF-8 byte size of the body, not its string length.
const headers = {
[HEADER_CONTENT_TYPE]: XML_CONTENT_TYPE,
[HEADER_CONTENT_LENGTH]: getByteSize(xmlBody),
};
const res = await this._signedRequest('PUT', '', {
body: xmlBody,
headers,
tolerated: [200, 404, 403, 409], // don't throw on 404/403; 409 = bucket already exists
});
return res.status === 200;
}
_extractBucketName() {
const url = this.endpoint;
// First check if bucket is in the pathname (path-style URLs)
const pathSegments = url.pathname.split('/').filter(p => p);
if (pathSegments.length > 0) {
if (typeof pathSegments[0] === 'string') {
return pathSegments[0];
}
}
// Otherwise extract from subdomain (virtual-hosted-style URLs)
const hostParts = url.hostname.split('.');
// Common patterns:
// bucket-name.s3.amazonaws.com
// bucket-name.s3.region.amazonaws.com
// bucket-name.region.digitaloceanspaces.com
// bucket-name.region.cdn.digitaloceanspaces.com
if (hostParts.length >= 3) {
// Check if it's a known S3-compatible service
const domain = hostParts.slice(-2).join('.');
const knownDomains = ['amazonaws.com', 'digitaloceanspaces.com', 'cloudflare.com'];
if (knownDomains.some(d => domain.includes(d))) {
if (typeof hostParts[0] === 'string') {
return hostParts[0];
}
}
}
// Fallback: use the first subdomain
return hostParts[0] || '';
}
/**
* Checks if a bucket exists.
* This method sends a request to check if the specified bucket exists in the S3-compatible service.
* @returns A promise that resolves to true if the bucket exists, false otherwise.
*/
async bucketExists() {
const res = await this._signedRequest('HEAD', '', { tolerated: [200, 404, 403] });
return res.status === 200;
}
/**
* Lists objects in the bucket with optional filtering and no pagination.
* This method retrieves all objects matching the criteria (not paginated like listObjectsV2).
* @param {string} [delimiter='/'] - The delimiter to use for grouping objects.
* @param {string} [prefix=''] - The prefix to filter objects by.
* @param {number} [maxKeys] - The maximum number of keys to return. If not provided, all keys will be returned.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @returns {Promise<IT.ListObject[] | null>} A promise that resolves to an array of objects or null if the bucket is empty.
* @example
* // List all objects
* const objects = await s3.listObjects();
*
* // List objects with prefix
* const photos = await s3.listObjects('/', 'photos/', 100);
*/
async listObjects(delimiter = '/', prefix = '', maxKeys, opts = {}) {
this._checkDelimiter(delimiter);
this._checkPrefix(prefix);
this._checkOpts(opts);
const keyPath = delimiter === '/' ? delimiter : uriEscape(delimiter);
const unlimited = !(maxKeys && maxKeys > 0);
let remaining = unlimited ? Infinity : maxKeys;
let token;
const all = [];
do {
const batchResult = await this._fetchObjectBatch(keyPath, prefix, remaining, token, opts);
if (batchResult === null) {
return null; // 404 - bucket not found
}
all.push(...batchResult.objects);
if (!unlimited) {
remaining -= batchResult.objects.length;
}
token = batchResult.continuationToken;
} while (token && remaining > 0);
return all;
}
async _fetchObjectBatch(keyPath, prefix, remaining, token, opts) {
const query = this._buildListObjectsQuery(prefix, remaining, token, opts);
const res = await this._signedRequest('GET', keyPath, {
query,
withQuery: true,
tolerated: [200, 404],
});
if (res.status === 404) {
return null;
}
if (res.status !== 200) {
await this._handleListObjectsError(res);
}
const xmlText = await res.text();
return this._parseListObjectsResponse(xmlText);
}
_buildListObjectsQuery(prefix, remaining, token, opts) {
const batchSize = Math.min(remaining, 1000); // S3 ceiling
return {
'list-type': LIST_TYPE, // =2 for V2
'max-keys': String(batchSize),
...(prefix ? { prefix } : {}),
...(token ? { 'continuation-token': token } : {}),
...opts,
};
}
async _handleListObjectsError(res) {
const errorBody = await res.text();
const parsedErrorBody = this._parseErrorXml(res.headers, errorBody);
const errorCode = res.headers.get('x-amz-error-code') ?? parsedErrorBody.svcCode ?? 'Unknown';
const errorMessage = res.headers.get('x-amz-error-message') ?? parsedErrorBody.errorMessage ?? res.statusText;
this._log('error', `${ERROR_PREFIX}Request failed with status ${res.status}: ${errorCode} - ${errorMessage}, err body: ${errorBody}`);
throw new Error(`${ERROR_PREFIX}Request failed with status ${res.status}: ${errorCode} - ${errorMessage}, err body: ${errorBody}`);
}
_parseListObjectsResponse(xmlText) {
const raw = parseXml(xmlText);
if (typeof raw !== 'object' || !raw || 'error' in raw) {
this._log('error', `${ERROR_PREFIX}Unexpected listObjects response shape: ${JSON.stringify(raw)}`);
throw new Error(`${ERROR_PREFIX}Unexpected listObjects response shape`);
}
const out = (raw.ListBucketResult || raw.listBucketResult || raw);
const objects = this._extractObjectsFromResponse(out);
const continuationToken = this._extractContinuationToken(out);
return { objects, continuationToken };
}
_extractObjectsFromResponse(response) {
const contents = response.Contents || response.contents; // S3 v2 vs v1
if (!contents) {
return [];
}
return Array.isArray(contents) ? contents : [contents];
}
_extractContinuationToken(response) {
const truncated = response.IsTruncated === 'true' || response.isTruncated === 'true' || false;
if (!truncated) {
return undefined;
}
return (response.NextContinuationToken ||
response.nextContinuationToken ||
response.NextMarker ||
response.nextMarker);
}
/**
* Lists multipart uploads in the bucket.
* This method sends a request to list multipart uploads in the specified bucket.
* @param {string} [delimiter='/'] - The delimiter to use for grouping uploads.
* @param {string} [prefix=''] - The prefix to filter uploads by.
* @param {IT.HttpMethod} [method='GET'] - The HTTP method to use for the request (GET or HEAD).
* @param {Record<string, string | number | boolean | undefined>} [opts={}] - Additional options for the request.
* @returns A promise that resolves to a list of multipart uploads or an error.
*/
async listMultipartUploads(delimiter = '/', prefix = '', method = 'GET', opts = {}) {
this._checkDelimiter(delimiter);
this._checkPrefix(prefix);
this._validateMethodIsGetOrHead(method);
this._checkOpts(opts);
const query = { uploads: '', ...opts };
const keyPath = delimiter === '/' ? delimiter : uriEscape(delimiter);
const res = await this._signedRequest(method, keyPath, {
query,
withQuery: true,
});
// doublecheck if this is needed
// if (method === 'HEAD') {
// return {
// size: +(res.headers.get(C.HEADER_CONTENT_LENGTH) ?? '0'),
// mtime: res.headers.get(C.HEADER_LAST_MODIFIED) ? new Date(res.headers.get(C.HEADER_LAST_MODIFIED)!) : undefined,
// etag: res.headers.get(C.HEADER_ETAG) ?? '',
// };
// }
const raw = parseXml(await res.text());
if (typeof raw !== 'object' || raw === null) {
throw new Error(`${ERROR_PREFIX}Unexpected listMultipartUploads response shape`);
}
if ('listMultipartUploadsResult' in raw) {
return raw.listMultipartUploadsResult;
}
return raw;
}
/**
* Get an object from the S3-compatible service.
* This method sends a request to retrieve the specified object from the S3-compatible service.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the object data (string) or null if not found.
*/
async getObject(key, opts = {}, ssecHeaders) {
// if ssecHeaders is set, add it to headers
const res = await this._signedRequest('GET', key, {
query: opts, // use opts.query if it exists, otherwise use an empty object
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res.text();
}
/**
* Get an object response from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns the full response.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the Response object or null if not found.
*/
async getObjectResponse(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res;
}
/**
* Get an object as an ArrayBuffer from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns it as an ArrayBuffer.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the object data as an ArrayBuffer or null if not found.
*/
async getObjectArrayBuffer(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res.arrayBuffer();
}
/**
* Get an object as JSON from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns it as JSON.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the object data as JSON or null if not found.
*/
async getObjectJSON(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res.json();
}
/**
* Get an object with its ETag from the S3-compatible service.
* This method sends a request to retrieve the specified object and its ETag.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to an object containing the ETag and the object data as an ArrayBuffer or null if not found.
*/
async getObjectWithETag(key, opts = {}, ssecHeaders) {
try {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return { etag: null, data: null };
}
const etag = res.headers.get(HEADER_ETAG);
if (!etag) {
throw new Error(`${ERROR_PREFIX}ETag not found in response headers`);
}
return { etag: sanitizeETag(etag), data: await res.arrayBuffer() };
}
catch (err) {
this._log('error', `Error getting object ${key} with ETag: ${String(err)}`);
throw err;
}
}
/**
* Get an object as a raw response from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns the raw response.
* @param {string} key - The key of the object to retrieve.
* @param {boolean} [wholeFile=true] - Whether to retrieve the whole file or a range.
* @param {number} [rangeFrom=0] - The starting byte for the range (if not whole file).
* @param {number} [rangeTo=this.requestSizeInBytes] - The ending byte for the range (if not whole file).
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the Response object.
*/
async getObjectRaw(key, wholeFile = true, rangeFrom = 0, rangeTo = this.requestSizeInBytes, opts = {}, ssecHeaders) {
const rangeHdr = wholeFile ? {} : { range: `bytes=${rangeFrom}-${rangeTo - 1}` };
return this._signedRequest('GET', key, {
query: { ...opts },
headers: { ...rangeHdr, ...ssecHeaders },
withQuery: true, // keep ?query=string behaviour
});
}
/**
* Get the content length of an object.
* This method sends a HEAD request to retrieve the content length of the specified object.
* @param {string} key - The key of the object to retrieve the content length for.
* @returns A promise that resolves to the content length of the object in bytes, or 0 if not found.
* @throws {Error} If the content length header is not found in the response.
*/
async getContentLength(key, ssecHeaders) {
try {
const res = await this._signedRequest('HEAD', key, {
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
const len = res.headers.get(HEADER_CONTENT_LENGTH);
return len ? +len : 0;
}
catch (err) {
this._log('error', `Error getting content length for object ${key}: ${String(err)}`);
throw new Error(`${ERROR_PREFIX}Error getting content length for object ${key}: ${String(err)}`);
}
}
/**
* Checks if an object exists in the S3-compatible service.
* This method sends a HEAD request to check if the specified object exists.
* @param {string} key - The key of the object to check.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @returns A promise that resolves to true if the object exists, false if not found, or null if ETag mismatch.
*/
async objectExists(key, opts = {}) {
const res = await this._signedRequest('HEAD', key, {
query: opts,
tolerated: [200, 404, 412, 304],
});
if (res.status === 404) {
return false; // not found
}
if (res.status === 412 || res.status === 304) {
return null; // ETag mismatch
}
return true; // found (200)
}
/**
* Retrieves the ETag of an object without downloading its content.
* @param {string} key - The key of the object to retrieve the ETag for.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<string | null>} A promise that resolves to the ETag value or null if the object is not found.
* @throws {Error} If the ETag header is not found in the response.
* @example
* const etag = await s3.getEtag('path/to/file.txt');
* if (etag) {
* console.log(`File ETag: ${etag}`);
* }
*/
async getEtag(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('HEAD', key, {
query: opts,
tolerated: [200, 304, 404, 412],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if (res.status === 404) {
return null;
}
if (res.status === 412 || res.status === 304) {
return null; // ETag mismatch
}
const etag = res.headers.get(HEADER_ETAG);
if (!etag) {
throw new Error(`${ERROR_PREFIX}ETag not found in response headers`);
}
return sanitizeETag(etag);
}
/**
* Uploads an object to the S3-compatible service.
* @param {string} key - The key/path where the object will be stored.
* @param {string | Buffer} data - The data to upload (string or Buffer).
* @param {string} [fileType='application/octet-stream'] - The MIME type of the file.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @param {IT.AWSHeaders} [additionalHeaders] - Additional x-amz-* headers specific to this request, if any.
* @returns {Promise<Response>} A promise that resolves to the Response object from the upload request.
* @throws {TypeError} If data is not a string or Buffer.
* @example
* // Upload text file
* await s3.putObject('hello.txt', 'Hello, World!', 'text/plain');
*
* // Upload binary data
* const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47]);
* await s3.putObject('image.png', buffer, 'image/png');
*/
async putObject(key, data, fileType = DEFAULT_STREAM_CONTENT_TYPE, ssecHeaders, additionalHeaders) {
return this._signedRequest('PUT', key, {
body: this._validateData(data),
headers: {
[HEADER_CONTENT_LENGTH]: getByteSize(data),
[HEADER_CONTENT_TYPE]: fileType,
...additionalHeaders,
...ssecHeaders,
},
tolerated: [200],
});
}
/**
* Initiates a multipart upload and returns the upload ID.
* @param {string} key - The key/path where the object will be stored.
* @param {string} [fileType='application/octet-stream'] - The MIME type of the file.
* @param {IT.SSECHeaders?} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<string>} A promise that resolves to the upload ID for the multipart upload.
* @throws {TypeError} If key is invalid or fileType is not a string.
* @throws {Error} If the multipart upload fails to initialize.
* @example
* const uploadId = await s3.getMultipartUploadId('large-file.zip', 'application/zip');
* console.log(`Started multipart upload: ${uploadId}`);
*/
async getMultipartUploadId(key, fileType = DEFAULT_STREAM_CONTENT_TYPE, ssecHeaders) {
this._checkKey(key);
if (typeof fileType !== 'string') {
throw new TypeError(`${ERROR_PREFIX}fileType must be a string`);
}
const query = { uploads: '' };
const headers = { [HEADER_CONTENT_TYPE]: fileType, ...ssecHeaders };
const res = await this._signedRequest('POST', key, {
query,
headers,
withQuery: true,
});
const parsed = parseXml(await res.text());
if (parsed && typeof parsed === 'object') {
// Check for both cases of InitiateMultipartUploadResult
const uploadResult = parsed.initiateMultipartUploadResult ||
parsed.InitiateMultipartUploadResult;
if (uploadResult && typeof uploadResult === 'object') {
// Check for both cases of uploadId
const uploadId = uploadResult.uploadId || uploadResult.UploadId;
if (uploadId && typeof uploadId === 'string') {
return uploadId;
}
}
}
throw new Error(`${ERROR_PREFIX}Failed to create multipart upload: ${JSON.stringify(parsed)}`);
}
/**
* Uploads a part in a multipart upload.
* @param {string} key - The key of the object being uploaded.
* @param {string} uploadId - The upload ID from getMultipartUploadId.
* @param {Buffer | string} data - The data for this part.
* @param {number} partNumber - The part number (must be between 1 and 10,000).
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<IT.UploadPart>} A promise that resolves to an object containing the partNumber and etag.
* @throws {TypeError} If any parameter is invalid.
* @example
* const part = await s3.uploadPart(
* 'large-file.zip',
* uploadId,
* partData,
* 1
* );
* console.log(`Part ${part.partNumber} uploaded with ETag: ${part.etag}`);
*/
async uploadPart(key, uploadId, data, partNumber, opts = {}, ssecHeaders) {
const body = this._validateUploadPartParams(key, uploadId, data, partNumber, opts);
const query = { uploadId, partNumber, ...opts };
const res = await this._signedRequest('PUT', key, {
query,
body,
headers: {
[HEADER_CONTENT_LENGTH]: getByteSize(data),
...ssecHeaders,
},
});
return { partNumber, etag: sanitizeETag(res.headers.get('etag') || '') };
}
/**
* Completes a multipart upload by combining all uploaded parts.
* @param {string} key - The key of the object being uploaded.
* @param {string} uploadId - The upload ID from getMultipartUploadId.
* @param {Array<IT.UploadPart>} parts - Array of uploaded parts with partNumber and etag.
* @returns {Promise<IT.CompleteMultipartUploadResult>} A promise that resolves to the completion result containing the final ETag.
* @throws {Error} If the multipart upload fails to complete.
* @example
* const result = await s3.completeMultipartUpload(
* 'large-file.zip',
* uploadId,
* [
* { partNumber: 1, etag: 'abc123' },
* { partNumber: 2, etag: 'def456' }
* ]
* );
* console.log(`Upload completed with ETag: ${result.etag}`);
*/
async completeMultipartUpload(key, uploadId, parts) {
const query = { uploadId };
const xmlBody = this._buildCompleteMultipartUploadXml(parts);
const headers = {
[HEADER_CONTENT_TYPE]: XML_CONTENT_TYPE,
[HEADER_CONTENT_LENGTH]: getByteSize(xmlBody),
};
const res = await this._signedRequest('POST', key, {
query,
body: xmlBody,
headers,
withQuery: true,
});
const parsed = parseXml(await res.text());
if (parsed && typeof parsed === 'object') {
// Check for both cases
const result = parsed.completeMultipartUploadResult || parsed.CompleteMultipartUploadResult || parsed;
if (result && typeof result === 'object') {
const resultObj = result;
// Handle ETag in all its variations
const etag = resultObj.ETag || resultObj.eTag || resultObj.etag;
if (etag && typeof etag === 'string') {
return {
...resultObj,
etag: sanitizeETag(etag),
};
}
return result;
}
}
throw new Error(`${ERROR_PREFIX}Failed to complete multipart upload: ${JSON.stringify(parsed)}`);
}
/**
* Aborts a multipart upload and removes all uploaded parts.
* @param {string} key - The key of the object being uploaded.
* @param {string} uploadId - The upload ID to abort.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<object>} A promise that resolves to an object containing the abort status and details.
* @throws {TypeError} If key or uploadId is invalid.
* @throws {Error} If the abort operation fails.
* @example
* try {
* const result = await s3.abortMultipartUpload('large-file.zip', uploadId);
* console.log('Upload aborted:', result.status);
* } catch (error) {
* console.error('Failed to abort upload:', error);
* }
*/
async abortMultipartUpload(key, uploadId, ssecHeaders) {
this._checkKey(key);
if (!uploadId) {
throw new TypeError(ERROR_UPLOAD_ID_REQUIRED);
}
const query = { uploadId };
const headers = { [HEADER_CONTENT_TYPE]: XML_CONTENT_TYPE, ...(ssecHeaders ? { ...ssecHeaders } : {}) };
const res = await this._signedRequest('DELETE', key, {
query,
headers,
withQuery: true,
});
const parsed = parseXml(await res.text());
if (parsed &&
'error' in parsed &&
typeof parsed.error === 'object' &&
parsed.error !== null &&
'message' in parsed.error) {
this._log('error', `${ERROR_PREFIX}Failed to abort multipart upload: ${String(parsed.error.message)}`);
throw new Error(`${ERROR_PREFIX}Failed to abort multipart upload: ${String(parsed.error.message)}`);
}
return { status: 'Aborted', key, uploadId, response: parsed };
}
_buildCompleteMultipartUploadXml(parts) {
let xml = '<CompleteMultipartUpload>';
for (const part of parts) {
xml += `<Part><PartNumber>${part.partNumber}</PartNumber><ETag>${part.etag}</ETag></Part>`;
}
xml += '</CompleteMultipartUpload>';
return xml;
}
/**
* Executes the copy operation for local copying (same bucket/endpoint).
* @private
*/
async _executeCopyOperation(destinationKey, copySource, options) {
const { metadataDirective = 'COPY', metadata = {}, contentType, storageClass, taggingDirective, websiteRedirectLocation, sourceSSECHeaders = {}, destinationSSECHeaders = {}, additionalHeaders = {}, } = options;
const headers = {
'x-amz-copy-source': copySource,
'x-amz-metadata-directive':