// s3mini — tiny & fast S3 client for Node and edge computing platforms
// (compiled distribution file; 1,122 lines, 56.4 kB, JavaScript)
// ── Constants ─────────────────────────────────────────────────────────────
const AWS_ALGORITHM = 'AWS4-HMAC-SHA256';
const AWS_REQUEST_TYPE = 'aws4_request';
const S3_SERVICE = 's3';
const LIST_TYPE = '2';
const UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD';
const DEFAULT_STREAM_CONTENT_TYPE = 'application/octet-stream';
const XML_CONTENT_TYPE = 'application/xml';
// Keys whose values must never reach logs. Stored lowercase because the
// sanitizer matches against `key.toLowerCase()`; the previous camelCase
// entries ('accessKeyId', ...) could never match a lowercased key, so
// credentials were not actually redacted.
const SENSITIVE_KEYS_REDACTED = ['accesskeyid', 'secretaccesskey', 'sessiontoken', 'password', 'token'];
// Default single-request size: 8 MiB (AWS S3 multipart chunk size).
const DEFAULT_REQUEST_SIZE_IN_BYTES = 8 * 1024 * 1024;
// Headers
const HEADER_AMZ_CONTENT_SHA256 = 'x-amz-content-sha256';
const HEADER_AMZ_DATE = 'x-amz-date';
const HEADER_HOST = 'host';
const HEADER_AUTHORIZATION = 'authorization';
const HEADER_CONTENT_TYPE = 'content-type';
const HEADER_CONTENT_LENGTH = 'content-length';
const HEADER_ETAG = 'etag';
// Error messages
const ERROR_PREFIX = '[s3mini] ';
const ERROR_ACCESS_KEY_REQUIRED = `${ERROR_PREFIX}accessKeyId must be a non-empty string`;
const ERROR_SECRET_KEY_REQUIRED = `${ERROR_PREFIX}secretAccessKey must be a non-empty string`;
const ERROR_ENDPOINT_REQUIRED = `${ERROR_PREFIX}endpoint must be a non-empty string`;
const ERROR_ENDPOINT_FORMAT = `${ERROR_PREFIX}endpoint must be a valid URL. Expected format: https://<host>[:port][/base-path]`;
const ERROR_KEY_REQUIRED = `${ERROR_PREFIX}key must be a non-empty string`;
const ERROR_UPLOAD_ID_REQUIRED = `${ERROR_PREFIX}uploadId must be a non-empty string`;
const ERROR_DATA_BUFFER_REQUIRED = `${ERROR_PREFIX}data must be a Buffer or string`;
const ERROR_PREFIX_TYPE = `${ERROR_PREFIX}prefix must be a string`;
const ERROR_DELIMITER_REQUIRED = `${ERROR_PREFIX}delimiter must be a string`;
// Initialize crypto functions - this is needed for environments where `crypto` is not available globally
// e.g., in Cloudflare Workers or other non-Node.js environments with nodejs_flags enabled.
// Note: runtimes exposing only WebCrypto have a global `crypto` object without
// `createHmac`/`createHash`, so the fallback dynamic import of `node:crypto`
// (top-level await) is what actually supplies the functions there.
const _createHmac = crypto.createHmac || (await import('node:crypto')).createHmac;
const _createHash = crypto.createHash || (await import('node:crypto')).createHash;
/**
 * SHA-256 digest of a string or Buffer.
 * @param {string|Buffer} content - data to hash
 * @returns {string} lowercase hex-encoded digest
 */
const hash = (content) => _createHash('sha256').update(content).digest('hex');
/**
 * MD5 digest of the given data, base64 encoded.
 * @param {string|Buffer} data - data to digest
 * @returns {string} base64-encoded MD5 digest
 */
const md5base64 = (data) => _createHash('md5').update(data).digest('base64');
/**
 * HMAC-SHA-256 of `content` keyed with `key`.
 * @param {string|Buffer} key - secret key
 * @param {string|Buffer} content - data to authenticate
 * @param {BufferEncoding} [encoding] - 'hex', 'base64', ...; omit for a raw Buffer
 * @returns {string|Buffer} encoded digest when `encoding` is truthy, otherwise a Buffer
 */
const hmac = (key, content, encoding) => {
    const authenticator = _createHmac('sha256', key).update(content);
    // Truthy encoding -> encoded string; otherwise hand back the raw Buffer
    // (used when chaining key-derivation steps).
    return encoding ? authenticator.digest(encoding) : authenticator.digest();
};
/**
 * Sanitize an ETag value by stripping surrounding quotes, whether they arrive
 * as literal '"' characters or as XML entities (&quot; / &#34;).
 * Note: the original entity strings here had been HTML-decoded into five
 * duplicate '"' keys, making the entity branches dead; restored below.
 * @param {string} etag - ETag value to sanitize
 * @returns {string} sanitized ETag
 */
const sanitizeETag = (etag) => {
    const replaceChars = {
        '"': '',
        '&quot;': '',
        '&#34;': '',
        '&QUOT;': '',
        '&#x00022': '',
    };
    return etag.replace(/^("|&quot;|&#34;)|("|&quot;|&#34;)$/g, m => replaceChars[m]);
};
// Map from the five predefined XML entities back to their characters.
// Keys must be the full entity text because `unescapeXml` passes the whole
// regex match (e.g. '&quot;') as the lookup key. The original keys/values
// had been HTML-decoded during extraction, turning decoding into a no-op.
const entityMap = {
    '&quot;': '"',
    '&apos;': "'",
    '&lt;': '<',
    '&gt;': '>',
    '&amp;': '&',
};
/**
 * Escape the five XML special characters. '&' is replaced first so already
 * produced entities are not double-escaped.
 * @param {string} value - string to escape
 * @returns {string} XML-escaped string
 */
const escapeXml = (value) => {
    return value
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&apos;');
};
/**
 * Decode the five predefined XML entities; unknown entities pass through.
 * @param {string} value - string to decode
 * @returns {string} decoded string
 */
const unescapeXml = (value) => value.replace(/&(quot|apos|lt|gt|amp);/g, m => entityMap[m] ?? m);
/**
 * Parse a very small subset of XML into a JS structure.
 * Repeated sibling tags are collected into an array; leaf text is decoded
 * with unescapeXml. Attributes are ignored.
 *
 * @param input raw XML string
 * @returns string for leaf nodes, otherwise a map of children
 */
const parseXml = (input) => {
    // Strip an <?xml ... ?> prolog if present.
    const xmlContent = input.replace(/<\?xml[^?]*\?>\s*/, '');
    // Matches <Tag ...>inner</Tag>; the \1 backreference pairs open and close
    // tags. Fresh regex per call, so `lastIndex` state never leaks.
    const RE_TAG = /<([A-Za-z_][\w\-.]*)[^>]*>([\s\S]*?)<\/\1>/gm;
    const result = {}; // strong type, no `any`
    let match;
    while ((match = RE_TAG.exec(xmlContent)) !== null) {
        const tagName = match[1];
        const innerContent = match[2];
        // Non-empty inner text recurses (it may contain nested tags and falls
        // through to the leaf-text return below); empty text decodes to ''.
        const node = innerContent ? parseXml(innerContent) : unescapeXml(innerContent?.trim() || '');
        if (!tagName) {
            continue;
        }
        const current = result[tagName];
        if (current === undefined) {
            // First occurrence
            result[tagName] = node;
        }
        else if (Array.isArray(current)) {
            // Already an array
            current.push(node);
        }
        else {
            // Promote to array on the second occurrence
            result[tagName] = [current, node];
        }
    }
    // No child tags? - return the text, after entity decode
    return Object.keys(result).length > 0 ? result : unescapeXml(xmlContent.trim());
};
/**
 * Percent-encode a single character as %XX (uppercase hex).
 * @param {string} c - single character to encode
 * @returns {string} percent-encoded character
 */
const encodeAsHex = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`;
/**
 * Escape a URI string. Covers encodeURIComponent's output plus the
 * characters !'()* which encodeURIComponent leaves untouched.
 * @param {string} uriStr - URI string to escape
 * @returns {string} escaped URI string
 */
const uriEscape = (uriStr) => encodeURIComponent(uriStr).replace(/[!'()*]/g, encodeAsHex);
/**
 * Escape a URI resource path while keeping '/' separators literal.
 * @param {string} string - URI path to escape
 * @returns {string} escaped URI path
 */
const uriResourceEscape = (string) => uriEscape(string).replace(/%2F/g, '/');
/**
 * Pull a string error code off an unknown error value.
 * Checks `err.code` first, then `err.cause.code`; returns undefined for
 * non-objects or non-string codes.
 */
const extractErrCode = (e) => {
    if (typeof e !== 'object' || e === null) {
        return undefined;
    }
    const { code, cause } = e;
    if (typeof code === 'string') {
        return code;
    }
    const causeCode = cause?.code;
    return typeof causeCode === 'string' ? causeCode : undefined;
};
/** Base error for this client; carries an optional machine-readable `code`. */
class S3Error extends Error {
    code;
    constructor(msg, code, cause) {
        super(msg);
        // new.target.name keeps err.name equal to the concrete subclass,
        // so instanceof checks and log output stay meaningful.
        this.name = new.target.name;
        this.cause = cause;
        this.code = code;
    }
}
/** Failure before any HTTP response was obtained (see extractErrCode usage). */
class S3NetworkError extends S3Error {
}
/** Error carrying an HTTP status and the raw response body from the service. */
class S3ServiceError extends S3Error {
    status;
    serviceCode;
    body;
    constructor(msg, status, serviceCode, body) {
        // The service code doubles as the generic `code` slot on S3Error.
        super(msg, serviceCode);
        this.body = body;
        this.status = status;
        this.serviceCode = serviceCode;
    }
}
/**
 * Run async-returning tasks in batches with an *optional* minimum spacing
 * (minIntervalMs) between the *start* times of successive batches.
 *
 * @param {Iterable<() => Promise<unknown>>} tasks - functions returning Promises
 * @param {number} [batchSize=30] - max concurrent requests
 * @param {number} [minIntervalMs=0] - >=0; 0 means "no pacing"
 * @returns {Promise<Array<PromiseSettledResult<unknown>>>} settled results in task order
 */
const runInBatches = async (tasks, batchSize = 30, minIntervalMs = 0) => {
    const allResults = [];
    let batch = [];
    let previousBatchStart = 0; // epoch ms of the last batch start; 0 = none yet
    for (const task of tasks) {
        batch.push(task);
        if (batch.length === batchSize) {
            await executeBatch(batch);
            batch = [];
        }
    }
    if (batch.length) {
        await executeBatch(batch);
    }
    return allResults;
    // ───────── helpers ─────────
    async function executeBatch(batchFns) {
        // Pace *before* starting a subsequent batch instead of sleeping after
        // every batch: the previous version also slept after the final batch,
        // delaying resolution by up to minIntervalMs for no benefit.
        if (minIntervalMs > 0 && previousBatchStart > 0) {
            const wait = minIntervalMs - (Date.now() - previousBatchStart);
            if (wait > 0) {
                await new Promise((resolve) => setTimeout(resolve, wait));
            }
        }
        previousBatchStart = Date.now();
        const settled = await Promise.allSettled(batchFns.map((fn) => fn()));
        allResults.push(...settled);
    }
};
/**
* S3 class for interacting with S3-compatible object storage services.
* This class provides methods for common S3 operations such as uploading, downloading,
* and deleting objects, as well as multipart uploads.
*
* @class
* @example
* const s3 = new S3mini({
* accessKeyId: 'your-access-key',
* secretAccessKey: 'your-secret-key',
* endpoint: 'https://your-s3-endpoint.com',
* region: 'us-east-1' // by default is auto
* });
*
* // Upload a file
* await s3.putObject('example.txt', 'Hello, World!');
*
* // Download a file
* const content = await s3.getObject('example.txt');
*
* // Delete a file
* await s3.deleteObject('example.txt');
*/
class S3mini {
/**
* Creates an instance of the S3 class.
*
* @constructor
* @param {Object} config - Configuration options for the S3 instance.
* @param {string} config.accessKeyId - The access key ID for authentication.
* @param {string} config.secretAccessKey - The secret access key for authentication.
* @param {string} config.endpoint - The endpoint URL of the S3-compatible service.
* @param {string} [config.region='auto'] - The region of the S3 service.
* @param {number} [config.requestSizeInBytes=8388608] - The request size of a single request in bytes (AWS S3 is 8MB).
* @param {number} [config.requestAbortTimeout=undefined] - The timeout in milliseconds after which a request should be aborted (careful on streamed requests).
* @param {Object} [config.logger=null] - A logger object with methods like info, warn, error.
* @throws {TypeError} Will throw an error if required parameters are missing or of incorrect type.
*/
// Credentials and connection settings (see constructor JSDoc above).
accessKeyId;
secretAccessKey;
endpoint; // normalized in the constructor via _ensureValidUrl
region;
requestSizeInBytes; // per-request chunk size in bytes
requestAbortTimeout; // optional per-request abort timeout in ms
logger; // optional logger with level methods (info/warn/error)
// SigV4 signing-key cache, managed by _calculateSignature: the key is
// re-derived only when the UTC date (YYYYMMDD) changes.
signingKeyDate;
signingKey;
constructor({ accessKeyId, secretAccessKey, endpoint, region = 'auto', requestSizeInBytes = DEFAULT_REQUEST_SIZE_IN_BYTES, requestAbortTimeout = undefined, logger = undefined, }) {
    // Fail fast on missing/blank credentials or endpoint.
    this._validateConstructorParams(accessKeyId, secretAccessKey, endpoint);
    this.accessKeyId = accessKeyId;
    this.secretAccessKey = secretAccessKey;
    // Adds an https:// scheme when absent and trims trailing slashes.
    this.endpoint = this._ensureValidUrl(endpoint);
    this.region = region;
    this.requestSizeInBytes = requestSizeInBytes;
    this.requestAbortTimeout = requestAbortTimeout;
    this.logger = logger;
}
_sanitize(obj) {
if (typeof obj !== 'object' || obj === null) {
return obj;
}
return Object.keys(obj).reduce((acc, key) => {
if (SENSITIVE_KEYS_REDACTED.includes(key.toLowerCase())) {
acc[key] = '[REDACTED]';
}
else if (typeof obj[key] === 'object' &&
obj[key] !== null) {
acc[key] = this._sanitize(obj[key]);
}
else {
acc[key] = obj[key];
}
return acc;
}, Array.isArray(obj) ? [] : {});
}
/**
 * Emit a structured, JSON-stringified log entry through the configured
 * logger. No-op when no logger is set or it lacks the requested level.
 * All payloads pass through _sanitize so credential-like values are redacted.
 */
_log(level, message, additionalData = {}) {
    if (this.logger && typeof this.logger[level] === 'function') {
        // Sanitize the additional data
        const sanitizedData = this._sanitize(additionalData);
        // Prepare the log entry
        const logEntry = {
            timestamp: new Date().toISOString(),
            level,
            message,
            details: sanitizedData,
            // Include some general context, but sanitize sensitive parts
            context: this._sanitize({
                region: this.region,
                endpoint: this.endpoint,
                // Only include the first few characters of the access key, if it exists
                accessKeyId: this.accessKeyId ? `${this.accessKeyId.substring(0, 4)}...` : undefined,
            }),
        };
        // Log the sanitized entry
        this.logger[level](JSON.stringify(logEntry));
    }
}
_validateConstructorParams(accessKeyId, secretAccessKey, endpoint) {
if (typeof accessKeyId !== 'string' || accessKeyId.trim().length === 0) {
throw new TypeError(ERROR_ACCESS_KEY_REQUIRED);
}
if (typeof secretAccessKey !== 'string' || secretAccessKey.trim().length === 0) {
throw new TypeError(ERROR_SECRET_KEY_REQUIRED);
}
if (typeof endpoint !== 'string' || endpoint.trim().length === 0) {
throw new TypeError(ERROR_ENDPOINT_REQUIRED);
}
}
_ensureValidUrl(raw) {
const candidate = /^(https?:)?\/\//i.test(raw) ? raw : `https://${raw}`;
try {
new URL(candidate);
// Find the last non-slash character
let endIndex = candidate.length;
while (endIndex > 0 && candidate[endIndex - 1] === '/') {
endIndex--;
}
return endIndex === candidate.length ? candidate : candidate.substring(0, endIndex);
}
catch {
const msg = `${ERROR_ENDPOINT_FORMAT} But provided: "${raw}"`;
this._log('error', msg);
throw new TypeError(msg);
}
}
_validateMethodIsGetOrHead(method) {
if (method !== 'GET' && method !== 'HEAD') {
this._log('error', `${ERROR_PREFIX}method must be either GET or HEAD`);
throw new Error(`${ERROR_PREFIX}method must be either GET or HEAD`);
}
}
_checkKey(key) {
if (typeof key !== 'string' || key.trim().length === 0) {
this._log('error', ERROR_KEY_REQUIRED);
throw new TypeError(ERROR_KEY_REQUIRED);
}
}
_checkDelimiter(delimiter) {
if (typeof delimiter !== 'string' || delimiter.trim().length === 0) {
this._log('error', ERROR_DELIMITER_REQUIRED);
throw new TypeError(ERROR_DELIMITER_REQUIRED);
}
}
_checkPrefix(prefix) {
if (typeof prefix !== 'string') {
this._log('error', ERROR_PREFIX_TYPE);
throw new TypeError(ERROR_PREFIX_TYPE);
}
}
// private _checkMaxKeys(maxKeys: number): void {
// if (typeof maxKeys !== 'number' || maxKeys <= 0) {
// this._log('error', C.ERROR_MAX_KEYS_TYPE);
// throw new TypeError(C.ERROR_MAX_KEYS_TYPE);
// }
// }
_checkOpts(opts) {
if (typeof opts !== 'object') {
this._log('error', `${ERROR_PREFIX}opts must be an object`);
throw new TypeError(`${ERROR_PREFIX}opts must be an object`);
}
}
_filterIfHeaders(opts) {
const filteredOpts = {};
const conditionalHeaders = {};
const ifHeaders = ['if-match', 'if-none-match', 'if-modified-since', 'if-unmodified-since'];
for (const [key, value] of Object.entries(opts)) {
if (ifHeaders.includes(key.toLowerCase())) {
// Convert to lowercase for consistency
conditionalHeaders[key] = value;
}
else {
filteredOpts[key] = value;
}
}
return { filteredOpts, conditionalHeaders };
}
_validateUploadPartParams(key, uploadId, data, partNumber, opts) {
this._checkKey(key);
if (!(data instanceof Buffer || typeof data === 'string')) {
this._log('error', ERROR_DATA_BUFFER_REQUIRED);
throw new TypeError(ERROR_DATA_BUFFER_REQUIRED);
}
if (typeof uploadId !== 'string' || uploadId.trim().length === 0) {
this._log('error', ERROR_UPLOAD_ID_REQUIRED);
throw new TypeError(ERROR_UPLOAD_ID_REQUIRED);
}
if (!Number.isInteger(partNumber) || partNumber <= 0) {
this._log('error', `${ERROR_PREFIX}partNumber must be a positive integer`);
throw new TypeError(`${ERROR_PREFIX}partNumber must be a positive integer`);
}
this._checkOpts(opts);
}
/**
 * Compute the AWS Signature V4 authorization for a request.
 * Mutates `headers` (adds x-amz-content-sha256, x-amz-date, host and
 * authorization) and returns the absolute URL plus those headers.
 * The payload hash is always UNSIGNED-PAYLOAD.
 */
_sign(method, keyPath, query = {}, headers = {}) {
    // Create URL without appending keyPath first
    const url = new URL(this.endpoint);
    // Properly format the pathname to avoid double slashes
    if (keyPath && keyPath.length > 0) {
        url.pathname =
            url.pathname === '/' ? `/${keyPath.replace(/^\/+/, '')}` : `${url.pathname}/${keyPath.replace(/^\/+/, '')}`;
    }
    // SigV4 timestamps: ISO-basic datetime (YYYYMMDDTHHMMSSZ) and its YYYYMMDD prefix.
    const fullDatetime = new Date().toISOString().replace(/[:-]|\.\d{3}/g, '');
    const shortDatetime = fullDatetime.slice(0, 8);
    const credentialScope = this._buildCredentialScope(shortDatetime);
    headers[HEADER_AMZ_CONTENT_SHA256] = UNSIGNED_PAYLOAD; // body ? U.hash(body) : C.UNSIGNED_PAYLOAD;
    headers[HEADER_AMZ_DATE] = fullDatetime;
    headers[HEADER_HOST] = url.host;
    // Headers excluded from signing; the remainder is sorted alphabetically
    // as SigV4 requires for the canonical request.
    const ignoredHeaders = ['authorization', 'content-length', 'content-type', 'user-agent'];
    let headersForSigning = Object.fromEntries(Object.entries(headers).filter(([key]) => !ignoredHeaders.includes(key.toLowerCase())));
    headersForSigning = Object.fromEntries(Object.entries(headersForSigning).sort(([keyA], [keyB]) => keyA.localeCompare(keyB)));
    const canonicalHeaders = this._buildCanonicalHeaders(headersForSigning);
    const signedHeaders = Object.keys(headersForSigning)
        .map(key => key.toLowerCase())
        .sort()
        .join(';');
    const canonicalRequest = this._buildCanonicalRequest(method, url, query, canonicalHeaders, signedHeaders);
    const stringToSign = this._buildStringToSign(fullDatetime, credentialScope, canonicalRequest);
    const signature = this._calculateSignature(shortDatetime, stringToSign);
    const authorizationHeader = this._buildAuthorizationHeader(credentialScope, signedHeaders, signature);
    headers[HEADER_AUTHORIZATION] = authorizationHeader;
    return { url: url.toString(), headers };
}
_buildCanonicalHeaders(headers) {
return Object.entries(headers)
.map(([key, value]) => `${key.toLowerCase()}:${String(value).trim()}`)
.join('\n');
}
_buildCanonicalRequest(method, url, query, canonicalHeaders, signedHeaders) {
const parts = [
method,
url.pathname,
this._buildCanonicalQueryString(query),
canonicalHeaders + '\n', // Canonical headers end with extra newline
signedHeaders,
UNSIGNED_PAYLOAD,
];
return parts.join('\n');
}
_buildCredentialScope(shortDatetime) {
return [shortDatetime, this.region, S3_SERVICE, AWS_REQUEST_TYPE].join('/');
}
_buildStringToSign(fullDatetime, credentialScope, canonicalRequest) {
return [AWS_ALGORITHM, fullDatetime, credentialScope, hash(canonicalRequest)].join('\n');
}
/**
 * HMAC the string-to-sign with a per-date signing key.
 * The derived key is cached on the instance (signingKey/signingKeyDate) and
 * only re-derived when the UTC date changes, saving repeated key derivation.
 * NOTE(review): _getSignatureKey is defined elsewhere in this file.
 */
_calculateSignature(shortDatetime, stringToSign) {
    if (shortDatetime !== this.signingKeyDate) {
        this.signingKeyDate = shortDatetime;
        this.signingKey = this._getSignatureKey(shortDatetime);
    }
    return hmac(this.signingKey, stringToSign, 'hex');
}
_buildAuthorizationHeader(credentialScope, signedHeaders, signature) {
return [
`${AWS_ALGORITHM} Credential=${this.accessKeyId}/${credentialScope}`,
`SignedHeaders=${signedHeaders}`,
`Signature=${signature}`,
].join(', ');
}
/**
 * Sign and dispatch a request in one step.
 * For GET/HEAD, conditional (If-*) entries found in `query` are moved into
 * the request headers; any remaining query entries force `withQuery` on so
 * they are appended to the final URL.
 */
async _signedRequest(method, // 'GET' | 'HEAD' | 'PUT' | 'POST' | 'DELETE'
key, // '' is allowed for bucket-level ops
{ query = {}, // ?query=string
body = '', // string | Buffer | undefined
headers = {}, // extra/override headers
tolerated = [], // [200, 404] etc.
withQuery = false, // append query string to signed URL
} = {}) {
    // Basic validation
    if (!['GET', 'HEAD', 'PUT', 'POST', 'DELETE'].includes(method)) {
        throw new Error(`${ERROR_PREFIX}Unsupported HTTP method ${method}`);
    }
    // Only read-style requests carry conditional headers.
    const { filteredOpts, conditionalHeaders } = ['GET', 'HEAD'].includes(method)
        ? this._filterIfHeaders(query)
        : { filteredOpts: query, conditionalHeaders: {} };
    const baseHeaders = {
        [HEADER_AMZ_CONTENT_SHA256]: UNSIGNED_PAYLOAD,
        ...headers,
        ...conditionalHeaders,
    };
    // Keys are percent-encoded but '/' separators stay literal.
    const encodedKey = key ? uriResourceEscape(key) : '';
    const { url, headers: signedHeaders } = this._sign(method, encodedKey, filteredOpts, baseHeaders);
    if (Object.keys(query).length > 0) {
        withQuery = true; // append query string to signed URL
    }
    // URLSearchParams needs string values; stringify everything first.
    const filteredOptsStrings = Object.fromEntries(Object.entries(filteredOpts).map(([k, v]) => [k, String(v)]));
    const finalUrl = withQuery && Object.keys(filteredOpts).length ? `${url}?${new URLSearchParams(filteredOptsStrings)}` : url;
    const signedHeadersString = Object.fromEntries(Object.entries(signedHeaders).map(([k, v]) => [k, String(v)]));
    return this._sendRequest(finalUrl, method, signedHeadersString, body, tolerated);
}
/**
* Gets the current configuration properties of the S3 instance.
* @returns {IT.S3Config} The current S3 configuration object containing all settings.
* @example
* const config = s3.getProps();
* console.log(config.endpoint); // 'https://s3.amazonaws.com/my-bucket'
*/
getProps() {
return {
accessKeyId: this.accessKeyId,
secretAccessKey: this.secretAccessKey,
endpoint: this.endpoint,
region: this.region,
requestSizeInBytes: this.requestSizeInBytes,
requestAbortTimeout: this.requestAbortTimeout,
logger: this.logger,
};
}
/**
* Updates the configuration properties of the S3 instance.
* @param {IT.S3Config} props - The new configuration object.
* @param {string} props.accessKeyId - The access key ID for authentication.
* @param {string} props.secretAccessKey - The secret access key for authentication.
* @param {string} props.endpoint - The endpoint URL of the S3-compatible service.
* @param {string} [props.region='auto'] - The region of the S3 service.
* @param {number} [props.requestSizeInBytes=8388608] - The request size of a single request in bytes.
* @param {number} [props.requestAbortTimeout] - The timeout in milliseconds after which a request should be aborted.
* @param {IT.Logger} [props.logger] - A logger object with methods like info, warn, error.
* @throws {TypeError} Will throw an error if required parameters are missing or of incorrect type.
* @example
* s3.setProps({
* accessKeyId: 'new-access-key',
* secretAccessKey: 'new-secret-key',
* endpoint: 'https://new-endpoint.com/my-bucket',
* region: 'us-west-2' // by default is auto
* });
*/
setProps(props) {
this._validateConstructorParams(props.accessKeyId, props.secretAccessKey, props.endpoint);
this.accessKeyId = props.accessKeyId;
this.secretAccessKey = props.secretAccessKey;
this.region = props.region || 'auto';
this.endpoint = props.endpoint;
this.requestSizeInBytes = props.requestSizeInBytes || DEFAULT_REQUEST_SIZE_IN_BYTES;
this.requestAbortTimeout = props.requestAbortTimeout;
this.logger = props.logger;
}
/**
 * Sanitizes an ETag value by removing surrounding quotes and whitespace.
 * Thin instance-level delegate to the module-scope sanitizeETag helper.
 * Still returns RFC compliant ETag. https://www.rfc-editor.org/rfc/rfc9110#section-8.8.3
 * @param {string} etag - The ETag value to sanitize.
 * @returns {string} The sanitized ETag value.
 * @example
 * const cleanEtag = s3.sanitizeETag('"abc123"'); // Returns: 'abc123'
 */
sanitizeETag(etag) {
    return sanitizeETag(etag);
}
/**
 * Creates a new bucket.
 * This method sends a request to create a new bucket at the configured endpoint.
 * @returns A promise that resolves to true if the bucket was created successfully, false otherwise.
 */
async createBucket() {
    const xmlBody = `
<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<LocationConstraint>${this.region}</LocationConstraint>
</CreateBucketConfiguration>
`;
    const headers = {
        [HEADER_CONTENT_TYPE]: XML_CONTENT_TYPE,
        // Content-Length must be the byte length of the body, not the
        // character count (they differ for multi-byte regions/strings).
        [HEADER_CONTENT_LENGTH]: Buffer.byteLength(xmlBody).toString(),
    };
    const res = await this._signedRequest('PUT', '', {
        body: xmlBody,
        headers,
        tolerated: [200, 404, 403, 409], // don't throw on 404/403; 409 = bucket already exists
    });
    return res.status === 200;
}
/**
* Checks if a bucket exists.
* This method sends a request to check if the specified bucket exists in the S3-compatible service.
* @returns A promise that resolves to true if the bucket exists, false otherwise.
*/
async bucketExists() {
const res = await this._signedRequest('HEAD', '', { tolerated: [200, 404, 403] });
return res.status === 200;
}
/**
 * Lists objects in the bucket with optional filtering, following
 * continuation tokens until exhausted (unlike a single listObjectsV2 page).
 * @param {string} [delimiter='/'] - The delimiter to use for grouping objects.
 * @param {string} [prefix=''] - The prefix to filter objects by.
 * @param {number} [maxKeys] - The maximum number of keys to return. If not provided (or <= 0), all keys will be returned.
 * @param {Record<string, unknown>} [opts={}] - Additional options for the request.
 * @returns {Promise<IT.ListObject[] | null>} A promise that resolves to an array of objects or null if the bucket is not found (404).
 * @example
 * // List all objects
 * const objects = await s3.listObjects();
 *
 * // List objects with prefix
 * const photos = await s3.listObjects('/', 'photos/', 100);
 */
async listObjects(delimiter = '/', prefix = '', maxKeys, opts = {}) {
    this._checkDelimiter(delimiter);
    this._checkPrefix(prefix);
    this._checkOpts(opts);
    const keyPath = delimiter === '/' ? delimiter : uriEscape(delimiter);
    // maxKeys absent or <= 0 means "fetch everything".
    const unlimited = !(maxKeys && maxKeys > 0);
    let remaining = unlimited ? Infinity : maxKeys;
    let token;
    const all = [];
    do {
        const batchSize = Math.min(remaining, 1000); // S3 ceiling
        const query = {
            'list-type': LIST_TYPE, // =2 for V2
            'max-keys': String(batchSize),
            ...(prefix ? { prefix } : {}),
            ...(token ? { 'continuation-token': token } : {}),
            ...opts,
        };
        const res = await this._signedRequest('GET', keyPath, {
            query,
            withQuery: true,
            tolerated: [200, 404],
        });
        if (res.status === 404) {
            return null;
        }
        if (res.status !== 200) {
            const errorBody = await res.text();
            const errorCode = res.headers.get('x-amz-error-code') || 'Unknown';
            const errorMessage = res.headers.get('x-amz-error-message') || res.statusText;
            this._log('error', `${ERROR_PREFIX}Request failed with status ${res.status}: ${errorCode} - ${errorMessage}, err body: ${errorBody}`);
            throw new Error(`${ERROR_PREFIX}Request failed with status ${res.status}: ${errorCode} - ${errorMessage}, err body: ${errorBody}`);
        }
        const raw = parseXml(await res.text());
        if (typeof raw !== 'object' || !raw || 'error' in raw) {
            this._log('error', `${ERROR_PREFIX}Unexpected listObjects response shape: ${JSON.stringify(raw)}`);
            throw new Error(`${ERROR_PREFIX}Unexpected listObjects response shape`);
        }
        // Tolerate either tag casing for the response root.
        const out = (raw.ListBucketResult || raw.listBucketResult || raw);
        /* accumulate Contents */
        const contents = out.Contents || out.contents; // S3 v2 vs v1
        if (contents) {
            // parseXml returns a bare object for a single entry, an array for many.
            const batch = Array.isArray(contents) ? contents : [contents];
            all.push(...batch);
            if (!unlimited) {
                remaining -= batch.length;
            }
        }
        // parseXml yields strings, hence the 'true' comparisons.
        const truncated = out.IsTruncated === 'true' || out.isTruncated === 'true' || false;
        token = truncated
            ? (out.NextContinuationToken || out.nextContinuationToken || out.NextMarker || out.nextMarker)
            : undefined;
    } while (token && remaining > 0);
    return all;
}
/**
* Lists multipart uploads in the bucket.
* This method sends a request to list multipart uploads in the specified bucket.
* @param {string} [delimiter='/'] - The delimiter to use for grouping uploads.
* @param {string} [prefix=''] - The prefix to filter uploads by.
* @param {IT.HttpMethod} [method='GET'] - The HTTP method to use for the request (GET or HEAD).
* @param {Record<string, string | number | boolean | undefined>} [opts={}] - Additional options for the request.
* @returns A promise that resolves to a list of multipart uploads or an error.
*/
async listMultipartUploads(delimiter = '/', prefix = '', method = 'GET', opts = {}) {
this._checkDelimiter(delimiter);
this._checkPrefix(prefix);
this._validateMethodIsGetOrHead(method);
this._checkOpts(opts);
const query = { uploads: '', ...opts };
const keyPath = delimiter === '/' ? delimiter : uriEscape(delimiter);
const res = await this._signedRequest(method, keyPath, {
query,
withQuery: true,
});
// doublecheck if this is needed
// if (method === 'HEAD') {
// return {
// size: +(res.headers.get(C.HEADER_CONTENT_LENGTH) ?? '0'),
// mtime: res.headers.get(C.HEADER_LAST_MODIFIED) ? new Date(res.headers.get(C.HEADER_LAST_MODIFIED)!) : undefined,
// etag: res.headers.get(C.HEADER_ETAG) ?? '',
// };
// }
const raw = parseXml(await res.text());
if (typeof raw !== 'object' || raw === null) {
throw new Error(`${ERROR_PREFIX}Unexpected listMultipartUploads response shape`);
}
if ('listMultipartUploadsResult' in raw) {
return raw.listMultipartUploadsResult;
}
return raw;
}
/**
* Get an object from the S3-compatible service.
* This method sends a request to retrieve the specified object from the S3-compatible service.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the object data (string) or null if not found.
*/
async getObject(key, opts = {}, ssecHeaders) {
// if ssecHeaders is set, add it to headers
const res = await this._signedRequest('GET', key, {
query: opts, // use opts.query if it exists, otherwise use an empty object
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res.text();
}
/**
* Get an object response from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns the full response.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the Response object or null if not found.
*/
async getObjectResponse(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res;
}
/**
* Get an object as an ArrayBuffer from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns it as an ArrayBuffer.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the object data as an ArrayBuffer or null if not found.
*/
async getObjectArrayBuffer(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res.arrayBuffer();
}
/**
* Get an object as JSON from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns it as JSON.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the object data as JSON or null if not found.
*/
async getObjectJSON(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return null;
}
return res.json();
}
/**
* Get an object with its ETag from the S3-compatible service.
* This method sends a request to retrieve the specified object and its ETag.
* @param {string} key - The key of the object to retrieve.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to an object containing the ETag and the object data as an ArrayBuffer or null if not found.
*/
async getObjectWithETag(key, opts = {}, ssecHeaders) {
try {
const res = await this._signedRequest('GET', key, {
query: opts,
tolerated: [200, 404, 412, 304],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if ([404, 412, 304].includes(res.status)) {
return { etag: null, data: null };
}
const etag = res.headers.get(HEADER_ETAG);
if (!etag) {
throw new Error(`${ERROR_PREFIX}ETag not found in response headers`);
}
return { etag: sanitizeETag(etag), data: await res.arrayBuffer() };
}
catch (err) {
this._log('error', `Error getting object ${key} with ETag: ${String(err)}`);
throw err;
}
}
/**
* Get an object as a raw response from the S3-compatible service.
* This method sends a request to retrieve the specified object and returns the raw response.
* @param {string} key - The key of the object to retrieve.
* @param {boolean} [wholeFile=true] - Whether to retrieve the whole file or a range.
* @param {number} [rangeFrom=0] - The starting byte for the range (if not whole file).
* @param {number} [rangeTo=this.requestSizeInBytes] - The ending byte for the range (if not whole file).
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns A promise that resolves to the Response object.
*/
async getObjectRaw(key, wholeFile = true, rangeFrom = 0, rangeTo = this.requestSizeInBytes, opts = {}, ssecHeaders) {
const rangeHdr = wholeFile ? {} : { range: `bytes=${rangeFrom}-${rangeTo - 1}` };
return this._signedRequest('GET', key, {
query: { ...opts },
headers: { ...rangeHdr, ...ssecHeaders },
withQuery: true, // keep ?query=string behaviour
});
}
/**
* Get the content length of an object.
* This method sends a HEAD request to retrieve the content length of the specified object.
* @param {string} key - The key of the object to retrieve the content length for.
* @returns A promise that resolves to the content length of the object in bytes, or 0 if not found.
* @throws {Error} If the content length header is not found in the response.
*/
async getContentLength(key, ssecHeaders) {
try {
const res = await this._signedRequest('HEAD', key, {
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
const len = res.headers.get(HEADER_CONTENT_LENGTH);
return len ? +len : 0;
}
catch (err) {
this._log('error', `Error getting content length for object ${key}: ${String(err)}`);
throw new Error(`${ERROR_PREFIX}Error getting content length for object ${key}: ${String(err)}`);
}
}
/**
* Checks if an object exists in the S3-compatible service.
* This method sends a HEAD request to check if the specified object exists.
* @param {string} key - The key of the object to check.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @returns A promise that resolves to true if the object exists, false if not found, or null if ETag mismatch.
*/
async objectExists(key, opts = {}) {
const res = await this._signedRequest('HEAD', key, {
query: opts,
tolerated: [200, 404, 412, 304],
});
if (res.status === 404) {
return false; // not found
}
if (res.status === 412 || res.status === 304) {
return null; // ETag mismatch
}
return true; // found (200)
}
/**
* Retrieves the ETag of an object without downloading its content.
* @param {string} key - The key of the object to retrieve the ETag for.
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<string | null>} A promise that resolves to the ETag value or null if the object is not found.
* @throws {Error} If the ETag header is not found in the response.
* @example
* const etag = await s3.getEtag('path/to/file.txt');
* if (etag) {
* console.log(`File ETag: ${etag}`);
* }
*/
async getEtag(key, opts = {}, ssecHeaders) {
const res = await this._signedRequest('HEAD', key, {
query: opts,
tolerated: [200, 304, 404, 412],
headers: ssecHeaders ? { ...ssecHeaders } : undefined,
});
if (res.status === 404) {
return null;
}
if (res.status === 412 || res.status === 304) {
return null; // ETag mismatch
}
const etag = res.headers.get(HEADER_ETAG);
if (!etag) {
throw new Error(`${ERROR_PREFIX}ETag not found in response headers`);
}
return sanitizeETag(etag);
}
/**
* Uploads an object to the S3-compatible service.
* @param {string} key - The key/path where the object will be stored.
* @param {string | Buffer} data - The data to upload (string or Buffer).
* @param {string} [fileType='application/octet-stream'] - The MIME type of the file.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<Response>} A promise that resolves to the Response object from the upload request.
* @throws {TypeError} If data is not a string or Buffer.
* @example
* // Upload text file
* await s3.putObject('hello.txt', 'Hello, World!', 'text/plain');
*
* // Upload binary data
* const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47]);
* await s3.putObject('image.png', buffer, 'image/png');
*/
async putObject(key, data, fileType = DEFAULT_STREAM_CONTENT_TYPE, ssecHeaders) {
if (!(data instanceof Buffer || typeof data === 'string')) {
throw new TypeError(ERROR_DATA_BUFFER_REQUIRED);
}
return this._signedRequest('PUT', key, {
body: data,
headers: {
[HEADER_CONTENT_LENGTH]: typeof data === 'string' ? Buffer.byteLength(data) : data.length,
[HEADER_CONTENT_TYPE]: fileType,
...ssecHeaders,
},
tolerated: [200],
});
}
/**
* Initiates a multipart upload and returns the upload ID.
* @param {string} key - The key/path where the object will be stored.
* @param {string} [fileType='application/octet-stream'] - The MIME type of the file.
* @param {IT.SSECHeaders?} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<string>} A promise that resolves to the upload ID for the multipart upload.
* @throws {TypeError} If key is invalid or fileType is not a string.
* @throws {Error} If the multipart upload fails to initialize.
* @example
* const uploadId = await s3.getMultipartUploadId('large-file.zip', 'application/zip');
* console.log(`Started multipart upload: ${uploadId}`);
*/
async getMultipartUploadId(key, fileType = DEFAULT_STREAM_CONTENT_TYPE, ssecHeaders) {
this._checkKey(key);
if (typeof fileType !== 'string') {
throw new TypeError(`${ERROR_PREFIX}fileType must be a string`);
}
const query = { uploads: '' };
const headers = { [HEADER_CONTENT_TYPE]: fileType, ...ssecHeaders };
const res = await this._signedRequest('POST', key, {
query,
headers,
withQuery: true,
});
const parsed = parseXml(await res.text());
// if (
// parsed &&
// typeof parsed === 'object' &&
// 'initiateMultipartUploadResult' in parsed &&
// parsed.initiateMultipartUploadResult &&
// 'uploadId' in (parsed.initiateMultipartUploadResult as { uploadId: string })
// ) {
// return (parsed.initiateMultipartUploadResult as { uploadId: string }).uploadId;
// }
if (parsed && typeof parsed === 'object') {
// Check for both cases of InitiateMultipartUploadResult
const uploadResult = parsed.initiateMultipartUploadResult ||
parsed.InitiateMultipartUploadResult;
if (uploadResult && typeof uploadResult === 'object') {
// Check for both cases of uploadId
const uploadId = uploadResult.uploadId || uploadResult.UploadId;
if (uploadId && typeof uploadId === 'string') {
return uploadId;
}
}
}
throw new Error(`${ERROR_PREFIX}Failed to create multipart upload: ${JSON.stringify(parsed)}`);
}
/**
* Uploads a part in a multipart upload.
* @param {string} key - The key of the object being uploaded.
* @param {string} uploadId - The upload ID from getMultipartUploadId.
* @param {Buffer | string} data - The data for this part.
* @param {number} partNumber - The part number (must be between 1 and 10,000).
* @param {Record<string, unknown>} [opts={}] - Additional options for the request.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<IT.UploadPart>} A promise that resolves to an object containing the partNumber and etag.
* @throws {TypeError} If any parameter is invalid.
* @example
* const part = await s3.uploadPart(
* 'large-file.zip',
* uploadId,
* partData,
* 1
* );
* console.log(`Part ${part.partNumber} uploaded with ETag: ${part.etag}`);
*/
async uploadPart(key, uploadId, data, partNumber, opts = {}, ssecHeaders) {
this._validateUploadPartParams(key, uploadId, data, partNumber, opts);
const query = { uploadId, partNumber, ...opts };
const res = await this._signedRequest('PUT', key, {
query,
body: data,
headers: {
[HEADER_CONTENT_LENGTH]: typeof data === 'string' ? Buffer.byteLength(data) : data.length,
...ssecHeaders,
},
});
return { partNumber, etag: sanitizeETag(res.headers.get('etag') || '') };
}
/**
* Completes a multipart upload by combining all uploaded parts.
* @param {string} key - The key of the object being uploaded.
* @param {string} uploadId - The upload ID from getMultipartUploadId.
* @param {Array<IT.UploadPart>} parts - Array of uploaded parts with partNumber and etag.
* @returns {Promise<IT.CompleteMultipartUploadResult>} A promise that resolves to the completion result containing the final ETag.
* @throws {Error} If the multipart upload fails to complete.
* @example
* const result = await s3.completeMultipartUpload(
* 'large-file.zip',
* uploadId,
* [
* { partNumber: 1, etag: 'abc123' },
* { partNumber: 2, etag: 'def456' }
* ]
* );
* console.log(`Upload completed with ETag: ${result.etag}`);
*/
async completeMultipartUpload(key, uploadId, parts) {
const query = { uploadId };
const xmlBody = this._buildCompleteMultipartUploadXml(parts);
const headers = {
[HEADER_CONTENT_TYPE]: XML_CONTENT_TYPE,
[HEADER_CONTENT_LENGTH]: Buffer.byteLength(xmlBody).toString(),
};
const res = await this._signedRequest('POST', key, {
query,
body: xmlBody,
headers,
withQuery: true,
});
const parsed = parseXml(await res.text());
if (parsed && typeof parsed === 'object') {
// Check for both cases
const result = parsed.completeMultipartUploadResult || parsed.CompleteMultipartUploadResult || parsed;
if (result && typeof result === 'object') {
const resultObj = result;
// Handle ETag in all its variations
const etag = resultObj.ETag || resultObj.eTag || resultObj.etag;
if (etag && typeof etag === 'string') {
return {
...resultObj,
etag: this.sanitizeETag(etag),
};
}
return result;
}
}
throw new Error(`${ERROR_PREFIX}Failed to complete multipart upload: ${JSON.stringify(parsed)}`);
}
/**
* Aborts a multipart upload and removes all uploaded parts.
* @param {string} key - The key of the object being uploaded.
* @param {string} uploadId - The upload ID to abort.
* @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
* @returns {Promise<object>} A promise that resolves to an object containing the abort status and details.
* @throws {TypeError} If key or uploadId is invalid.
* @throws {Error} If the abort operation fails.
* @example
* try {
* const result = await s3.abortMultipartUpload('large-file.zip', uploadId);
* console.log('Upload aborted:', result.status);
* } catch (error) {
* console.error('Failed to abort upload:', error);
* }
*/
async abortMultipartUpload(key, uploadId, ssecHeaders) {
this._checkKey(key);
if (!uploadId) {
throw new TypeError(ERROR_UPLOAD_ID_REQUIRED);
}
const query = { uploadId };
const headers = { [HEADER_CONTENT_TYPE]: XML_CONTENT_TYPE, ...(ssecHeaders ? { ...ssecHeaders } : {}) };
const res = await this._signedRequest('DELETE', key, {
query,
headers,
withQuery: true,
});
const parsed = parseXml(await res.text());
if (parsed &&
'error' in parsed &&
typeof parsed.error === 'object' &&
parsed.error !== null &&
'message' in parsed.error) {
this._log('error', `${ERROR_PREFIX}Failed to abort multipart upload: ${String(parsed.error.message)}`);
throw new Error(`${ERROR_PREFIX}Failed to abort multipart upload: ${String(parsed.error.message)}`);
}
return { status: 'Aborted', key, uploadId, response: parsed };
}
_buildCompleteMultipartUploadXml(parts) {
return `
<CompleteMultipartUpload>
${parts
.map(part => `
<Part>
<PartNumber>${part.partNumber}</PartNumber>
<ETag>${part.etag}</ETag>
</Part>
`)
.join('')}
</CompleteMultipartUpload>
`;
}
/**
* Deletes an object from the bucket.
* This method sends a request to delete the specified object from the bucket.
* @param {string} key - The key of the object to delete.
* @