// lean-s3: A server-side S3 API for the regular user.
import * as nodeUtil from "node:util";
import { Readable } from "node:stream";
import { Agent, request } from "undici";
import { XMLBuilder, XMLParser } from "fast-xml-parser";
import { createHash, createHmac } from "node:crypto";
//#region src/S3Stat.ts
var S3Stat = class S3Stat {
etag;
lastModified;
size;
type;
constructor(etag, lastModified, size, type) {
this.etag = etag;
this.lastModified = lastModified;
this.size = size;
this.type = type;
}
static tryParseFromHeaders(headers) {
const lm = headers["last-modified"];
if (lm === null || typeof lm !== "string") return;
const etag = headers.etag;
if (etag === null || typeof etag !== "string") return;
const cl = headers["content-length"];
if (cl === null) return;
const size = Number(cl);
if (!Number.isSafeInteger(size)) return;
const ct = headers["content-type"];
if (ct === null || typeof ct !== "string") return;
return new S3Stat(etag, new Date(lm), size, ct);
}
};
//#endregion
//#region src/S3Error.ts
var S3Error = class extends Error {
code;
/** Path/key of the affected object. */
path;
message;
/** The HTTP status code. */
status;
constructor(code, path, { message = void 0, cause = void 0, status = void 0 } = {}) {
super(message, { cause });
this.code = code;
this.path = path;
this.message = message ?? "Some unknown error occurred.";
this.status = status;
}
};
//#endregion
//#region src/S3BucketEntry.ts
/**
* @internal Normally, we'd use an interface for this, but a class with pre-defined fields makes it easier for V8 to optimize hidden classes.
*/
var S3BucketEntry = class S3BucketEntry {
key;
size;
lastModified;
etag;
storageClass;
checksumAlgorithm;
checksumType;
constructor(key, size, lastModified, etag, storageClass, checksumAlgorithm, checksumType) {
this.key = key;
this.size = size;
this.lastModified = lastModified;
this.etag = etag;
this.storageClass = storageClass;
this.checksumAlgorithm = checksumAlgorithm;
this.checksumType = checksumType;
}
/**
* @internal
*/
static parse(source) {
return new S3BucketEntry(source.Key, source.Size, new Date(source.LastModified), source.ETag, source.StorageClass, source.ChecksumAlgorithm, source.ChecksumType);
}
};
//#endregion
//#region src/sign.ts
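/**
* Derives the SigV4 signing key by chaining HMAC-SHA256 through the secret key,
* the date, the region, the service ("s3"), and the literal "aws4_request".
*/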
function deriveSigningKey(date, region, secretAccessKey) {
const dateKey = createHmac("sha256", `AWS4${secretAccessKey}`).update(date).digest();
const dateRegionKey = createHmac("sha256", dateKey).update(region).digest();
const dateRegionServiceKey = createHmac("sha256", dateRegionKey).update("s3").digest();
return createHmac("sha256", dateRegionServiceKey).update("aws4_request").digest();
}
function signEncodedPolicy(signingKey, encodedPolicy) {
return createHmac("sha256", signingKey).update(encodedPolicy).digest("hex");
}
function signCanonicalDataHash(signingKey, canonicalDataHash, date, region) {
return createHmac("sha256", signingKey).update(`AWS4-HMAC-SHA256\n${date.dateTime}\n${date.date}/${region}/s3/aws4_request\n${canonicalDataHash}`).digest("hex");
}
const unsignedPayload = "UNSIGNED-PAYLOAD";
/**
* Same as {@link createCanonicalDataDigest}, but signs only the `host` header and uses `UNSIGNED-PAYLOAD` as the content hash.
*
* Used for pre-signing only. Pre-signed URLs [cannot contain content hashes](https://github.com/aws/aws-sdk-js/blob/966fa6c316dbb11ca9277564ff7120e6b16467f4/lib/signers/v4.js#L182-L183)
* and the only header that is signed is `host`. So we can use an optimized version for that.
*/
function createCanonicalDataDigestHostOnly(method, path, query, host) {
return createHash("sha256").update(`${method}\n${path}\n${query}\nhost:${host}\n\nhost\nUNSIGNED-PAYLOAD`).digest("hex");
}
function createCanonicalDataDigest(method, path, query, sortedHeaders, contentHashStr) {
const sortedHeaderNames = Object.keys(sortedHeaders);
let canonData = `${method}\n${path}\n${query}\n`;
for (const header of sortedHeaderNames) canonData += `${header}:${sortedHeaders[header]}\n`;
canonData += "\n";
canonData += sortedHeaderNames.length > 0 ? sortedHeaderNames[0] : "";
for (let i = 1; i < sortedHeaderNames.length; ++i) canonData += `;${sortedHeaderNames[i]}`;
canonData += `\n${contentHashStr}`;
return createHash("sha256").update(canonData).digest("hex");
}
function sha256(data) {
return createHash("sha256").update(data).digest();
}
function md5Base64(data) {
return createHash("md5").update(data).digest("base64");
}
//#endregion
//#region src/KeyCache.ts
var KeyCache = class {
#lastNumericDay = -1;
#keys = /* @__PURE__ */ new Map();
computeIfAbsent(date, region, accessKeyId, secretAccessKey) {
if (date.numericDayStart !== this.#lastNumericDay) {
this.#keys.clear();
this.#lastNumericDay = date.numericDayStart;
}
const cacheKey = `${date.date}:${region}:${accessKeyId}`;
const key = this.#keys.get(cacheKey);
if (key) return key;
const newKey = deriveSigningKey(date.date, region, secretAccessKey);
this.#keys.set(cacheKey, newKey);
return newKey;
}
};
//#endregion
//#region src/AmzDate.ts
const ONE_DAY = 1e3 * 60 * 60 * 24;
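/**
* Formats a `Date` as the compact UTC timestamps used by SigV4,
* e.g. `date` = "20240131" and `dateTime` = "20240131T235959Z".
*/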
function getAmzDate(dateTime) {
const date = pad4(dateTime.getUTCFullYear()) + pad2(dateTime.getUTCMonth() + 1) + pad2(dateTime.getUTCDate());
const time = pad2(dateTime.getUTCHours()) + pad2(dateTime.getUTCMinutes()) + pad2(dateTime.getUTCSeconds());
return {
numericDayStart: dateTime.getTime() / ONE_DAY | 0,
date,
dateTime: `${date}T${time}Z`
};
}
function now() {
return getAmzDate(/* @__PURE__ */ new Date());
}
function pad4(v) {
return v < 10 ? `000${v}` : v < 100 ? `00${v}` : v < 1e3 ? `0${v}` : v.toString();
}
function pad2(v) {
return v < 10 ? `0${v}` : v.toString();
}
//#endregion
//#region src/url.ts
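/**
* Builds the request URL for a bucket and key. The endpoint may contain `{bucket}` and
* `{region}` placeholders (virtual-hosted-style addressing); if `{bucket}` is absent,
* the bucket name is prepended to the path instead (path-style addressing).
*/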
function buildRequestUrl(endpoint, bucket, region, path) {
const normalizedBucket = normalizePath(bucket);
const [endpointWithBucketAndRegion, replacedBucket] = replaceDomainPlaceholders(endpoint, normalizedBucket, region);
const result = new URL(endpointWithBucketAndRegion);
result.pathname = (result.pathname.endsWith("/") ? result.pathname : `${result.pathname}/`) + (replacedBucket ? normalizePath(path) : `${normalizedBucket}/${normalizePath(path)}`).replaceAll(":", "%3A").replaceAll("+", "%2B").replaceAll("(", "%28").replaceAll(")", "%29").replaceAll(",", "%2C").replaceAll("'", "%27").replaceAll("*", "%2A");
return result;
}
function replaceDomainPlaceholders(endpoint, bucket, region) {
const replacedBucket = endpoint.includes("{bucket}");
return [endpoint.replaceAll("{bucket}", bucket).replaceAll("{region}", region), replacedBucket];
}
/**
* Removes trailing and leading slash.
*/
function normalizePath(path) {
const start = path[0] === "/" ? 1 : 0;
const end = path[path.length - 1] === "/" ? path.length - 1 : path.length;
return path.substring(start, end);
}
/**
* Sorts headers alphabetically. Removes headers that are undefined/null.
*
* `http.request` doesn't allow passing `undefined` as header values (despite the types allowing it),
* so we have to filter afterwards.
*/
function prepareHeadersForSigning(unfilteredHeadersUnsorted) {
const result = {};
for (const header of Object.keys(unfilteredHeadersUnsorted).sort()) {
const v = unfilteredHeadersUnsorted[header];
if (v !== void 0 && v !== null) result[header] = v;
}
return result;
}
function getRangeHeader(start, endExclusive) {
return typeof start === "number" || typeof endExclusive === "number" ? `bytes=${start ?? 0}-${typeof endExclusive === "number" ? endExclusive - 1 : ""}` : void 0;
}
//#endregion
//#region src/error.ts
const xmlParser$1 = new XMLParser();
async function getResponseError(response, path) {
let body;
try {
body = await response.body.text();
} catch (cause) {
return new S3Error("Unknown", path, {
message: "Could not read response body.",
cause
});
}
if (response.headers["content-type"] === "application/xml") return parseAndGetXmlError(body, path);
return new S3Error("Unknown", path, { message: "Unknown error during S3 request." });
}
function fromStatusCode(code, path) {
switch (code) {
case 404: return new S3Error("NoSuchKey", path, { message: "The specified key does not exist." });
case 403: return new S3Error("AccessDenied", path, { message: "Access denied to the key." });
default: return;
}
}
function parseAndGetXmlError(body, path) {
let error;
try {
error = xmlParser$1.parse(body);
} catch (cause) {
return new S3Error("Unknown", path, {
message: "Could not parse XML error response.",
cause
});
}
if (error.Error) {
const e = error.Error;
return new S3Error(e.Code || "Unknown", path, { message: e.Message || void 0 });
}
return new S3Error(error.Code || "Unknown", path, { message: error.Message || void 0 });
}
//#endregion
//#region src/request.ts
function getAuthorizationHeader(keyCache, method, path, query, date, sortedSignedHeaders, region, contentHashStr, accessKeyId, secretAccessKey) {
const dataDigest = createCanonicalDataDigest(method, path, query, sortedSignedHeaders, contentHashStr);
const signingKey = keyCache.computeIfAbsent(date, region, accessKeyId, secretAccessKey);
const signature = signCanonicalDataHash(signingKey, dataDigest, date, region);
const signedHeadersSpec = Object.keys(sortedSignedHeaders).join(";");
return `AWS4-HMAC-SHA256 Credential=${`${accessKeyId}/${date.date}/${region}/s3/aws4_request`}, SignedHeaders=${signedHeadersSpec}, Signature=${signature}`;
}
//#endregion
//#region src/branded.ts
function ensureValidBucketName(bucket) {
if (typeof bucket !== "string") throw new TypeError("`bucket` is required and must be a `string`.");
if (bucket.length < 3 || bucket.length > 63) throw new Error("`bucket` must be between 3 and 63 characters long.");
if (bucket.startsWith(".") || bucket.endsWith(".")) throw new Error("`bucket` must not start or end with a period (.)");
if (!/^[a-z0-9.-]+$/.test(bucket)) throw new Error("`bucket` can only contain lowercase letters, numbers, periods (.), and hyphens (-).");
if (bucket.includes("..")) throw new Error("`bucket` must not contain two adjacent periods (..)");
return bucket;
}
function ensureValidAccessKeyId(accessKeyId) {
if (typeof accessKeyId !== "string") throw new TypeError("`AccessKeyId` is required and must be a `string`.");
if (accessKeyId.length < 1) throw new RangeError("`AccessKeyId` must be at least 1 character long.");
return accessKeyId;
}
function ensureValidSecretAccessKey(secretAccessKey) {
if (typeof secretAccessKey !== "string") throw new TypeError("`SecretAccessKey` is required and must be a `string`.");
if (secretAccessKey.length < 1) throw new RangeError("`SecretAccessKey` must be at least 1 character long.");
return secretAccessKey;
}
function ensureValidPath(path) {
if (typeof path !== "string") throw new TypeError("`path` is required and must be a `string`.");
if (path.length < 1) throw new RangeError("`path` must be at least 1 character long.");
return path;
}
function ensureValidEndpoint(endpoint) {
if (typeof endpoint !== "string") throw new TypeError("`endpoint` is required and must be a `string`.");
if (endpoint.length < 1) throw new RangeError("`endpoint` must be at least 1 character long.");
return endpoint;
}
function ensureValidRegion(region) {
if (typeof region !== "string") throw new TypeError("`region` is required and must be a `string`.");
if (region.length < 1) throw new RangeError("`region` must be at least 1 character long.");
return region;
}
//#endregion
//#region src/assertNever.ts
function assertNever(v) {
throw new TypeError(`Unexpected value of type ${typeof v}.`);
}
//#endregion
//#region src/encode.ts
/**
* Refs:
* - https://datatracker.ietf.org/doc/html/rfc5987#section-3.2
* - https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Content-Disposition
*/
function getContentDispositionHeader(value) {
switch (value.type) {
case "inline": return "inline";
case "attachment": {
const { filename } = value;
if (typeof filename === "undefined") return "attachment";
const encoded = encodeURIComponent(filename);
return `attachment;filename="${encoded}";filename*=UTF-8''${encoded}`;
}
default: assertNever(value);
}
}
function encodeURIComponentExtended(value) {
return encodeURIComponent(value).replaceAll(":", "%3A").replaceAll("+", "%2B").replaceAll("(", "%28").replaceAll(")", "%29").replaceAll(",", "%2C").replaceAll("'", "%27").replaceAll("*", "%2A");
}
//#endregion
//#region src/S3Client.ts
const kWrite = Symbol("kWrite");
const kStream = Symbol("kStream");
const kSignedRequest = Symbol("kSignedRequest");
const kGetEffectiveParams = Symbol("kGetEffectiveParams");
const xmlParser = new XMLParser({
ignoreAttributes: true,
isArray: (_, jPath) => jPath === "ListMultipartUploadsResult.Upload" || jPath === "ListBucketResult.Contents" || jPath === "ListPartsResult.Part" || jPath === "DeleteResult.Deleted" || jPath === "DeleteResult.Error" || jPath === "CORSConfiguration.CORSRule" || jPath === "CORSConfiguration.CORSRule.AllowedMethod" || jPath === "CORSConfiguration.CORSRule.AllowedOrigin" || jPath === "CORSConfiguration.CORSRule.AllowedHeader" || jPath === "CORSConfiguration.CORSRule.ExposeHeader"
});
const xmlBuilder = new XMLBuilder({
attributeNamePrefix: "$",
ignoreAttributes: false
});
/**
* A configured S3 bucket instance for managing files.
*
* @example
* ```js
* // Basic bucket setup
* const bucket = new S3Client({
* bucket: "my-bucket",
* accessKeyId: "key",
* secretAccessKey: "secret"
* });
* // Get file instance
* const file = bucket.file("image.jpg");
* await file.delete();
* ```
*/
var S3Client = class {
#options;
#keyCache = new KeyCache();
#dispatcher = new Agent();
/**
* Create a new instance of an S3 bucket so that credentials can be managed from a single instance instead of being passed to every method.
*
* @param options The default options to use for the S3 client.
*/
constructor(options) {
if (!options) throw new Error("`options` is required.");
this.#options = {
accessKeyId: ensureValidAccessKeyId(options.accessKeyId),
secretAccessKey: ensureValidSecretAccessKey(options.secretAccessKey),
endpoint: ensureValidEndpoint(options.endpoint),
region: ensureValidRegion(options.region),
bucket: ensureValidBucketName(options.bucket),
sessionToken: options.sessionToken
};
}
/** @internal */
[kGetEffectiveParams](options) {
return [
options.region ? ensureValidRegion(options.region) : this.#options.region,
options.endpoint ? ensureValidEndpoint(options.endpoint) : this.#options.endpoint,
options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket
];
}
/**
* Creates an S3File instance for the given path.
*
* @param {string} path The path to the object in the bucket. Also known as [object key](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html).
* We recommend not using the following characters in a key name because of significant special character handling, which isn't consistent across all applications (see [AWS docs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html)):
* - Backslash (`\\`)
* - Left brace (`{`)
* - Non-printable ASCII characters (128–255 decimal characters)
* - Caret or circumflex (`^`)
* - Right brace (`}`)
* - Percent character (`%`)
* - Grave accent or backtick (`\``)
* - Right bracket (`]`)
* - Quotation mark (`"`)
* - Greater than sign (`>`)
* - Left bracket (`[`)
* - Tilde (`~`)
* - Less than sign (`<`)
* - Pound sign (`#`)
* - Vertical bar or pipe (`|`)
*
* lean-s3 does not enforce these restrictions.
*
* @example
* ```js
* const file = client.file("image.jpg");
* await file.write(imageData);
*
* const configFile = client.file("config.json", {
* type: "application/json",
* });
* ```
*/
file(path, options = {}) {
return new S3File(this, ensureValidPath(path), void 0, void 0, options.type ?? void 0);
}
/**
* Generate a presigned URL for temporary access to a file.
* Useful for generating upload/download URLs without exposing credentials.
* @returns A pre-signed URL for the requested operation on `path`.
*
* @example
* ```js
* const downloadUrl = client.presign("file.pdf", {
* expiresIn: 3600 // 1 hour
* });
* ```
*
* @example
* ```js
* client.presign("foo.jpg", {
* expiresIn: 3600, // 1 hour
* response: {
* contentDisposition: {
* type: "attachment",
* filename: "download.jpg",
* },
* },
* });
* ```
*/
presign(path, options = {}) {
const contentLength = options.contentLength ?? void 0;
if (typeof contentLength === "number") {
if (contentLength < 0) throw new RangeError("`contentLength` must be >= 0.");
}
const method = options.method ?? "GET";
const contentType = options.type ?? void 0;
const [region, endpoint, bucket] = this[kGetEffectiveParams](options);
const contentDisposition = options.response?.contentDisposition;
const responseContentDisposition = contentDisposition ? getContentDispositionHeader(contentDisposition) : void 0;
const res = buildRequestUrl(endpoint, bucket, region, ensureValidPath(path));
const now$1 = /* @__PURE__ */ new Date();
const date = getAmzDate(now$1);
const signedHeaders = {};
if (typeof contentLength === "number") signedHeaders["content-length"] = String(contentLength);
if (typeof contentType === "string") signedHeaders["content-type"] = contentType;
const hasExtraSignedHeaders = Object.keys(signedHeaders).length > 0;
signedHeaders.host = res.host;
const signedHeaderNames = Object.keys(signedHeaders).join(";");
const query = buildSearchParams(`${this.#options.accessKeyId}/${date.date}/${region}/s3/aws4_request`, date, options.expiresIn ?? 3600, signedHeaderNames, unsignedPayload, options.storageClass, this.#options.sessionToken, options.acl, responseContentDisposition);
const dataDigest = hasExtraSignedHeaders ? createCanonicalDataDigest(method, res.pathname, query, signedHeaders, unsignedPayload) : createCanonicalDataDigestHostOnly(method, res.pathname, query, res.host);
const signingKey = this.#keyCache.computeIfAbsent(date, region, this.#options.accessKeyId, this.#options.secretAccessKey);
res.search = `${query}&X-Amz-Signature=${signCanonicalDataHash(signingKey, dataDigest, date, region)}`;
return res.toString();
}
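/**
* Generate a presigned POST policy for form-based uploads (e.g. from a browser).
* The returned `fields` are submitted together with the file as `multipart/form-data` to `url`.
*
* @example
* ```js
* // A minimal sketch; the content-length-range condition is an optional policy assumption.
* const { url, fields } = client.presignPost({
* key: "uploads/{{filename}}",
* expiresIn: 3600, // 1 hour
* conditions: [["content-length-range", 0, 10 * 1024 * 1024]]
* });
* ```
*/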
presignPost(options) {
const now$1 = /* @__PURE__ */ new Date();
const date = getAmzDate(now$1);
const key = options.key;
const region = ensureValidRegion(options.region ?? this.#options.region);
const bucket = ensureValidBucketName(options.bucket ?? this.#options.bucket);
const endpoint = ensureValidEndpoint(options.endpoint ?? this.#options.endpoint);
const expiresIn = options.expiresIn ?? 3600;
const credential = `${this.#options.accessKeyId}/${date.date}/${region}/s3/aws4_request`;
const fields = {
...options.fields,
bucket,
"X-Amz-Algorithm": "AWS4-HMAC-SHA256",
"X-Amz-Credential": credential,
"X-Amz-Date": date.dateTime,
...this.#options.sessionToken ? { "X-Amz-Security-Token": this.#options.sessionToken } : void 0
};
const policy = {
expiration: new Date(now$1.getTime() + expiresIn * 1e3).toISOString().replace(/\.\d{3}Z$/, "Z"),
conditions: [
[
"eq",
"$bucket",
bucket
],
key.endsWith("{{filename}}") ? [
"starts-with",
"$key",
key.substring(0, key.lastIndexOf("{{filename}}"))
] : [
"eq",
"$key",
key
],
...options.conditions ? options.conditions : [],
[
"eq",
"$x-amz-algorithm",
"AWS4-HMAC-SHA256"
],
[
"eq",
"$x-amz-credential",
credential
],
[
"eq",
"$x-amz-date",
date.dateTime
]
]
};
if (this.#options.sessionToken) policy.conditions.push([
"eq",
"$x-amz-security-token",
this.#options.sessionToken
]);
const policyJson = JSON.stringify(policy);
const encodedPolicy = Buffer.from(policyJson).toString("base64");
const signingKey = this.#keyCache.computeIfAbsent(date, region, this.#options.accessKeyId, this.#options.secretAccessKey);
return {
url: buildRequestUrl(endpoint, bucket, region, "").toString(),
fields: {
...fields,
key,
Policy: encodedPolicy,
"X-Amz-Signature": signEncodedPolicy(signingKey, encodedPolicy)
}
};
}
/**
* Copies an object from a source to a destination.
* @remarks Uses [`CopyObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CopyObject.html).
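*
* @example
* ```js
* const result = await client.copyObject("source/photo.jpg", "archive/photo.jpg");
* console.log(result.etag, result.lastModified);
* ```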
*/
async copyObject(sourceKey, destinationKey, options = {}) {
const sourceBucket = options.sourceBucket ? ensureValidBucketName(options.sourceBucket) : this.#options.bucket;
const destinationBucket = options.destinationBucket ? ensureValidBucketName(options.destinationBucket) : this.#options.bucket;
const normalizedSourceKey = normalizePath(ensureValidPath(sourceKey));
const copySource = encodeURIComponent(`${sourceBucket}/${normalizedSourceKey}`);
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, destinationBucket, "PUT", ensureValidPath(destinationKey), void 0, void 0, { "x-amz-copy-source": copySource }, void 0, void 0, options.signal);
if (response.statusCode !== 200) throw await getResponseError(response, destinationKey);
const res = ensureParsedXml(await response.body.text()).CopyObjectResult ?? {};
return {
etag: res.ETag,
lastModified: res.LastModified ? new Date(res.LastModified) : void 0,
checksumCRC32: res.ChecksumCRC32,
checksumCRC32C: res.ChecksumCRC32C,
checksumSHA1: res.ChecksumSHA1,
checksumSHA256: res.ChecksumSHA256
};
}
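/**
* Starts a multipart upload and returns the `uploadId` consumed by {@link S3Client#uploadPart} and {@link S3Client#completeMultipartUpload}.
* @remarks Uses [`CreateMultipartUpload`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateMultipartUpload.html).
*
* @example
* ```js
* // A minimal multipart flow sketch; `chunk` is assumed to hold the part's bytes.
* const { uploadId } = await client.createMultipartUpload("big-file.bin");
* const part = await client.uploadPart("big-file.bin", uploadId, chunk, 1);
* await client.completeMultipartUpload("big-file.bin", uploadId, [part]);
* ```
*/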
async createMultipartUpload(key, options = {}) {
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "POST", ensureValidPath(key), "uploads=", void 0, void 0, void 0, void 0, options.signal);
if (response.statusCode !== 200) throw await getResponseError(response, key);
const res = ensureParsedXml(await response.body.text()).InitiateMultipartUploadResult ?? {};
return {
bucket: res.Bucket,
key: res.Key,
uploadId: res.UploadId
};
}
/**
* @remarks Uses [`ListMultipartUploads`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListMultipartUploads.html).
* @throws {RangeError} If `options.maxUploads` is not between `1` and `1000`.
*/
async listMultipartUploads(options = {}) {
let query = "uploads=";
if (options.delimiter) {
if (typeof options.delimiter !== "string") throw new TypeError("`delimiter` must be a `string`.");
query += `&delimiter=${encodeURIComponent(options.delimiter)}`;
}
if (options.keyMarker) {
if (typeof options.keyMarker !== "string") throw new TypeError("`keyMarker` must be a `string`.");
query += `&key-marker=${encodeURIComponent(options.keyMarker)}`;
}
if (typeof options.maxUploads !== "undefined") {
if (typeof options.maxUploads !== "number") throw new TypeError("`maxUploads` must be a `number`.");
if (options.maxUploads < 1 || options.maxUploads > 1e3) throw new RangeError("`maxUploads` has to be between 1 and 1000.");
query += `&max-uploads=${options.maxUploads}`;
}
if (options.prefix) {
if (typeof options.prefix !== "string") throw new TypeError("`prefix` must be a `string`.");
query += `&prefix=${encodeURIComponent(options.prefix)}`;
}
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "GET", "", query, void 0, void 0, void 0, void 0, options.signal);
if (response.statusCode !== 200) throw await getResponseError(response, "");
const root = ensureParsedXml(await response.body.text()).ListMultipartUploadsResult ?? {};
return {
bucket: root.Bucket || void 0,
delimiter: root.Delimiter || void 0,
prefix: root.Prefix || void 0,
keyMarker: root.KeyMarker || void 0,
uploadIdMarker: root.UploadIdMarker || void 0,
nextKeyMarker: root.NextKeyMarker || void 0,
nextUploadIdMarker: root.NextUploadIdMarker || void 0,
maxUploads: root.MaxUploads ?? 1e3,
isTruncated: root.IsTruncated === "true",
uploads: root.Upload?.map((u) => ({
key: u.Key || void 0,
uploadId: u.UploadId || void 0,
storageClass: u.StorageClass || void 0,
checksumAlgorithm: u.ChecksumAlgorithm || void 0,
checksumType: u.ChecksumType || void 0,
initiated: u.Initiated ? new Date(u.Initiated) : void 0
})) ?? []
};
}
/**
* @remarks Uses [`AbortMultipartUpload`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_AbortMultipartUpload.html).
* @throws {RangeError} If `key` is not at least 1 character long.
* @throws {Error} If `uploadId` is not provided.
*/
async abortMultipartUpload(path, uploadId, options = {}) {
if (!uploadId) throw new Error("`uploadId` is required.");
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "DELETE", ensureValidPath(path), `uploadId=${encodeURIComponent(uploadId)}`, void 0, void 0, void 0, void 0, options.signal);
if (response.statusCode !== 204) throw await getResponseError(response, path);
}
/**
* @remarks Uses [`CompleteMultipartUpload`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html).
* @throws {RangeError} If `key` is not at least 1 character long.
* @throws {Error} If `uploadId` is not provided.
*/
async completeMultipartUpload(path, uploadId, parts, options = {}) {
if (!uploadId) throw new Error("`uploadId` is required.");
const body = xmlBuilder.build({ CompleteMultipartUpload: { Part: parts.map((part) => ({
PartNumber: part.partNumber,
ETag: part.etag
})) } });
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "POST", ensureValidPath(path), `uploadId=${encodeURIComponent(uploadId)}`, body, void 0, void 0, void 0, options.signal);
if (response.statusCode !== 200) throw await getResponseError(response, path);
const res = ensureParsedXml(await response.body.text()).CompleteMultipartUploadResult ?? {};
return {
location: res.Location || void 0,
bucket: res.Bucket || void 0,
key: res.Key || void 0,
etag: res.ETag || void 0,
checksumCRC32: res.ChecksumCRC32 || void 0,
checksumCRC32C: res.ChecksumCRC32C || void 0,
checksumCRC64NVME: res.ChecksumCRC64NVME || void 0,
checksumSHA1: res.ChecksumSHA1 || void 0,
checksumSHA256: res.ChecksumSHA256 || void 0,
checksumType: res.ChecksumType || void 0
};
}
/**
* @remarks Uses [`UploadPart`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_UploadPart.html).
* @throws {RangeError} If `key` is not at least 1 character long.
* @throws {Error} If `uploadId` is not provided.
*/
async uploadPart(path, uploadId, data, partNumber, options = {}) {
if (!uploadId) throw new Error("`uploadId` is required.");
if (!data) throw new Error("`data` is required.");
if (typeof partNumber !== "number" || partNumber <= 0) throw new Error("`partNumber` has to be a `number` which is >= 1.");
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "PUT", ensureValidPath(path), `partNumber=${partNumber}&uploadId=${encodeURIComponent(uploadId)}`, data, void 0, void 0, void 0, options.signal);
if (response.statusCode === 200) {
response.body.dump();
const etag = response.headers.etag;
if (typeof etag !== "string" || etag.length === 0) throw new S3Error("Unknown", "", { message: "Response did not contain an etag." });
return {
partNumber,
etag
};
}
throw await getResponseError(response, "");
}
/**
* @remarks Uses [`ListParts`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListParts.html).
* @throws {RangeError} If `key` is not at least 1 character long.
* @throws {Error} If `uploadId` is not provided.
* @throws {TypeError} If `options.maxParts` is not a `number`.
* @throws {RangeError} If `options.maxParts` is <= 0.
* @throws {TypeError} If `options.partNumberMarker` is not a `string`.
*/
async listParts(path, uploadId, options = {}) {
let query = "";
if (options.maxParts) {
if (typeof options.maxParts !== "number") throw new TypeError("`maxParts` must be a `number`.");
if (options.maxParts <= 0) throw new RangeError("`maxParts` must be >= 1.");
query += `&max-parts=${options.maxParts}`;
}
if (options.partNumberMarker) {
if (typeof options.partNumberMarker !== "string") throw new TypeError("`partNumberMarker` must be a `string`.");
query += `&part-number-marker=${encodeURIComponent(options.partNumberMarker)}`;
}
query += `&uploadId=${encodeURIComponent(uploadId)}`;
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "GET", ensureValidPath(path), query.substring(1), void 0, void 0, void 0, void 0, options?.signal);
if (response.statusCode === 200) {
const root = ensureParsedXml(await response.body.text()).ListPartsResult ?? {};
return {
bucket: root.Bucket,
key: root.Key,
uploadId: root.UploadId,
partNumberMarker: root.PartNumberMarker ?? void 0,
nextPartNumberMarker: root.NextPartNumberMarker ?? void 0,
maxParts: root.MaxParts ?? 1e3,
isTruncated: root.IsTruncated ?? false,
parts: root.Part?.map((part) => ({
etag: part.ETag,
lastModified: part.LastModified ? new Date(part.LastModified) : void 0,
partNumber: part.PartNumber ?? void 0,
size: part.Size ?? void 0
})) ?? []
};
}
throw await getResponseError(response, path);
}
/**
* Creates a new bucket on the S3 server.
*
* @param name The name of the bucket to create. AWS restricts bucket names according to [some rules](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html). The most important ones are:
* - Bucket names must be between `3` (min) and `63` (max) characters long.
* - Bucket names can consist only of lowercase letters, numbers, periods (`.`), and hyphens (`-`).
* - Bucket names must begin and end with a letter or number.
* - Bucket names must not contain two adjacent periods.
* - Bucket names must not be formatted as an IP address (for example, `192.168.5.4`).
*
* @throws {Error} If the bucket name is invalid.
* @throws {S3Error} If the bucket could not be created, e.g. if it already exists.
* @remarks Uses [`CreateBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateBucket.html)
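*
* @example
* ```js
* await client.createBucket("my-new-bucket");
* ```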
*/
async createBucket(name, options = {}) {
let body;
if (options) {
const location = options.location && (options.location.name || options.location.type) ? {
Name: options.location.name ?? void 0,
Type: options.location.type ?? void 0
} : void 0;
const bucket = options.info && (options.info.dataRedundancy || options.info.type) ? {
DataRedundancy: options.info.dataRedundancy ?? void 0,
Type: options.info.type ?? void 0
} : void 0;
body = location || bucket || options.locationConstraint ? xmlBuilder.build({ CreateBucketConfiguration: {
$xmlns: "http://s3.amazonaws.com/doc/2006-03-01/",
LocationConstraint: options.locationConstraint ?? void 0,
Location: location,
Bucket: bucket
} }) : void 0;
}
const additionalSignedHeaders = body ? { "content-md5": md5Base64(body) } : void 0;
const response = await this[kSignedRequest](options.region ? ensureValidRegion(options.region) : this.#options.region, options.endpoint ? ensureValidEndpoint(options.endpoint) : this.#options.endpoint, ensureValidBucketName(name), "PUT", "", void 0, body, additionalSignedHeaders, void 0, void 0, options.signal);
if (400 <= response.statusCode && response.statusCode < 500) throw await getResponseError(response, "");
response.body.dump();
if (response.statusCode === 200) return;
throw new Error(`Response code not supported: ${response.statusCode}`);
}
/**
* Deletes a bucket from the S3 server.
* @param name The name of the bucket to delete. Same restrictions as in {@link S3Client#createBucket}.
* @throws {Error} If the bucket name is invalid.
* @throws {S3Error} If the bucket could not be deleted, e.g. if it is not empty.
* @remarks Uses [`DeleteBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucket.html).
*/
async deleteBucket(name, options) {
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, ensureValidBucketName(name), "DELETE", "", void 0, void 0, void 0, void 0, void 0, options?.signal);
if (400 <= response.statusCode && response.statusCode < 500) throw await getResponseError(response, "");
response.body.dump();
if (response.statusCode === 204) return;
throw new Error(`Response code not supported: ${response.statusCode}`);
}
/**
* Checks if a bucket exists.
* @param name The name of the bucket to check. Same restrictions as in {@link S3Client#createBucket}.
* @throws {Error} If the bucket name is invalid.
* @remarks Uses [`HeadBucket`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html).
*/
async bucketExists(name, options) {
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, ensureValidBucketName(name), "HEAD", "", void 0, void 0, void 0, void 0, void 0, options?.signal);
if (response.statusCode !== 404 && 400 <= response.statusCode && response.statusCode < 500) throw await getResponseError(response, "");
response.body.dump();
if (response.statusCode === 200) return true;
if (response.statusCode === 404) return false;
throw new Error(`Response code not supported: ${response.statusCode}`);
}
/**
* @remarks Uses [`PutBucketCors`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutBucketCors.html).
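*
* @example
* ```js
* // A minimal sketch of the rule shape this method accepts.
* await client.putBucketCors([{
* allowedOrigins: ["https://example.com"],
* allowedMethods: ["GET", "PUT"],
* allowedHeaders: ["*"],
* maxAgeSeconds: 3600
* }]);
* ```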
*/
async putBucketCors(rules, options = {}) {
const body = xmlBuilder.build({ CORSConfiguration: { CORSRule: rules.map((r) => ({
AllowedOrigin: r.allowedOrigins,
AllowedMethod: r.allowedMethods,
AllowedHeader: r.allowedHeaders,
ExposeHeader: r.exposeHeaders,
ID: r.id ?? void 0,
MaxAgeSeconds: r.maxAgeSeconds ?? void 0
})) } });
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "PUT", "", "cors=", body, { "content-md5": md5Base64(body) }, void 0, void 0, options.signal);
if (response.statusCode === 200) {
response.body.dump();
return;
}
if (400 <= response.statusCode && response.statusCode < 500) throw await getResponseError(response, "");
throw new Error(`Response code not implemented yet: ${response.statusCode}`);
}
/**
* @remarks Uses [`GetBucketCors`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetBucketCors.html).
*/
async getBucketCors(options = {}) {
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "GET", "", "cors=", void 0, void 0, void 0, void 0, options.signal);
if (response.statusCode !== 200) {
response.body.dump();
throw fromStatusCode(response.statusCode, "");
}
return { rules: ((ensureParsedXml(await response.body.text()).CORSConfiguration ?? {}).CORSRule ?? []).filter(Boolean).map((r) => ({
allowedMethods: r.AllowedMethod ?? [],
allowedOrigins: r.AllowedOrigin ?? [],
allowedHeaders: r.AllowedHeader ? r.AllowedHeader : void 0,
exposeHeaders: r.ExposeHeader ? r.ExposeHeader : void 0,
id: r.ID ?? void 0,
maxAgeSeconds: typeof r.MaxAgeSeconds !== "undefined" && r.MaxAgeSeconds !== null ? Number(r.MaxAgeSeconds) : void 0
})) };
}
/**
* @remarks Uses [`DeleteBucketCors`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteBucketCors.html).
*/
async deleteBucketCors(options = {}) {
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "DELETE", "", "cors=", void 0, void 0, void 0, void 0, options.signal);
if (response.statusCode !== 204) {
response.body.dump();
throw fromStatusCode(response.statusCode, "");
}
}
/**
* Uses [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to iterate over all keys. Pagination and continuation are handled internally.
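*
* @example
* ```js
* for await (const entry of client.listIterating({ prefix: "images/" })) {
* console.log(entry.key, entry.size);
* }
* ```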
*/
async *listIterating(options) {
const maxKeys = options?.internalPageSize ?? void 0;
let continuationToken;
do {
const res = await this.list({
...options,
maxKeys,
continuationToken
});
if (!res || res.contents.length === 0) break;
yield* res.contents;
continuationToken = res.nextContinuationToken;
} while (continuationToken);
}
/**
* Uses [`ListObjectsV2`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_ListObjectsV2.html) to list a single page of keys. Use {@link S3Client#listIterating} to iterate over all keys with automatic pagination.
*
* @throws {RangeError} If `maxKeys` is not between `1` and `1000`.
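*
* @example
* ```js
* const page = await client.list({ prefix: "logs/", maxKeys: 100 });
* for (const entry of page.contents) console.log(entry.key);
* // Pass `page.nextContinuationToken` as `continuationToken` to fetch the next page.
* ```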
*/
async list(options = {}) {
let query = "";
if (typeof options.continuationToken !== "undefined") {
if (typeof options.continuationToken !== "string") throw new TypeError("`continuationToken` must be a `string`.");
query += `continuation-token=${encodeURIComponent(options.continuationToken)}&`;
}
query += "list-type=2";
if (typeof options.maxKeys !== "undefined") {
if (typeof options.maxKeys !== "number") throw new TypeError("`maxKeys` must be a `number`.");
if (options.maxKeys < 1 || options.maxKeys > 1e3) throw new RangeError("`maxKeys` has to be between 1 and 1000.");
query += `&max-keys=${options.maxKeys}`;
}
if (typeof options.delimiter !== "undefined") {
if (typeof options.delimiter !== "string") throw new TypeError("`delimiter` must be a `string`.");
query += `&delimiter=${options.delimiter === "/" ? "/" : encodeURIComponent(options.delimiter)}`;
}
if (options.prefix) {
if (typeof options.prefix !== "string") throw new TypeError("`prefix` must be a `string`.");
query += `&prefix=${encodeURIComponent(options.prefix)}`;
}
if (typeof options.startAfter !== "undefined") {
if (typeof options.startAfter !== "string") throw new TypeError("`startAfter` must be a `string`.");
query += `&start-after=${encodeURIComponent(options.startAfter)}`;
}
const response = await this[kSignedRequest](ensureValidRegion(this.#options.region), ensureValidEndpoint(this.#options.endpoint), options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "GET", "", query, void 0, void 0, void 0, void 0, options.signal);
if (response.statusCode !== 200) {
response.body.dump();
throw new Error(`Response code not implemented yet: ${response.statusCode}`);
}
const res = ensureParsedXml(await response.body.text()).ListBucketResult ?? {};
if (!res) throw new S3Error("Unknown", "", { message: "Could not read bucket contents." });
return {
name: res.Name,
prefix: res.Prefix,
startAfter: res.StartAfter,
isTruncated: res.IsTruncated,
continuationToken: res.ContinuationToken,
maxKeys: res.MaxKeys,
keyCount: res.KeyCount,
nextContinuationToken: res.NextContinuationToken,
contents: res.Contents?.map(S3BucketEntry.parse) ?? []
};
}
/**
* Uses [`DeleteObjects`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html) to delete multiple objects in a single request.
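*
* @example
* ```js
* const { errors } = await client.deleteObjects(["old/a.txt", { key: "old/b.txt" }]);
* if (errors.length > 0) console.warn("Some objects were not deleted:", errors);
* ```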
*/
async deleteObjects(objects, options = {}) {
const body = xmlBuilder.build({ Delete: {
Quiet: true,
Object: objects.map((o) => ({ Key: typeof o === "string" ? o : o.key }))
} });
const response = await this[kSignedRequest](this.#options.region, this.#options.endpoint, options.bucket ? ensureValidBucketName(options.bucket) : this.#options.bucket, "POST", "", "delete=", body, { "content-md5": md5Base64(body) }, void 0, void 0, options.signal);
if (response.statusCode === 200) {
const text = await response.body.text();
let deleteResult;
try {
deleteResult = ensureParsedXml(text).DeleteResult ?? {};
} catch (cause) {
throw new S3Error("Unknown", "", {
message: "S3 service responded with invalid XML.",
cause
});
}
return { errors: deleteResult.Error?.map((e) => ({
code: e.Code,
key: e.Key,
message: e.Message,
versionId: e.VersionId
})) ?? [] };
}
if (400 <= response.statusCode && response.statusCode < 500) throw await getResponseError(response, "");
response.body.dump();
throw new Error(`Response code not implemented yet: ${response.statusCode}`);
}
/**
* Do not use this. This is an internal method.
* TODO: Maybe move this into a separate free function?
* @internal
*/
async [kSignedRequest](region, endpoint, bucket, method, pathWithoutBucket, query, body, additionalSignedHeaders, additionalUnsignedHeaders, contentHash, signal) {
const url = buildRequestUrl(endpoint, bucket, region, pathWithoutBucket);
if (query) url.search = query;
const now$1 = now();
const contentHashStr = contentHash?.toString("hex") ?? unsignedPayload;
const headersToBeSigned = prepareHeadersForSigning({
host: url.host,
"x-amz-date": now$1.dateTime,
"x-amz-content-sha256": contentHashStr,
...additionalSignedHeaders
});
try {
return await request(url, {
method,
signal,
dispatcher: this.#dispatcher,
headers: {
...headersToBeSigned,
authorization: getAuthorizationHeader(this.#keyCache, method, url.pathname, query ?? "", now$1, headersToBeSigned, region, contentHashStr, this.#options.accessKeyId, this.#options.secretAccessKey),
...additionalUnsignedHeaders,
"user-agent": "lean-s3"
},
body
});
} catch (cause) {
signal?.throwIfAborted();
throw new S3Error("Unknown", pathWithoutBucket, {
message: "Unknown error during S3 request.",
cause
});
}
}
/** @internal */
async [kWrite](path, data, contentType, contentLength, contentHash, rangeStart, rangeEndExclusive, signal = void 0) {
const bucket = this.#options.bucket;
const endpoint = this.#options.endpoint;
const region = this.#options.region;
const url = buildRequestUrl(endpoint, bucket, region, path);
const now$1 = now();
const contentHashStr = contentHash?.toString("hex") ?? unsignedPayload;
const headersToBeSigned = prepareHeadersForSigning({
"content-length": contentLength?.toString() ?? void 0,
"content-type": contentType,
host: url.host,
range: getRangeHeader(rangeStart, rangeEndExclusive),
"x-amz-content-sha256": contentHashStr,
"x-amz-date": now$1.dateTime
});
let response;
try {
response = await request(url, {
method: "PUT",
signal,
dispatcher: this.#dispatcher,
headers: {
...headersToBeSigned,
authorization: getAuthorizationHeader(this.#keyCache, "PUT", url.pathname, url.search, now$1, headersToBeSigned, region, contentHashStr, this.#options.accessKeyId, this.#options.secretAccessKey),
"user-agent": "lean-s3"
},
body: data
});
} catch (cause) {
signal?.throwIfAborted();
throw new S3Error("Unknown", path, {
message: "Unknown error during S3 request.",
cause
});
}
const status = response.statusCode;
if (200 <= status && status < 300) return;
throw await getResponseError(response, path);
}
/**
* @internal
*/
[kStream](path, contentHash, rangeStart, rangeEndExclusive) {
const bucket = this.#options.bucket;
const endpoint = this.#options.endpoint;
const region = this.#options.region;
const now$1 = now();
const url = buildRequestUrl(endpoint, bucket, region, path);
const range = getRangeHeader(rangeStart, rangeEndExclusive);
const contentHashStr = contentHash?.toString("hex") ?? unsignedPayload;
const headersToBeSigned = prepareHeadersForSigning({
"amz-sdk-invocation-id": crypto.randomUUID(),
host: url.host,
range,
"x-amz-content-sha256": contentHashStr,
"x-amz-date": now$1.dateTime
});
const ac = new AbortController();
return new ReadableStream({
type: "bytes",
start: (controller) => {
const onNetworkError = (cause) => {
controller.error(new S3Error("Unknown", path, {
message: void 0,
cause
}));
};
request(url, {
method: "GET",
signal: ac.signal,
dispatcher: this.#dispatcher,
headers: {
...headersToBeSigned,
authorization: getAuthorizationHeader(this.#keyCache, "GET", url.pathname, url.search, now$1, headersToBeSigned, region, contentHashStr, this.#options.accessKeyId, this.#options.secretAccessKey),
"user-agent": "lean-s3"
}
}).then((response) => {
const onData = controller.enqueue.bind(controller);
const onClose = controller.close.bind(controller);
const expectPartialResponse = range !== void 0;
const status = response.statusCode;
if (status === 200) {
if (expectPartialResponse) return controller.error(new S3Error("Unknown", path, { message: "Expected partial response to range request." }));
response.body.on("data", onData);
response.body.once("error", onNetworkError);
response.body.once("end", onClose);
return;
}
if (status === 206) {
if (!expectPartialResponse) return controller.error(new S3Error("Unknown", path, { message: "Received partial response but expected a full response." }));
response.body.on("data", onData);
response.body.once("error", onNetworkError);
response.body.once("end", onClose);
return;
}
if (400 <= status && status < 500) {
const responseText = void 0;
if (response.headers["content-type"] === "application/xml") return response.body.text().then((body) => {
let error;
try {
error = xmlParser.parse(body);
} catch (cause) {
return controller.error(new S3Error("Unknown", path, {
message: "Could not parse XML error response.",
status: response.statusCode,
cause
}));
}
return controller.error(new S3Error(error.Error.Code || "Unknown", path, {
message: error.Error.Message || void 0,
status: response.statusCode
}));
}, onNetworkError);
return controller.error(new S3Error("Unknown", path, {
status: response.statusCode,
message: void 0,
cause: responseText
}));
}
return controller.error(/* @__PURE__ */ new Error(`Handling for status code ${status} not implemented yet. You might want to open an issue and describe your situation.`));
}, onNetworkError);
},
cancel(reason) {
ac.abort(reason);
}
});
}
[nodeUtil.inspect.custom](_depth, options = {}) {
if (options.depth === null) options.depth = 2;
options.colors ??= true;
const properties = {
endpoint: this.#options.endpoint,
bucket: this.#options.bucket,
region: this.#options.region,
accessKeyId: this.#options.accessKeyId
};
return `S3Client ${nodeUtil.formatWithOptions(options, properties)}`;
}
};
function buildSearchParams(amzCredential, date, expiresIn, headerList, contentHashStr, storageClass, sessionToken, acl, responseContentDisposition) {
let res = "";
if (acl) res += `X-Amz-Acl=${encodeURIComponent(acl)}&`;
res += "X-Amz-Algorithm=AWS4-HMAC-SHA256";
if (contentHashStr) res += `&X-Amz-Content-Sha256=${contentHashStr}`;
res += `&X-Amz-Credential=${encodeURIComponentExtended(amzCredential)}`;
res += `&X-Amz-Date=${date.dateTime}`;
res += `&X-Amz-Expires=${expiresIn}`;
if (sessionToken) res += `&X-Amz-Security-Token=${encodeURIComponent(sessionToken)}`;
res += `&X-Amz-SignedHeaders=${encodeURIComponent(headerList)}`;
if (storageClass) res += `&X-Amz-Storage-Class=${storageClass}`;
if (responseContentDisposition) res += `&response-content-disposition=${encodeURIComponentExtended(responseContentDisposition)}`;
return res;
}
function ensureParsedXml(text) {
try {
const r = xmlParser.parse(text);
if (!r) throw new S3Error("Unknown", "", { message: "S3 service responded with empty XML." });
return r;
} catch (cause) {
throw new S3Error("Unknown", "", {
message: "S3 service responded with invalid XML.",
cause
});
}
}
//#endregion
//#region src/S3File.ts
var S3File = class S3File {
#client;
#path;
#start;
#end;
#contentType;
/** @internal */
constructor(client, path, start, end, contentType) {
if (typeof start === "number" && start < 0) throw new Error("Invalid slice `start`.");
if (typeof end === "number" && (end < 0 || typeof start === "number" && end < start)) throw new Error("Invalid slice `end`.");
this.#client = client;
this.#path = path;
this.#start = start;
this.#end = end;
this.#contentType = contentType ?? "application/octet-stream";
}
/**
* Creates and returns a new {@link S3File} containing a subset of this {@link S3File} data.
* @param start The starting index.
* @param end The ending index, exclusive.
* @param contentType The content-type for the new {@link S3File}.
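*
* @example
* ```js
* // A handle covering only the first kilobyte of the object.
* const head = client.file("video.mp4").slice(0, 1024);
* ```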
*/
slice(start, end, contentType) {
return new S3File(this.#client, this.#path, start ?? void 0, end ?? void 0, contentType ?? this.#contentType);
}
/**
* Get the stat of a file in the bucket. Uses `HEAD` request to check existence.
*
* @remarks Uses [`HeadObject`](https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).
* @throws {S3Error} If the file does not exist or the server has some other issues.
* @throws {Error} If the server returns an invalid response.
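*
* @example
* ```js
* const info = await client.file("report.pdf").stat();
* console.log(info.size, info.etag, info.lastModified, info.type);
* ```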
*/
async stat(options = {}) {
const [region, endpoint, bucket] = this.#client[kGetEffectiveParams](options);
const response = await this.#client[kSignedRequest](region, endpoint, b