/*
 * s3db.js
 * Use AWS S3, the world's most reliable document storage, as a database with this ORM.
 */
import { customAlphabet, urlAlphabet } from 'nanoid';
import EventEmitter from 'events';
import { mkdir, copyFile, unlink, stat, access, readdir, writeFile, readFile, rm } from 'fs/promises';
import fs, { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';
import path, { join } from 'path';
import crypto, { createHash } from 'crypto';
import zlib from 'node:zlib';
import { Transform, Writable } from 'stream';
import { PromisePool } from '@supercharge/promise-pool';
import { ReadableStream } from 'node:stream/web';
import { chunk, merge, isString, isEmpty, invert, uniq, cloneDeep, get, set, isObject, isFunction } from 'lodash-es';
import jsonStableStringify from 'json-stable-stringify';
import { Agent } from 'http';
import { Agent as Agent$1 } from 'https';
import { NodeHttpHandler } from '@smithy/node-http-handler';
import { S3Client, PutObjectCommand, GetObjectCommand, HeadObjectCommand, CopyObjectCommand, DeleteObjectCommand, DeleteObjectsCommand, ListObjectsV2Command } from '@aws-sdk/client-s3';
import { flatten, unflatten } from 'flat';
import FastestValidator from 'fastest-validator';
// Base-62 codec used to shrink numeric values stored in S3 metadata.
// Digits 0-9, then lowercase, then uppercase — so index 61 is "Z".
const alphabet = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
const base = alphabet.length;
// Reverse lookup: character -> digit value.
const charToValue = Object.fromEntries([...alphabet].map((c, i) => [c, i]));
/**
 * Encodes a number as a base-62 string.
 * Non-numbers, NaN and Infinity encode to the literal string "undefined".
 * Values are floored toward -Infinity; note that 0 < n < 1 yields "" (the
 * digit loop never runs), matching the historical behavior.
 */
const encode = (n) => {
  if (typeof n !== "number" || !Number.isFinite(n)) return "undefined";
  if (n === 0) return alphabet[0];
  if (n < 0) return "-" + encode(-Math.floor(n));
  let digits = "";
  for (let rest = Math.floor(n); rest > 0; rest = Math.floor(rest / base)) {
    digits = alphabet[rest % base] + digits;
  }
  return digits;
};
/**
 * Decodes a base-62 string produced by encode().
 * Returns NaN for non-strings or strings with characters outside the
 * alphabet; the empty string decodes to 0.
 */
const decode = (s) => {
  if (typeof s !== "string") return NaN;
  if (s === "") return 0;
  const negative = s.startsWith("-");
  const body = negative ? s.slice(1) : s;
  let value = 0;
  for (let i = 0; i < body.length; i++) {
    const digit = charToValue[body[i]];
    if (digit === void 0) return NaN;
    value = value * base + digit;
  }
  return negative ? -value : value;
};
/**
 * Encodes a decimal number: base-62 integer part, then the decimal digits
 * verbatim after a ".". E.g. 3.14 -> "3.14", 62.5 -> "10.5".
 * NOTE: numbers whose toString() uses exponent notation (e.g. 1e-7) have no
 * "." to split on and lose their fractional part — kept as-is for
 * compatibility with already-stored values.
 */
const encodeDecimal = (n) => {
  if (typeof n !== "number" || !Number.isFinite(n)) return "undefined";
  const sign = n < 0 ? "-" : "";
  const [intPart, fracPart] = Math.abs(n).toString().split(".");
  const encodedInt = encode(Number(intPart));
  return fracPart ? `${sign}${encodedInt}.${fracPart}` : `${sign}${encodedInt}`;
};
/**
 * Decodes a string produced by encodeDecimal(). Returns NaN when the
 * integer part is not valid base-62.
 */
const decodeDecimal = (s) => {
  if (typeof s !== "string") return NaN;
  const negative = s.startsWith("-");
  const body = negative ? s.slice(1) : s;
  const [intPart, fracPart] = body.split(".");
  const decodedInt = decode(intPart);
  if (Number.isNaN(decodedInt)) return NaN;
  const magnitude = fracPart ? Number(decodedInt + "." + fracPart) : decodedInt;
  return negative ? -magnitude : magnitude;
};
// Memoization cache for UTF-8 byte-length computations. Bounded at
// UTF8_MEMORY_MAX_SIZE; once full, the oldest half (insertion order) is
// evicted before the next entry is stored.
const utf8BytesMemory = /* @__PURE__ */ new Map();
const UTF8_MEMORY_MAX_SIZE = 1e4;
/**
 * Returns the number of bytes `str` occupies when encoded as UTF-8.
 * Non-string input is coerced with String() first. Results are memoized.
 *
 * Encoding rule per Unicode code point:
 *   <= 0x7F -> 1 byte, <= 0x7FF -> 2, <= 0xFFFF -> 3, else -> 4.
 * Astral code points (4 bytes) occupy two UTF-16 units, so the index is
 * advanced an extra step. A lone surrogate counts as 3 bytes (codePointAt
 * returns the surrogate unit itself), matching the original behavior.
 *
 * Fix vs previous version: the `codePoint <= 1114111` guard and the nested
 * `codePoint > 65535` check were redundant — codePointAt() never exceeds
 * 0x10FFFF and the branch is only reached above 0xFFFF — so the final
 * branch is now unconditional, with identical results.
 */
function calculateUTF8Bytes(str) {
  if (typeof str !== "string") {
    str = String(str);
  }
  if (utf8BytesMemory.has(str)) {
    return utf8BytesMemory.get(str);
  }
  let bytes = 0;
  for (let i = 0; i < str.length; i++) {
    const codePoint = str.codePointAt(i);
    if (codePoint <= 127) {
      bytes += 1;
    } else if (codePoint <= 2047) {
      bytes += 2;
    } else if (codePoint <= 65535) {
      bytes += 3;
    } else {
      bytes += 4;
      i++; // skip the low surrogate of the astral pair
    }
  }
  if (utf8BytesMemory.size < UTF8_MEMORY_MAX_SIZE) {
    utf8BytesMemory.set(str, bytes);
  } else if (utf8BytesMemory.size === UTF8_MEMORY_MAX_SIZE) {
    // Cache is full: evict the oldest half before inserting.
    const entriesToDelete = Math.floor(UTF8_MEMORY_MAX_SIZE / 2);
    let deleted = 0;
    for (const key of utf8BytesMemory.keys()) {
      if (deleted >= entriesToDelete) break;
      utf8BytesMemory.delete(key);
      deleted++;
    }
    utf8BytesMemory.set(str, bytes);
  }
  return bytes;
}
/** Empties the UTF-8 byte-length memoization cache. */
function clearUTF8Memory() {
  utf8BytesMemory.clear();
}
// Historical aliases kept for backward compatibility.
const clearUTF8Memo = clearUTF8Memory;
const clearUTF8Cache = clearUTF8Memory;
/**
 * Sums the UTF-8 byte length of every attribute NAME (key) in the mapped
 * object — the key-side counterpart of calculateAttributeSizes().
 */
function calculateAttributeNamesSize(mappedObject) {
  return Object.keys(mappedObject).reduce(
    (total, key) => total + calculateUTF8Bytes(key),
    0
  );
}
/**
 * Normalizes an arbitrary value to the string form used when persisting
 * attributes to S3 metadata:
 *   null/undefined -> "", booleans -> "1"/"0", numbers -> decimal string,
 *   strings pass through, arrays -> items joined with "|" ("[]" when empty),
 *   other objects -> JSON.
 */
function transformValue(value) {
  if (value === null || value === void 0) return "";
  switch (typeof value) {
    case "boolean":
      return value ? "1" : "0";
    case "number":
      return String(value);
    case "string":
      return value;
  }
  if (Array.isArray(value)) {
    return value.length === 0 ? "[]" : value.map(String).join("|");
  }
  if (typeof value === "object") {
    return JSON.stringify(value);
  }
  return String(value);
}
/**
 * Maps each attribute key to the UTF-8 byte size of its transformed
 * (stringified) VALUE. Keys themselves are measured by
 * calculateAttributeNamesSize().
 */
function calculateAttributeSizes(mappedObject) {
  return Object.fromEntries(
    Object.entries(mappedObject).map(([key, value]) => [
      key,
      calculateUTF8Bytes(transformValue(value))
    ])
  );
}
/**
 * Total metadata footprint of a mapped object: value bytes plus key-name
 * bytes, both measured as UTF-8.
 */
function calculateTotalSize(mappedObject) {
  let valueTotal = 0;
  for (const size of Object.values(calculateAttributeSizes(mappedObject))) {
    valueTotal += size;
  }
  return valueTotal + calculateAttributeNamesSize(mappedObject);
}
/**
 * Produces a per-attribute size report for a mapped object.
 * Returns { total, valueSizes, namesSize, valueTotal, breakdown,
 * detailedBreakdown } where `breakdown` lists attributes sorted by
 * descending value size with a percentage of the grand total (values +
 * names).
 */
function getSizeBreakdown(mappedObject) {
  const valueSizes = calculateAttributeSizes(mappedObject);
  const namesSize = calculateAttributeNamesSize(mappedObject);
  let valueTotal = 0;
  for (const size of Object.values(valueSizes)) {
    valueTotal += size;
  }
  const total = valueTotal + namesSize;
  const breakdown = Object.entries(valueSizes)
    .sort(([, sizeA], [, sizeB]) => sizeB - sizeA)
    .map(([attribute, size]) => ({
      attribute,
      size,
      percentage: (size / total * 100).toFixed(2) + "%"
    }));
  return {
    total,
    valueSizes,
    namesSize,
    valueTotal,
    breakdown,
    // Detailed breakdown including key-name overhead.
    detailedBreakdown: {
      values: valueTotal,
      names: namesSize,
      total
    }
  };
}
/**
 * Estimates the metadata bytes consumed by s3db's own system fields for one
 * record: the `_v` version field, optional createdAt/updatedAt timestamps
 * (sized with a representative ISO-8601 sample), and an optional id.
 *
 * Fix vs previous version: removed a dead identity-copy loop that rebuilt
 * `systemFields` key-by-key into an `overheadObject` before measuring —
 * measuring `systemFields` directly is equivalent.
 *
 * @param {{version?: string|number, timestamps?: boolean, id?: string}} config
 * @returns {number} total UTF-8 bytes of names + values of the system fields
 */
function calculateSystemOverhead(config = {}) {
  const { version = "1", timestamps = false, id = "" } = config;
  const systemFields = {
    "_v": String(version)
    // Version field (e.g., "1", "10", "100")
  };
  if (timestamps) {
    // Representative ISO timestamp; all ISO-8601 millisecond timestamps
    // have the same byte length.
    systemFields.createdAt = "2024-01-01T00:00:00.000Z";
    systemFields.updatedAt = "2024-01-01T00:00:00.000Z";
  }
  if (id) {
    systemFields.id = id;
  }
  return calculateTotalSize(systemFields);
}
/**
 * Usable metadata budget for user data: the S3 metadata limit (default
 * 2048 bytes) minus the system-field overhead for the given config.
 */
function calculateEffectiveLimit(config = {}) {
  const { s3Limit = 2048, systemConfig = {} } = config;
  return s3Limit - calculateSystemOverhead(systemConfig);
}
/**
 * Root error type for s3db. Carries S3 context (bucket/key), AWS error
 * details (code/statusCode/requestId/awsMessage), the originating command,
 * and a remediation suggestion. Any extra constructor fields are captured
 * in `rest` and echoed into `this.data`; with `verbose: true` they are also
 * appended to the message as pretty-printed JSON.
 *
 * Fix vs previous version: dropped the redundant
 * `super.name = this.constructor.name;` — assigning through `super` sets
 * the own `name` property on `this`, which the very next statement set
 * again to the same value.
 */
class BaseError extends Error {
  constructor({ verbose, bucket, key, message, code, statusCode, requestId, awsMessage, original, commandName, commandInput, metadata, suggestion, ...rest }) {
    if (verbose) message = message + `
Verbose:
${JSON.stringify(rest, null, 2)}`;
    super(message);
    // Prefer V8's captureStackTrace (hides this constructor frame);
    // fall back to a plain Error stack elsewhere.
    if (typeof Error.captureStackTrace === "function") {
      Error.captureStackTrace(this, this.constructor);
    } else {
      this.stack = new Error(message).stack;
    }
    this.name = this.constructor.name;
    this.bucket = bucket;
    this.key = key;
    this.thrownAt = /* @__PURE__ */ new Date();
    this.code = code;
    this.statusCode = statusCode;
    this.requestId = requestId;
    this.awsMessage = awsMessage;
    this.original = original;
    this.commandName = commandName;
    this.commandInput = commandInput;
    this.metadata = metadata;
    this.suggestion = suggestion;
    // Full context bag, including the (possibly verbose-expanded) message.
    this.data = { bucket, key, ...rest, verbose, message };
  }
  /** Serializable snapshot of every field, including stack and original error. */
  toJson() {
    return {
      name: this.name,
      message: this.message,
      code: this.code,
      statusCode: this.statusCode,
      requestId: this.requestId,
      awsMessage: this.awsMessage,
      bucket: this.bucket,
      key: this.key,
      thrownAt: this.thrownAt,
      commandName: this.commandName,
      commandInput: this.commandInput,
      metadata: this.metadata,
      suggestion: this.suggestion,
      data: this.data,
      original: this.original,
      stack: this.stack
    };
  }
  /** "<Name> | <message>" one-line form. */
  toString() {
    return `${this.name} | ${this.message}`;
  }
}
/**
 * Base class for all s3db domain errors. When `details.original` is an AWS
 * SDK error, its code/status/requestId/message/$metadata are lifted onto
 * the error; otherwise those fields are explicitly set to undefined
 * (intentionally overriding any same-named keys in `details`).
 */
class S3dbError extends BaseError {
  constructor(message, details = {}) {
    let code, statusCode, requestId, awsMessage, original, metadata;
    if (details.original) {
      original = details.original;
      const meta = original.$metadata;
      code = original.code || original.Code || original.name;
      statusCode = original.statusCode || meta && meta.httpStatusCode;
      requestId = original.requestId || meta && meta.requestId;
      awsMessage = original.message;
      metadata = meta ? { ...meta } : void 0;
    }
    super({ message, ...details, code, statusCode, requestId, awsMessage, original, metadata });
  }
}
/** Database-level failure (connection, metadata persistence, resource wiring). */
class DatabaseError extends S3dbError {
  constructor(message, details = {}) {
    super(message, details);
    // Expose every detail field directly on the error instance.
    Object.assign(this, details);
  }
}
/** Input failed schema or request validation. */
class ValidationError extends S3dbError {
  constructor(message, details = {}) {
    super(message, details);
    Object.assign(this, details);
  }
}
/** Credentials are missing or invalid. */
class AuthenticationError extends S3dbError {
  constructor(message, details = {}) {
    super(message, details);
    Object.assign(this, details);
  }
}
/** Credentials are valid but lack permission (e.g. S3 AccessDenied / 403). */
class PermissionError extends S3dbError {
  constructor(message, details = {}) {
    super(message, details);
    Object.assign(this, details);
  }
}
/** Encryption/decryption or key-derivation failure. */
class EncryptionError extends S3dbError {
  constructor(message, details = {}) {
    super(message, details);
    Object.assign(this, details);
  }
}
/**
 * Thrown when a record id cannot be found within a resource.
 * Requires string `id`, `bucket` and `resourceName` (validated in that
 * order, throwing a plain Error on the first violation).
 */
class ResourceNotFound extends S3dbError {
  constructor({ bucket, resourceName, id, original, ...rest }) {
    for (const [value, label] of [[id, "id"], [bucket, "bucket"], [resourceName, "resourceName"]]) {
      if (typeof value !== "string") throw new Error(`${label} must be a string`);
    }
    super(`Resource not found: ${resourceName}/${id} [bucket:${bucket}]`, {
      bucket,
      resourceName,
      id,
      original,
      ...rest
    });
  }
}
/**
 * Thrown when the target S3 bucket does not exist.
 * NOTE(review): the message text "does not exists" is ungrammatical but is
 * kept verbatim — callers or tests may match on the exact string.
 */
class NoSuchBucket extends S3dbError {
  constructor({ bucket, original, ...rest }) {
    if (typeof bucket !== "string") throw new Error("bucket must be a string");
    super(`Bucket does not exists [bucket:${bucket}]`, { bucket, original, ...rest });
  }
}
/**
 * Thrown when an S3 object key does not exist. `resourceName` and `id` are
 * optional context (id, when given, must be a string) and are exposed as
 * instance properties.
 */
class NoSuchKey extends S3dbError {
  constructor({ bucket, key, resourceName, id, original, ...rest }) {
    if (typeof key !== "string") throw new Error("key must be a string");
    if (typeof bucket !== "string") throw new Error("bucket must be a string");
    if (id !== void 0 && typeof id !== "string") throw new Error("id must be a string");
    super(`No such key: ${key} [bucket:${bucket}]`, { bucket, key, resourceName, id, original, ...rest });
    Object.assign(this, { resourceName, id });
  }
}
/**
 * Generic not-found error for a key within a bucket (HEAD/GET misses that
 * are not specifically NoSuchKey). Exposes optional resourceName/id.
 */
class NotFound extends S3dbError {
  constructor({ bucket, key, resourceName, id, original, ...rest }) {
    if (typeof key !== "string") throw new Error("key must be a string");
    if (typeof bucket !== "string") throw new Error("bucket must be a string");
    super(`Not found: ${key} [bucket:${bucket}]`, { bucket, key, resourceName, id, original, ...rest });
    Object.assign(this, { resourceName, id });
  }
}
/** Thrown when an object exists but its expected s3db metadata is absent. */
class MissingMetadata extends S3dbError {
  constructor({ bucket, original, ...rest }) {
    if (typeof bucket !== "string") throw new Error("bucket must be a string");
    super(`Missing metadata for bucket [bucket:${bucket}]`, { bucket, original, ...rest });
  }
}
/**
 * Thrown when an item fails resource validation on insert/update.
 * If no explicit `message` is given, a default message embedding the
 * pretty-printed `validation` result (on its own line) is built.
 */
class InvalidResourceItem extends S3dbError {
  constructor({
    bucket,
    resourceName,
    attributes,
    validation,
    message,
    original,
    ...rest
  }) {
    if (typeof bucket !== "string") throw new Error("bucket must be a string");
    if (typeof resourceName !== "string") throw new Error("resourceName must be a string");
    super(
      message || `Validation error: This item is not valid. Resource=${resourceName} [bucket:${bucket}].
${JSON.stringify(validation, null, 2)}`,
      {
        bucket,
        resourceName,
        attributes,
        validation,
        original,
        ...rest
      }
    );
  }
}
/** Fallback type for errors no specific mapping matches (see mapAwsError). */
class UnknownError extends S3dbError {
}
// Registry of error constructors keyed by their code/name, used to look up
// the matching class for a given error-code string.
const ErrorMap = {
  "NotFound": NotFound,
  "NoSuchKey": NoSuchKey,
  "UnknownError": UnknownError,
  "NoSuchBucket": NoSuchBucket,
  "MissingMetadata": MissingMetadata,
  "InvalidResourceItem": InvalidResourceItem
};
/**
 * Translates a raw AWS SDK error into the closest s3db error type, carrying
 * along the original error, its `$metadata`, the command context and a
 * human-readable suggestion. Unrecognized errors become UnknownError with a
 * summarized "Unknown error | Code | Status | Stack" message.
 */
function mapAwsError(err, context = {}) {
  const code = err.code || err.Code || err.name;
  const metadata = err.$metadata ? { ...err.$metadata } : void 0;
  const { commandName, commandInput } = context;
  // Shared payload passed to every mapped error type.
  const base = { ...context, original: err, metadata, commandName, commandInput };
  if (code === "NoSuchKey" || code === "NotFound") {
    return new NoSuchKey({
      ...base,
      suggestion: "Check if the key exists in the specified bucket and if your credentials have permission."
    });
  }
  if (code === "NoSuchBucket") {
    return new NoSuchBucket({
      ...base,
      suggestion: "Check if the bucket exists and if your credentials have permission."
    });
  }
  if (code === "AccessDenied" || err.statusCode === 403 || code === "Forbidden") {
    return new PermissionError("Access denied", {
      ...base,
      suggestion: "Check your credentials and bucket policy."
    });
  }
  if (code === "ValidationError" || err.statusCode === 400) {
    return new ValidationError("Validation error", {
      ...base,
      suggestion: "Check the request parameters and payload."
    });
  }
  if (code === "MissingMetadata") {
    return new MissingMetadata({
      ...base,
      suggestion: "Check if the object metadata is present and valid."
    });
  }
  const parts = [`Unknown error: ${err.message || err.toString()}`];
  if (err.code) parts.push(`Code: ${err.code}`);
  if (err.statusCode) parts.push(`Status: ${err.statusCode}`);
  if (err.stack) parts.push(`Stack: ${err.stack.split("\n")[0]}`);
  return new UnknownError(parts.join(" | "), {
    ...base,
    suggestion: `Check the error details and AWS documentation. Original error: ${err.message || err.toString()}`
  });
}
/** Malformed or unusable connection string. */
class ConnectionStringError extends S3dbError {
  constructor(message, details = {}) {
    super(message, { ...details, suggestion: "Check the connection string format and credentials." });
  }
}
/** Failure inside the crypto helpers (digest, encrypt/decrypt, key derivation). */
class CryptoError extends S3dbError {
  constructor(message, details = {}) {
    super(message, { ...details, suggestion: "Check if the crypto library is available and input is valid." });
  }
}
/** Invalid or inconsistent schema definition. */
class SchemaError extends S3dbError {
  constructor(message, details = {}) {
    super(message, { ...details, suggestion: "Check schema definition and input data." });
  }
}
/** Resource-level failure; caller-provided suggestion wins over the default. */
class ResourceError extends S3dbError {
  constructor(message, details = {}) {
    super(message, { ...details, suggestion: details.suggestion || "Check resource configuration, attributes, and operation context." });
    // Expose detail fields directly on the instance.
    Object.assign(this, details);
  }
}
/** Partition definition or partition-value failure. */
class PartitionError extends S3dbError {
  constructor(message, details = {}) {
    super(message, { ...details, suggestion: details.suggestion || "Check partition definition, fields, and input values." });
  }
}
/**
 * Restamps a caught error's stack with a fresh one when the error allows it
 * (extensible instance whose own `stack` is writable and configurable).
 * Swallows any failure of the restamp itself. Returns the error.
 *
 * Extracted from tryFn, where this exact logic appeared three times.
 */
function restampTryFnErrorStack(error) {
  if (error instanceof Error && Object.isExtensible(error)) {
    const desc = Object.getOwnPropertyDescriptor(error, "stack");
    if (desc && desc.writable && desc.configurable && error.hasOwnProperty("stack")) {
      try {
        error.stack = new Error().stack;
      } catch (_) {
      }
    }
  }
  return error;
}
/**
 * Go-style try: runs a function, promise, or plain value and returns an
 * [ok, error, data] tuple instead of throwing.
 *
 * - function returning a thenable, or a bare thenable: resolves to a
 *   Promise of the tuple;
 * - synchronous function or plain value: returns the tuple directly;
 * - null/undefined input: [false, Error, undefined].
 *
 * @returns {[boolean, Error|null, any] | Promise<[boolean, Error|null, any]>}
 */
function tryFn(fnOrPromise) {
  if (fnOrPromise == null) {
    const err = new Error("fnOrPromise cannot be null or undefined");
    err.stack = new Error().stack;
    return [false, err, void 0];
  }
  if (typeof fnOrPromise === "function") {
    try {
      const result = fnOrPromise();
      if (result == null) {
        return [true, null, result];
      }
      if (typeof result.then === "function") {
        return result.then((data) => [true, null, data]).catch((error) => [false, restampTryFnErrorStack(error), void 0]);
      }
      return [true, null, result];
    } catch (error) {
      return [false, restampTryFnErrorStack(error), void 0];
    }
  }
  if (typeof fnOrPromise.then === "function") {
    return Promise.resolve(fnOrPromise).then((data) => [true, null, data]).catch((error) => [false, restampTryFnErrorStack(error), void 0]);
  }
  return [true, null, fnOrPromise];
}
/**
 * Synchronous Go-style try: returns [true, null, result] on success or
 * [false, error, null] when `fn` throws. (Note: failure data slot is null,
 * unlike tryFn which uses undefined.)
 */
function tryFnSync(fn) {
  let outcome;
  try {
    outcome = [true, null, fn()];
  } catch (err) {
    outcome = [false, err, null];
  }
  return outcome;
}
/**
 * Resolves the Web Crypto implementation for the current environment:
 * Node's `webcrypto` (dynamic import) when `process` exists, otherwise the
 * browser's `window.crypto`. Throws CryptoError when neither is available.
 */
async function dynamicCrypto() {
  if (typeof process !== "undefined") {
    const [ok, err, webcrypto] = await tryFn(async () => {
      const mod = await import('crypto');
      return mod.webcrypto;
    });
    if (!ok) {
      throw new CryptoError("Crypto API not available", { original: err, context: "dynamicCrypto" });
    }
    if (!webcrypto) {
      throw new CryptoError("Could not load any crypto library", { context: "dynamicCrypto" });
    }
    return webcrypto;
  }
  if (typeof window !== "undefined" && window.crypto) {
    return window.crypto;
  }
  throw new CryptoError("Could not load any crypto library", { context: "dynamicCrypto" });
}
/**
 * SHA-256 of a UTF-8 string via the Web Crypto API, returned as a
 * lowercase hex string. Throws CryptoError when crypto is unavailable or
 * the digest fails.
 */
async function sha256(message) {
  const [okCrypto, errCrypto, cryptoLib] = await tryFn(dynamicCrypto);
  if (!okCrypto) throw new CryptoError("Crypto API not available", { original: errCrypto });
  const data = new TextEncoder().encode(message);
  const [ok, err, digest] = await tryFn(() => cryptoLib.subtle.digest("SHA-256", data));
  if (!ok) throw new CryptoError("SHA-256 digest failed", { original: err, input: message });
  return Array.from(new Uint8Array(digest), (byte) => byte.toString(16).padStart(2, "0")).join("");
}
/**
 * Encrypts a UTF-8 string with AES-GCM using a key derived from
 * `passphrase` (PBKDF2, see getKeyMaterial).
 *
 * Output is base64 of: salt (16 bytes) | iv (12 bytes) | ciphertext.
 * decrypt() depends on these exact byte offsets — do not reorder.
 * Both salt and IV come from the CSPRNG (getRandomValues) per call.
 *
 * @param {string} content plaintext
 * @param {string} passphrase key-derivation secret
 * @returns {Promise<string>} base64-encoded salt|iv|ciphertext
 * @throws {CryptoError} on unavailable crypto, key derivation or cipher failure
 */
async function encrypt(content, passphrase) {
  const [okCrypto, errCrypto, cryptoLib] = await tryFn(dynamicCrypto);
  if (!okCrypto) throw new CryptoError("Crypto API not available", { original: errCrypto });
  const salt = cryptoLib.getRandomValues(new Uint8Array(16));
  const [okKey, errKey, key] = await tryFn(() => getKeyMaterial(passphrase, salt));
  if (!okKey) throw new CryptoError("Key derivation failed", { original: errKey, passphrase, salt });
  // 12-byte IV is the recommended size for AES-GCM.
  const iv = cryptoLib.getRandomValues(new Uint8Array(12));
  const encoder = new TextEncoder();
  const encodedContent = encoder.encode(content);
  const [okEnc, errEnc, encryptedContent] = await tryFn(() => cryptoLib.subtle.encrypt({ name: "AES-GCM", iv }, key, encodedContent));
  if (!okEnc) throw new CryptoError("Encryption failed", { original: errEnc, content });
  // Pack salt | iv | ciphertext into one buffer so decrypt() can slice it back.
  const encryptedData = new Uint8Array(salt.length + iv.length + encryptedContent.byteLength);
  encryptedData.set(salt);
  encryptedData.set(iv, salt.length);
  encryptedData.set(new Uint8Array(encryptedContent), salt.length + iv.length);
  return arrayBufferToBase64(encryptedData);
}
/**
 * Decrypts a payload produced by encrypt(): base64 of
 * salt (bytes 0-15) | iv (bytes 16-27) | ciphertext (28+).
 * Re-derives the AES-GCM key from `passphrase` and the embedded salt.
 *
 * @param {string} encryptedBase64 output of encrypt()
 * @param {string} passphrase same secret used to encrypt
 * @returns {Promise<string>} decoded UTF-8 plaintext
 * @throws {CryptoError} on unavailable crypto, key derivation or decryption
 *   failure (including GCM auth-tag mismatch, i.e. wrong passphrase)
 */
async function decrypt(encryptedBase64, passphrase) {
  const [okCrypto, errCrypto, cryptoLib] = await tryFn(dynamicCrypto);
  if (!okCrypto) throw new CryptoError("Crypto API not available", { original: errCrypto });
  const encryptedData = base64ToArrayBuffer(encryptedBase64);
  // Offsets must mirror the layout written by encrypt().
  const salt = encryptedData.slice(0, 16);
  const iv = encryptedData.slice(16, 28);
  const encryptedContent = encryptedData.slice(28);
  const [okKey, errKey, key] = await tryFn(() => getKeyMaterial(passphrase, salt));
  if (!okKey) throw new CryptoError("Key derivation failed (decrypt)", { original: errKey, passphrase, salt });
  const [okDec, errDec, decryptedContent] = await tryFn(() => cryptoLib.subtle.decrypt({ name: "AES-GCM", iv }, key, encryptedContent));
  if (!okDec) throw new CryptoError("Decryption failed", { original: errDec, encryptedBase64 });
  const decoder = new TextDecoder();
  return decoder.decode(decryptedContent);
}
/**
 * MD5 digest of `data`, base64-encoded. Node-only (relies on node:crypto);
 * throws CryptoError in non-Node environments or when hashing fails.
 * Used for content checksums, not for security.
 *
 * Fix vs previous version: used the module's statically imported
 * `createHash` instead of redundantly re-importing 'crypto' dynamically on
 * every call.
 *
 * @param {string|Buffer} data value to hash
 * @returns {Promise<string>} base64 MD5 digest
 */
async function md5(data) {
  if (typeof process === "undefined") {
    throw new CryptoError("MD5 hashing is only available in Node.js environment", { context: "md5" });
  }
  try {
    return createHash("md5").update(data).digest("base64");
  } catch (err) {
    throw new CryptoError("MD5 hashing failed", { original: err, data });
  }
}
/**
 * Derives the AES-GCM key for encrypt()/decrypt() from a passphrase and
 * salt via PBKDF2 (100,000 iterations, SHA-256, 256-bit key). These
 * parameters are part of the stored-data format — changing them breaks
 * decryption of previously encrypted payloads.
 *
 * @param {string} passphrase secret
 * @param {Uint8Array} salt 16-byte salt (from encrypt() or the payload)
 * @returns {Promise<CryptoKey>} extractable AES-GCM key for encrypt+decrypt
 * @throws {CryptoError} when importKey or deriveKey fails
 */
async function getKeyMaterial(passphrase, salt) {
  const [okCrypto, errCrypto, cryptoLib] = await tryFn(dynamicCrypto);
  if (!okCrypto) throw new CryptoError("Crypto API not available", { original: errCrypto });
  const encoder = new TextEncoder();
  const keyMaterial = encoder.encode(passphrase);
  const [okImport, errImport, baseKey] = await tryFn(() => cryptoLib.subtle.importKey(
    "raw",
    keyMaterial,
    { name: "PBKDF2" },
    false,
    ["deriveKey"]
  ));
  if (!okImport) throw new CryptoError("importKey failed", { original: errImport, passphrase });
  const [okDerive, errDerive, derivedKey] = await tryFn(() => cryptoLib.subtle.deriveKey(
    {
      name: "PBKDF2",
      salt,
      iterations: 1e5,
      hash: "SHA-256"
    },
    baseKey,
    { name: "AES-GCM", length: 256 },
    true,
    ["encrypt", "decrypt"]
  ));
  if (!okDerive) throw new CryptoError("deriveKey failed", { original: errDerive, passphrase, salt });
  return derivedKey;
}
/**
 * Encodes a byte buffer (ArrayBuffer or typed array) as base64.
 * Uses Buffer in Node, btoa in browsers.
 */
function arrayBufferToBase64(buffer) {
  if (typeof process === "undefined") {
    const [ok, err, binary] = tryFnSync(() => String.fromCharCode.apply(null, new Uint8Array(buffer)));
    if (!ok) throw new CryptoError("Failed to convert ArrayBuffer to base64 (browser)", { original: err });
    return window.btoa(binary);
  }
  return Buffer.from(buffer).toString("base64");
}
/**
 * Decodes a base64 string into a Uint8Array.
 * Uses Buffer in Node, atob in browsers.
 */
function base64ToArrayBuffer(base64) {
  if (typeof process === "undefined") {
    const [ok, err, binaryString] = tryFnSync(() => window.atob(base64));
    if (!ok) throw new CryptoError("Failed to decode base64 (browser)", { original: err });
    const bytes = new Uint8Array(binaryString.length);
    for (let i = 0; i < binaryString.length; i++) {
      bytes[i] = binaryString.charCodeAt(i);
    }
    return bytes;
  }
  return new Uint8Array(Buffer.from(base64, "base64"));
}
// 22-character record-id generator over nanoid's URL-safe alphabet.
const idGenerator = customAlphabet(urlAlphabet, 22);
// Password alphabet omits visually ambiguous characters (no 0/O, 1/I/l).
const passwordAlphabet = "ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz23456789";
// 16-character password generator over the unambiguous alphabet.
const passwordGenerator = customAlphabet(passwordAlphabet, 16);
// Frozen namespace object (bundler-generated module namespace re-export).
var id = /*#__PURE__*/Object.freeze({
  __proto__: null,
  idGenerator: idGenerator,
  passwordGenerator: passwordGenerator
});
class Plugin extends EventEmitter {
constructor(options = {}) {
super();
this.name = this.constructor.name;
this.options = options;
this.hooks = /* @__PURE__ */ new Map();
}
async setup(database) {
this.database = database;
this.beforeSetup();
await this.onSetup();
this.afterSetup();
}
async start() {
this.beforeStart();
await this.onStart();
this.afterStart();
}
async stop() {
this.beforeStop();
await this.onStop();
this.afterStop();
}
// Override these methods in subclasses
async onSetup() {
}
async onStart() {
}
async onStop() {
}
// Hook management methods
addHook(resource, event, handler) {
if (!this.hooks.has(resource)) {
this.hooks.set(resource, /* @__PURE__ */ new Map());
}
const resourceHooks = this.hooks.get(resource);
if (!resourceHooks.has(event)) {
resourceHooks.set(event, []);
}
resourceHooks.get(event).push(handler);
}
removeHook(resource, event, handler) {
const resourceHooks = this.hooks.get(resource);
if (resourceHooks && resourceHooks.has(event)) {
const handlers = resourceHooks.get(event);
const index = handlers.indexOf(handler);
if (index > -1) {
handlers.splice(index, 1);
}
}
}
// Enhanced resource method wrapping that supports multiple plugins
wrapResourceMethod(resource, methodName, wrapper) {
const originalMethod = resource[methodName];
if (!resource._pluginWrappers) {
resource._pluginWrappers = /* @__PURE__ */ new Map();
}
if (!resource._pluginWrappers.has(methodName)) {
resource._pluginWrappers.set(methodName, []);
}
resource._pluginWrappers.get(methodName).push(wrapper);
if (!resource[`_wrapped_${methodName}`]) {
resource[`_wrapped_${methodName}`] = originalMethod;
const isJestMock = originalMethod && originalMethod._isMockFunction;
resource[methodName] = async function(...args) {
let result = await resource[`_wrapped_${methodName}`](...args);
for (const wrapper2 of resource._pluginWrappers.get(methodName)) {
result = await wrapper2.call(this, result, args, methodName);
}
return result;
};
if (isJestMock) {
Object.setPrototypeOf(resource[methodName], Object.getPrototypeOf(originalMethod));
Object.assign(resource[methodName], originalMethod);
}
}
}
/**
* Add a middleware to intercept a resource method (Koa/Express style).
* Middleware signature: async (next, ...args) => { ... }
* - Chame next(...args) para continuar a cadeia.
* - Retorne sem chamar next para interromper.
* - Pode modificar argumentos/resultados.
*/
addMiddleware(resource, methodName, middleware) {
if (!resource._pluginMiddlewares) {
resource._pluginMiddlewares = {};
}
if (!resource._pluginMiddlewares[methodName]) {
resource._pluginMiddlewares[methodName] = [];
const originalMethod = resource[methodName].bind(resource);
resource[methodName] = async function(...args) {
let idx = -1;
const next = async (...nextArgs) => {
idx++;
if (idx < resource._pluginMiddlewares[methodName].length) {
return await resource._pluginMiddlewares[methodName][idx].call(this, next, ...nextArgs);
} else {
return await originalMethod(...nextArgs);
}
};
return await next(...args);
};
}
resource._pluginMiddlewares[methodName].push(middleware);
}
// Partition-aware helper methods
getPartitionValues(data, resource) {
if (!resource.config?.partitions) return {};
const partitionValues = {};
for (const [partitionName, partitionDef] of Object.entries(resource.config.partitions)) {
if (partitionDef.fields) {
partitionValues[partitionName] = {};
for (const [fieldName, rule] of Object.entries(partitionDef.fields)) {
const value = this.getNestedFieldValue(data, fieldName);
if (value !== null && value !== void 0) {
partitionValues[partitionName][fieldName] = resource.applyPartitionRule(value, rule);
}
}
} else {
partitionValues[partitionName] = {};
}
}
return partitionValues;
}
getNestedFieldValue(data, fieldPath) {
if (!fieldPath.includes(".")) {
return data[fieldPath] ?? null;
}
const keys = fieldPath.split(".");
let value = data;
for (const key of keys) {
if (value && typeof value === "object" && key in value) {
value = value[key];
} else {
return null;
}
}
return value ?? null;
}
// Event emission methods
beforeSetup() {
this.emit("plugin.beforeSetup", /* @__PURE__ */ new Date());
}
afterSetup() {
this.emit("plugin.afterSetup", /* @__PURE__ */ new Date());
}
beforeStart() {
this.emit("plugin.beforeStart", /* @__PURE__ */ new Date());
}
afterStart() {
this.emit("plugin.afterStart", /* @__PURE__ */ new Date());
}
beforeStop() {
this.emit("plugin.beforeStop", /* @__PURE__ */ new Date());
}
afterStop() {
this.emit("plugin.afterStop", /* @__PURE__ */ new Date());
}
}
// Minimal duck-typed plugin shape: any object exposing setup/start/stop can
// act as a plugin without extending the Plugin class.
const PluginObject = {
  setup(database) {
  },
  start() {
  },
  stop() {
  }
};
class AuditPlugin extends Plugin {
/**
 * @param {object} options
 * @param {boolean} [options.includeData=true] capture old/new record data
 * @param {boolean} [options.includePartitions=true] capture partition values
 * @param {number} [options.maxDataSize=10000] JSON length above which
 *   records are flagged as truncated (see truncateData)
 *
 * NOTE(review): `...options` is spread AFTER the normalized defaults, so a
 * raw `options.includeData`/`includePartitions` value (when present)
 * overwrites the boolean-normalized one — confirm this is intentional.
 */
constructor(options = {}) {
  super(options);
  this.auditResource = null;
  this.config = {
    includeData: options.includeData !== false,
    includePartitions: options.includePartitions !== false,
    maxDataSize: options.maxDataSize || 1e4,
    ...options
  };
}
/**
 * Creates (or reuses) the `audits` resource and wires auditing onto every
 * resource except `audits` itself — both resources that already exist and
 * ones created later (via the database's `afterCreateResource` hook).
 * Silently disables itself if the audit resource can neither be created
 * nor found.
 */
async onSetup() {
  const [ok, err, auditResource] = await tryFn(() => this.database.createResource({
    name: "audits",
    attributes: {
      id: "string|required",
      resourceName: "string|required",
      operation: "string|required",
      recordId: "string|required",
      userId: "string|optional",
      timestamp: "string|required",
      oldData: "string|optional",
      newData: "string|optional",
      partition: "string|optional",
      partitionValues: "string|optional",
      metadata: "string|optional"
    },
    behavior: "body-overflow"
  }));
  // Fall back to a pre-existing audits resource when creation fails.
  this.auditResource = ok ? auditResource : this.database.resources.audits || null;
  if (!ok && !this.auditResource) return;
  this.database.addHook("afterCreateResource", (context) => {
    if (context.resource.name !== "audits") {
      this.setupResourceAuditing(context.resource);
    }
  });
  for (const resource of Object.values(this.database.resources)) {
    if (resource.name !== "audits") {
      this.setupResourceAuditing(resource);
    }
  }
}
/** Lifecycle no-op: auditing is fully wired during onSetup(). */
async onStart() {
}
/** Lifecycle no-op: nothing to tear down on stop. */
async onStop() {
}
/**
 * Attaches audit logging to one resource: listens to its insert/update/
 * delete events and monkey-patches deleteMany (which emits no per-record
 * event) to snapshot records before deletion. The original deleteMany is
 * preserved on `resource._originalDeleteMany`.
 */
setupResourceAuditing(resource) {
  resource.on("insert", async (data) => {
    const partitionValues = this.config.includePartitions ? this.getPartitionValues(data, resource) : null;
    await this.logAudit({
      resourceName: resource.name,
      operation: "insert",
      recordId: data.id || "auto-generated",
      oldData: null,
      newData: this.config.includeData ? JSON.stringify(this.truncateData(data)) : null,
      partition: partitionValues ? this.getPrimaryPartition(partitionValues) : null,
      partitionValues: partitionValues ? JSON.stringify(partitionValues) : null
    });
  });
  resource.on("update", async (data) => {
    // Prefer the $before snapshot from the event; fetch as a fallback.
    let oldData = data.$before;
    if (this.config.includeData && !oldData) {
      const [ok, err, fetched] = await tryFn(() => resource.get(data.id));
      if (ok) oldData = fetched;
    }
    const partitionValues = this.config.includePartitions ? this.getPartitionValues(data, resource) : null;
    await this.logAudit({
      resourceName: resource.name,
      operation: "update",
      recordId: data.id,
      oldData: oldData && this.config.includeData ? JSON.stringify(this.truncateData(oldData)) : null,
      newData: this.config.includeData ? JSON.stringify(this.truncateData(data)) : null,
      partition: partitionValues ? this.getPrimaryPartition(partitionValues) : null,
      partitionValues: partitionValues ? JSON.stringify(partitionValues) : null
    });
  });
  resource.on("delete", async (data) => {
    let oldData = data;
    if (this.config.includeData && !oldData) {
      const [ok, err, fetched] = await tryFn(() => resource.get(data.id));
      if (ok) oldData = fetched;
    }
    const partitionValues = oldData && this.config.includePartitions ? this.getPartitionValues(oldData, resource) : null;
    await this.logAudit({
      resourceName: resource.name,
      operation: "delete",
      recordId: data.id,
      oldData: oldData && this.config.includeData ? JSON.stringify(this.truncateData(oldData)) : null,
      newData: null,
      partition: partitionValues ? this.getPrimaryPartition(partitionValues) : null,
      partitionValues: partitionValues ? JSON.stringify(partitionValues) : null
    });
  });
  const originalDeleteMany = resource.deleteMany.bind(resource);
  const plugin = this;
  resource.deleteMany = async function(ids) {
    // Snapshot each record BEFORE deletion; fall back to { id } on fetch failure.
    const objectsToDelete = [];
    for (const id of ids) {
      const [ok, err, fetched] = await tryFn(() => resource.get(id));
      if (ok) {
        objectsToDelete.push(fetched);
      } else {
        objectsToDelete.push({ id });
      }
    }
    const result = await originalDeleteMany(ids);
    for (const oldData of objectsToDelete) {
      const partitionValues = oldData && plugin.config.includePartitions ? plugin.getPartitionValues(oldData, resource) : null;
      await plugin.logAudit({
        resourceName: resource.name,
        operation: "deleteMany",
        recordId: oldData.id,
        oldData: oldData && plugin.config.includeData ? JSON.stringify(plugin.truncateData(oldData)) : null,
        newData: null,
        partition: partitionValues ? plugin.getPrimaryPartition(partitionValues) : null,
        partitionValues: partitionValues ? JSON.stringify(partitionValues) : null
      });
    }
    return result;
  };
  resource._originalDeleteMany = originalDeleteMany;
}
// Backward compatibility for tests: legacy alias for setupResourceAuditing.
installEventListenersForResource(resource) {
  return this.setupResourceAuditing(resource);
}
/**
 * Writes one audit record to the `audits` resource. No-op when the audit
 * resource is unavailable. Optional fields (oldData/newData/partition/
 * partitionValues) are only set when non-null, matching their
 * "string|optional" schema. Insert failures are logged via console.warn
 * rather than thrown — auditing is deliberately best-effort.
 */
async logAudit(auditData) {
  if (!this.auditResource) {
    return;
  }
  const auditRecord = {
    // Timestamp + random suffix; uniqueness is probabilistic, not guaranteed.
    id: `audit-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`,
    // getCurrentUserId is an optional extension point on the plugin instance.
    userId: this.getCurrentUserId?.() || "system",
    timestamp: (/* @__PURE__ */ new Date()).toISOString(),
    metadata: JSON.stringify({ source: "audit-plugin", version: "2.0" }),
    resourceName: auditData.resourceName,
    operation: auditData.operation,
    recordId: auditData.recordId
  };
  if (auditData.oldData !== null) {
    auditRecord.oldData = auditData.oldData;
  }
  if (auditData.newData !== null) {
    auditRecord.newData = auditData.newData;
  }
  if (auditData.partition !== null) {
    auditRecord.partition = auditData.partition;
  }
  if (auditData.partitionValues !== null) {
    auditRecord.partitionValues = auditData.partitionValues;
  }
  try {
    await this.auditResource.insert(auditRecord);
  } catch (error) {
    console.warn("Audit logging failed:", error.message);
  }
}
getPartitionValues(data, resource) {
if (!this.config.includePartitions) return null;
const partitions = resource.config?.partitions || resource.partitions;
if (!partitions) {
return null;
}
const partitionValues = {};
for (const [partitionName, partitionConfig] of Object.entries(partitions)) {
const values = {};
for (const field of Object.keys(partitionConfig.fields)) {
values[field] = this.getNestedFieldValue(data, field);
}
if (Object.values(values).some((v) => v !== void 0 && v !== null)) {
partitionValues[partitionName] = values;
}
}
return Object.keys(partitionValues).length > 0 ? partitionValues : null;
}
getNestedFieldValue(data, fieldPath) {
const parts = fieldPath.split(".");
let value = data;
for (const part of parts) {
if (value && typeof value === "object" && part in value) {
value = value[part];
} else {
return void 0;
}
}
return value;
}
getPrimaryPartition(partitionValues) {
if (!partitionValues) return null;
const partitionNames = Object.keys(partitionValues);
return partitionNames.length > 0 ? partitionNames[0] : null;
}
truncateData(data) {
if (!this.config.includeData) return null;
const dataStr = JSON.stringify(data);
if (dataStr.length <= this.config.maxDataSize) {
return data;
}
return {
...data,
_truncated: true,
_originalSize: dataStr.length,
_truncatedAt: (/* @__PURE__ */ new Date()).toISOString()
};
}
async getAuditLogs(options = {}) {
if (!this.auditResource) return [];
const { resourceName, operation, recordId, partition, startDate, endDate, limit = 100, offset = 0 } = options;
const hasFilters = resourceName || operation || recordId || partition || startDate || endDate;
let items = [];
if (hasFilters) {
const fetchSize = Math.min(1e4, Math.max(1e3, (limit + offset) * 20));
const result = await this.auditResource.list({ limit: fetchSize });
items = result || [];
if (resourceName) {
items = items.filter((log) => log.resourceName === resourceName);
}
if (operation) {
items = items.filter((log) => log.operation === operation);
}
if (recordId) {
items = items.filter((log) => log.recordId === recordId);
}
if (partition) {
items = items.filter((log) => log.partition === partition);
}
if (startDate || endDate) {
items = items.filter((log) => {
const timestamp = new Date(log.timestamp);
if (startDate && timestamp < new Date(startDate)) return false;
if (endDate && timestamp > new Date(endDate)) return false;
return true;
});
}
return items.slice(offset, offset + limit);
} else {
const result = await this.auditResource.page({ size: limit, offset });
return result.items || [];
}
}
async getRecordHistory(resourceName, recordId) {
return await this.getAuditLogs({ resourceName, recordId });
}
async getPartitionHistory(resourceName, partitionName, partitionValues) {
return await this.getAuditLogs({
resourceName,
partition: partitionName,
partitionValues: JSON.stringify(partitionValues)
});
}
async getAuditStats(options = {}) {
const logs = await this.getAuditLogs(options);
const stats = {
total: logs.length,
byOperation: {},
byResource: {},
byPartition: {},
byUser: {},
timeline: {}
};
for (const log of logs) {
stats.byOperation[log.operation] = (stats.byOperation[log.operation] || 0) + 1;
stats.byResource[log.resourceName] = (stats.byResource[log.resourceName] || 0) + 1;
if (log.partition) {
stats.byPartition[log.partition] = (stats.byPartition[log.partition] || 0) + 1;
}
stats.byUser[log.userId] = (stats.byUser[log.userId] || 0) + 1;
const date = log.timestamp.split("T")[0];
stats.timeline[date] = (stats.timeline[date] || 0) + 1;
}
return stats;
}
}
/**
 * Abstract base class for backup storage drivers.
 *
 * Concrete drivers must implement upload/download/delete/list/verify and
 * getType; the base class supplies config merging (defaults: gzip
 * compression, no encryption, quiet), setup plumbing, and verbose logging.
 */
class BaseBackupDriver {
  /**
   * @param {Object} config - Driver configuration; merged over the defaults
   *   { compression: "gzip", encryption: null, verbose: false }.
   */
  constructor(config = {}) {
    const defaults = {
      compression: "gzip",
      encryption: null,
      verbose: false
    };
    this.config = { ...defaults, ...config };
  }
  /**
   * Attach the database instance and run driver-specific setup.
   * @param {Database} database - S3DB database instance
   */
  async setup(database) {
    this.database = database;
    await this.onSetup();
  }
  /**
   * Hook for driver-specific setup; no-op by default.
   */
  async onSetup() {
  }
  /**
   * Upload a backup file to the destination. Abstract.
   * @param {string} filePath - Path to the backup file
   * @param {string} backupId - Unique backup identifier
   * @param {Object} manifest - Backup manifest with metadata
   * @returns {Object} Upload result with destination info
   */
  async upload(filePath, backupId, manifest) {
    throw new Error("upload() method must be implemented by subclass");
  }
  /**
   * Download a backup file from the destination. Abstract.
   * @param {string} backupId - Unique backup identifier
   * @param {string} targetPath - Local path to save the backup
   * @param {Object} metadata - Backup metadata
   * @returns {string} Path to downloaded file
   */
  async download(backupId, targetPath, metadata) {
    throw new Error("download() method must be implemented by subclass");
  }
  /**
   * Delete a backup from the destination. Abstract.
   * @param {string} backupId - Unique backup identifier
   * @param {Object} metadata - Backup metadata
   */
  async delete(backupId, metadata) {
    throw new Error("delete() method must be implemented by subclass");
  }
  /**
   * List backups available in the destination. Abstract.
   * @param {Object} options - List options (limit, prefix, etc.)
   * @returns {Array} List of backup metadata
   */
  async list(options = {}) {
    throw new Error("list() method must be implemented by subclass");
  }
  /**
   * Verify backup integrity against an expected checksum. Abstract.
   * @param {string} backupId - Unique backup identifier
   * @param {string} expectedChecksum - Expected file checksum
   * @param {Object} metadata - Backup metadata
   * @returns {boolean} True if backup is valid
   */
  async verify(backupId, expectedChecksum, metadata) {
    throw new Error("verify() method must be implemented by subclass");
  }
  /**
   * Driver type identifier (e.g. "filesystem", "s3"). Abstract.
   * @returns {string} Driver type
   */
  getType() {
    throw new Error("getType() method must be implemented by subclass");
  }
  /**
   * Driver-specific storage info; subclasses extend this via super.
   * @returns {Object} Storage information
   */
  getStorageInfo() {
    return {
      type: this.getType(),
      config: this.config
    };
  }
  /**
   * Release any held resources; no-op by default.
   */
  async cleanup() {
  }
  /**
   * Emit a log line prefixed with the driver type, only in verbose mode.
   * @param {string} message - Message to log
   */
  log(message) {
    if (!this.config.verbose) return;
    console.log(`[${this.getType()}BackupDriver] ${message}`);
  }
}
/**
 * Backup driver that stores backups on the local filesystem.
 *
 * Each backup is written as a pair of files under a templated directory:
 *   <dir>/<backupId>.backup          - the backup payload
 *   <dir>/<backupId>.manifest.json   - JSON manifest with metadata
 *
 * Path template variables: {date}, {time}, {year}, {month}, {day},
 * {backupId}, {type}.
 */
class FilesystemBackupDriver extends BaseBackupDriver {
  constructor(config = {}) {
    super({
      path: "./backups/{date}/",
      permissions: 420, // 0o644 file mode
      directoryPermissions: 493, // 0o755 directory mode
      ...config
    });
  }
  getType() {
    return "filesystem";
  }
  async onSetup() {
    if (!this.config.path) {
      throw new Error("FilesystemBackupDriver: path configuration is required");
    }
    this.log(`Initialized with path: ${this.config.path}`);
  }
  /**
   * Resolve path template variables against the current time and manifest.
   * @param {string} backupId - Backup identifier
   * @param {Object} manifest - Backup manifest
   * @returns {string} Resolved path
   */
  resolvePath(backupId, manifest = {}) {
    const now = new Date();
    const iso = now.toISOString();
    return this.config.path
      .replace("{date}", iso.slice(0, 10))
      .replace("{time}", iso.slice(11, 19).replace(/:/g, "-"))
      .replace("{year}", now.getFullYear().toString())
      .replace("{month}", (now.getMonth() + 1).toString().padStart(2, "0"))
      .replace("{day}", now.getDate().toString().padStart(2, "0"))
      .replace("{backupId}", backupId)
      .replace("{type}", manifest.type || "backup");
  }
  /**
   * Copy the backup file into the target directory and write its manifest
   * next to it. Rolls back the copied file if the manifest write fails.
   * @param {string} filePath - Path to the backup file
   * @param {string} backupId - Unique backup identifier
   * @param {Object} manifest - Backup manifest with metadata
   * @returns {Object} { path, manifestPath, size, uploadedAt }
   */
  async upload(filePath, backupId, manifest) {
    const targetDir = this.resolvePath(backupId, manifest);
    const targetPath = path.join(targetDir, `${backupId}.backup`);
    const manifestPath = path.join(targetDir, `${backupId}.manifest.json`);
    const [createDirOk, createDirErr] = await tryFn(
      () => mkdir(targetDir, { recursive: true, mode: this.config.directoryPermissions })
    );
    if (!createDirOk) {
      throw new Error(`Failed to create backup directory: ${createDirErr.message}`);
    }
    const [copyOk, copyErr] = await tryFn(() => copyFile(filePath, targetPath));
    if (!copyOk) {
      throw new Error(`Failed to copy backup file: ${copyErr.message}`);
    }
    // Use the module-level writeFile import directly instead of a dynamic
    // import('fs/promises') on every call.
    const [manifestOk, manifestErr] = await tryFn(
      () => writeFile(
        manifestPath,
        JSON.stringify(manifest, null, 2),
        { mode: this.config.permissions }
      )
    );
    if (!manifestOk) {
      await tryFn(() => unlink(targetPath)); // best-effort rollback
      throw new Error(`Failed to write manifest: ${manifestErr.message}`);
    }
    const [statOk, , stats] = await tryFn(() => stat(targetPath));
    const size = statOk ? stats.size : 0;
    this.log(`Uploaded backup ${backupId} to ${targetPath} (${size} bytes)`);
    return {
      path: targetPath,
      manifestPath,
      size,
      uploadedAt: new Date().toISOString()
    };
  }
  /**
   * Copy a stored backup to targetPath, creating parent directories.
   * @param {string} backupId - Unique backup identifier
   * @param {string} targetPath - Local path to save the backup
   * @param {Object} metadata - Backup metadata (may carry an explicit path)
   * @returns {string} Path to downloaded file
   */
  async download(backupId, targetPath, metadata) {
    const sourcePath = metadata.path || path.join(
      this.resolvePath(backupId, metadata),
      `${backupId}.backup`
    );
    const [existsOk] = await tryFn(() => access(sourcePath));
    if (!existsOk) {
      throw new Error(`Backup file not found: ${sourcePath}`);
    }
    const targetDir = path.dirname(targetPath);
    await tryFn(() => mkdir(targetDir, { recursive: true }));
    const [copyOk, copyErr] = await tryFn(() => copyFile(sourcePath, targetPath));
    if (!copyOk) {
      throw new Error(`Failed to download backup: ${copyErr.message}`);
    }
    this.log(`Downloaded backup ${backupId} from ${sourcePath} to ${targetPath}`);
    return targetPath;
  }
  /**
   * Delete the backup payload and its manifest. Best-effort: only throws
   * when BOTH deletions fail (a half-deleted pair is tolerated).
   * @param {string} backupId - Unique backup identifier
   * @param {Object} metadata - Backup metadata (may carry explicit paths)
   */
  async delete(backupId, metadata) {
    const backupPath = metadata.path || path.join(
      this.resolvePath(backupId, metadata),
      `${backupId}.backup`
    );
    const manifestPath = metadata.manifestPath || path.join(
      this.resolvePath(backupId, metadata),
      `${backupId}.manifest.json`
    );
    const [deleteBackupOk] = await tryFn(() => unlink(backupPath));
    const [deleteManifestOk] = await tryFn(() => unlink(manifestPath));
    if (!deleteBackupOk && !deleteManifestOk) {
      throw new Error(`Failed to delete backup files for ${backupId}`);
    }
    this.log(`Deleted backup ${backupId}`);
  }
  /**
   * List stored backups by scanning for *.manifest.json files under the
   * resolved base path, newest first.
   * @param {Object} options - { limit = 50, prefix = "" }
   * @returns {Array} List of backup metadata (empty on scan errors)
   */
  async list(options = {}) {
    const { limit = 50, prefix = "" } = options;
    const basePath = this.resolvePath("*").replace("*", "");
    try {
      const results = [];
      await this._scanDirectory(path.dirname(basePath), prefix, results, limit);
      results.sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt));
      return results.slice(0, limit);
    } catch (error) {
      this.log(`Error listing backups: ${error.message}`);
      return [];
    }
  }
  /**
   * Recursively collect manifest entries under dirPath into results,
   * stopping once limit entries have been gathered.
   */
  async _scanDirectory(dirPath, prefix, results, limit) {
    if (results.length >= limit) return;
    const [readDirOk, , files] = await tryFn(() => readdir(dirPath));
    if (!readDirOk) return;
    for (const file of files) {
      if (results.length >= limit) break;
      const fullPath = path.join(dirPath, file);
      const [statOk, , stats] = await tryFn(() => stat(fullPath));
      if (!statOk) continue;
      if (stats.isDirectory()) {
        await this._scanDirectory(fullPath, prefix, results, limit);
      } else if (file.endsWith(".manifest.json")) {
        // Module-level readFile import — no dynamic import needed.
        const [readOk, , content] = await tryFn(() => readFile(fullPath, "utf8"));
        if (readOk) {
          try {
            const manifest = JSON.parse(content);
            const backupId = file.replace(".manifest.json", "");
            if (!prefix || backupId.includes(prefix)) {
              results.push({
                id: backupId,
                path: fullPath.replace(".manifest.json", ".backup"),
                manifestPath: fullPath,
                size: stats.size,
                createdAt: manifest.createdAt || stats.birthtime.toISOString(),
                ...manifest
              });
            }
          } catch (parseErr) {
            this.log(`Failed to parse manifest ${fullPath}: ${parseErr.message}`);
          }
        }
      }
    }
  }
  /**
   * Verify backup integrity by streaming the file through SHA-256 and
   * comparing against the expected checksum.
   *
   * Fix: the previous implementation returned tryFn's success flag instead
   * of the comparison result (the third tuple element), so any readable
   * file "verified" even when its checksum did not match.
   *
   * @param {string} backupId - Unique backup identifier
   * @param {string} expectedChecksum - Expected SHA-256 hex digest
   * @param {Object} metadata - Backup metadata (may carry an explicit path)
   * @returns {boolean} True only when the file is readable AND matches
   */
  async verify(backupId, expectedChecksum, metadata) {
    const backupPath = metadata.path || path.join(
      this.resolvePath(backupId, metadata),
      `${backupId}.backup`
    );
    const [readOk, readErr, matches] = await tryFn(async () => {
      const hash = crypto.createHash("sha256");
      await pipeline(createReadStream(backupPath), hash);
      return hash.digest("hex") === expectedChecksum;
    });
    if (!readOk) {
      this.log(`Verification failed for ${backupId}: ${readErr.message}`);
      return false;
    }
    if (!matches) {
      this.log(`Checksum mismatch for ${backupId}`);
    }
    return matches;
  }
  getStorageInfo() {
    return {
      ...super.getStorageInfo(),
      path: this.config.path,
      permissions: this.config.permissions,
      directoryPermissions: this.config.directoryPermissions
    };
  }
}
class S3BackupDriver extends BaseBackupDriver {
constructor(config = {}) {
super({
bucket: null,
// Will use database bucket if not specified
path: "backups/{date}/",
storageClass: "STANDARD_IA",
serverSideEncryption: "AES256",
client: null,
// Will use database client if not specified
...config
});
}
getType() {
return "s3";
}
async onSetup() {
if (!this.config.client) {
this.config.client = this.database.client;
}
if (!this.config.bucket) {
this.config.bucket = this.database.bucket;
}
if (!this.config.client) {
throw new Error("S3BackupDriver: client is required (either via config or database)");
}
if (!this.config.bucket) {
throw new Error("S3BackupDriver: bucket is required (either via config or database)");
}
this.log(`Initialized with bucket: ${this.config.bucket}, path: ${this.config.path}`);
}
/**
* Resolve S3 key template variables
* @param {string} backupId - Backup identifier
* @param {Object} manifest - Back