@electric-sql/pglite
PGlite is a WASM Postgres build packaged into a TypeScript client library that enables you to run Postgres in the browser, Node.js, and Bun, with no need to install any other dependencies. It is only 3.7 MB gzipped.
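That zero-dependency claim is easiest to see in code. A minimal sketch of the documented client API, using the ephemeral in-memory database that PGlite defaults to:

```ts
import { PGlite } from '@electric-sql/pglite'

// No dataDir argument: an ephemeral, in-memory Postgres.
// Pass e.g. 'idb://my-db' (browser) or './pgdata' (Node/Bun) to persist.
const db = new PGlite()

const { rows } = await db.query("SELECT 'Hello, PGlite!' AS greeting")
console.log(rows) // [ { greeting: 'Hello, PGlite!' } ]
```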

{"version":3,"sources":["../../../../node_modules/.pnpm/tsup@8.3.0_@microsoft+api-extractor@7.47.7_@types+node@20.16.11__postcss@8.4.47_tsx@4.19.1_typescript@5.6.3/node_modules/tsup/assets/cjs_shims.js","../../../../node_modules/.pnpm/tinytar@0.1.0/node_modules/tinytar/lib/utils.js","../../../../node_modules/.pnpm/tinytar@0.1.0/node_modules/tinytar/lib/constants.js","../../../../node_modules/.pnpm/tinytar@0.1.0/node_modules/tinytar/lib/types.js","../../../../node_modules/.pnpm/tinytar@0.1.0/node_modules/tinytar/lib/tar.js","../../../../node_modules/.pnpm/tinytar@0.1.0/node_modules/tinytar/lib/untar.js","../../../../node_modules/.pnpm/tinytar@0.1.0/node_modules/tinytar/index.js","../../src/fs/opfs-ahp.ts","../../src/fs/base.ts","../../src/fs/tarUtils.ts"],"sourcesContent":["// Shim globals in cjs bundle\n// There's a weird bug that esbuild will always inject importMetaUrl\n// if we export it as `const importMetaUrl = ... __filename ...`\n// But using a function will not cause this issue\n\nconst getImportMetaUrl = () =>\n typeof document === 'undefined'\n ? new URL(`file:${__filename}`).href\n : (document.currentScript && document.currentScript.src) ||\n new URL('main.js', document.baseURI).href\n\nexport const importMetaUrl = /* @__PURE__ */ getImportMetaUrl()\n","'use strict';\n\nvar MAX_SAFE_INTEGER = 9007199254740991;\n\nvar undefined = (function(undefined) {\n return undefined;\n})();\n\nfunction isUndefined(value) {\n return value === undefined;\n}\n\nfunction isString(value) {\n return (typeof value == 'string') ||\n (Object.prototype.toString.call(value) == '[object String]');\n}\n\nfunction isDateTime(value) {\n return (Object.prototype.toString.call(value) == '[object Date]');\n}\n\nfunction isObject(value) {\n return (value !== null) && (typeof value == 'object');\n}\n\nfunction isFunction(value) {\n return typeof value == 'function';\n}\n\nfunction isLength(value) {\n return (typeof value == 'number') &&\n (value > -1) && (value % 1 == 0) &&\n (value <= MAX_SAFE_INTEGER);\n}\n\nfunction isArray(value) {\n return Object.prototype.toString.call(value) == '[object Array]';\n}\n\nfunction isArrayLike(value) {\n return isObject(value) && !isFunction(value) && isLength(value.length);\n}\n\nfunction isArrayBuffer(value) {\n return Object.prototype.toString.call(value) == '[object ArrayBuffer]';\n}\n\nfunction map(array, iteratee) {\n return Array.prototype.map.call(array, iteratee);\n}\n\nfunction find(array, iteratee) {\n var result = undefined;\n\n if (isFunction(iteratee)) {\n Array.prototype.every.call(array, function(item, index, array) {\n var found = iteratee(item, index, array);\n if (found) {\n result = item;\n }\n return !found; // continue if not found\n });\n }\n\n return result;\n}\n\nfunction extend(target /* ...sources */) {\n return Object.assign.apply(null, arguments);\n}\n\nfunction toUint8Array(value) {\n var i;\n var length;\n var result;\n\n if (isString(value)) {\n length = value.length;\n result = new Uint8Array(length);\n for (i = 0; i < length; i++) {\n result[i] = value.charCodeAt(i) & 0xFF;\n }\n return result;\n }\n\n if (isArrayBuffer(value)) {\n return new Uint8Array(value);\n }\n\n if (isObject(value) && isArrayBuffer(value.buffer)) {\n return new Uint8Array(value.buffer);\n }\n\n if (isArrayLike(value)) {\n return new Uint8Array(value);\n }\n\n if (isObject(value) && isFunction(value.toString)) {\n return toUint8Array(value.toString());\n }\n\n return new Uint8Array();\n}\n\nmodule.exports.MAX_SAFE_INTEGER = 
MAX_SAFE_INTEGER;\n\nmodule.exports.isUndefined = isUndefined;\nmodule.exports.isString = isString;\nmodule.exports.isObject = isObject;\nmodule.exports.isDateTime = isDateTime;\nmodule.exports.isFunction = isFunction;\nmodule.exports.isArray = isArray;\nmodule.exports.isArrayLike = isArrayLike;\nmodule.exports.isArrayBuffer = isArrayBuffer;\nmodule.exports.map = map;\nmodule.exports.find = find;\nmodule.exports.extend = extend;\nmodule.exports.toUint8Array = toUint8Array;\n","'use strict';\n\nvar NULL_CHAR = '\\u0000';\n\nmodule.exports = {\n /* eslint-disable key-spacing */\n\n NULL_CHAR: NULL_CHAR,\n\n TMAGIC: 'ustar' + NULL_CHAR + '00', // 'ustar', NULL, '00'\n OLDGNU_MAGIC: 'ustar ' + NULL_CHAR, // 'ustar ', NULL\n\n // Values used in typeflag field.\n REGTYPE: 0, // regular file\n LNKTYPE: 1, // link\n SYMTYPE: 2, // reserved\n CHRTYPE: 3, // character special\n BLKTYPE: 4, // block special\n DIRTYPE: 5, // directory\n FIFOTYPE: 6, // FIFO special\n CONTTYPE: 7, // reserved\n\n // Bits used in the mode field, values in octal.\n TSUID: parseInt('4000', 8), // set UID on execution\n TSGID: parseInt('2000', 8), // set GID on execution\n TSVTX: parseInt('1000', 8), // reserved\n\n // file permissions\n TUREAD: parseInt('0400', 8), // read by owner\n TUWRITE: parseInt('0200', 8), // write by owner\n TUEXEC: parseInt('0100', 8), // execute/search by owner\n TGREAD: parseInt('0040', 8), // read by group\n TGWRITE: parseInt('0020', 8), // write by group\n TGEXEC: parseInt('0010', 8), // execute/search by group\n TOREAD: parseInt('0004', 8), // read by other\n TOWRITE: parseInt('0002', 8), // write by other\n TOEXEC: parseInt('0001', 8), // execute/search by other\n\n TPERMALL: parseInt('0777', 8), // rwxrwxrwx\n TPERMMASK: parseInt('0777', 8) // permissions bitmask\n\n /* eslint-enable key-spacing */\n};\n","'use strict';\n\nvar utils = require('./utils');\nvar constants = require('./constants');\n\nvar recordSize = 512;\nvar defaultFileMode = constants.TPERMALL; // rwxrwxrwx\nvar defaultUid = 0; // root\nvar defaultGid = 0; // root\n\n/*\n struct posix_header { // byte offset\n char name[100]; // 0\n char mode[8]; // 100\n char uid[8]; // 108\n char gid[8]; // 116\n char size[12]; // 124\n char mtime[12]; // 136\n char chksum[8]; // 148\n char typeflag; // 156\n char linkname[100]; // 157\n char magic[6]; // 257\n char version[2]; // 263\n char uname[32]; // 265\n char gname[32]; // 297\n char devmajor[8]; // 329\n char devminor[8]; // 337\n char prefix[131]; // 345\n char atime[12]; // 476\n char ctime[12]; // 488\n };\n */\n\nvar posixHeader = [\n // <field name>, <size>, <offset>, <used>, <format>, <parse>, [ <check> ]\n ['name', 100, 0, function(file, field) {\n return formatTarString(file[field[0]], field[1]);\n }, function(buffer, offset, field) {\n return parseTarString(buffer.slice(offset, offset + field[1]));\n }],\n ['mode', 8, 100, function(file, field) {\n var mode = file[field[0]] || defaultFileMode;\n mode = mode & constants.TPERMMASK;\n return formatTarNumber(mode, field[1], defaultFileMode);\n }, function(buffer, offset, field) {\n var result = parseTarNumber(buffer.slice(offset, offset + field[1]));\n result &= constants.TPERMMASK;\n return result;\n }],\n ['uid', 8, 108, function(file, field) {\n return formatTarNumber(file[field[0]], field[1], defaultUid);\n }, function(buffer, offset, field) {\n return parseTarNumber(buffer.slice(offset, offset + field[1]));\n }],\n ['gid', 8, 116, function(file, field) {\n return formatTarNumber(file[field[0]], field[1], defaultGid);\n 
}, function(buffer, offset, field) {\n return parseTarNumber(buffer.slice(offset, offset + field[1]));\n }],\n ['size', 12, 124, function(file, field) {\n return formatTarNumber(file.data.length, field[1]);\n }, function(buffer, offset, field) {\n return parseTarNumber(buffer.slice(offset, offset + field[1]));\n }],\n ['modifyTime', 12, 136, function(file, field) {\n return formatTarDateTime(file[field[0]], field[1]);\n }, function(buffer, offset, field) {\n return parseTarDateTime(buffer.slice(offset, offset + field[1]));\n }],\n ['checksum', 8, 148, function(file, field) {\n return ' '; // placeholder\n }, function(buffer, offset, field) {\n return parseTarNumber(buffer.slice(offset, offset + field[1]));\n }],\n ['type', 1, 156, function(file, field) {\n // get last octal digit; 0 - regular file\n return '' + ((parseInt(file[field[0]], 10) || 0) % 8);\n }, function(buffer, offset, field) {\n return (parseInt(String.fromCharCode(buffer[offset]), 10) || 0) % 8;\n }],\n ['linkName', 100, 157, function(file, field) {\n return ''; // only regular files are supported\n }, function(buffer, offset, field) {\n return parseTarString(buffer.slice(offset, offset + field[1]));\n }],\n ['ustar', 8, 257, function(file, field) {\n return constants.TMAGIC; // magic + version\n }, function(buffer, offset, field) {\n return fixUstarMagic(\n parseTarString(buffer.slice(offset, offset + field[1]), true)\n );\n }, function(file, field) {\n return (file[field[0]] == constants.TMAGIC) ||\n (file[field[0]] == constants.OLDGNU_MAGIC);\n }],\n ['owner', 32, 265, function(file, field) {\n return formatTarString(file[field[0]], field[1]);\n }, function(buffer, offset, field) {\n return parseTarString(buffer.slice(offset, offset + field[1]));\n }],\n ['group', 32, 297, function(file, field) {\n return formatTarString(file[field[0]], field[1]);\n }, function(buffer, offset, field) {\n return parseTarString(buffer.slice(offset, offset + field[1]));\n }],\n ['majorNumber', 8, 329, function(file, field) {\n return ''; // only regular files are supported\n }, function(buffer, offset, field) {\n return parseTarNumber(buffer.slice(offset, offset + field[1]));\n }],\n ['minorNumber', 8, 337, function(file, field) {\n return ''; // only regular files are supported\n }, function(buffer, offset, field) {\n return parseTarNumber(buffer.slice(offset, offset + field[1]));\n }],\n ['prefix', 131, 345, function(file, field) {\n return formatTarString(file[field[0]], field[1]);\n }, function(buffer, offset, field) {\n return parseTarString(buffer.slice(offset, offset + field[1]));\n }],\n ['accessTime', 12, 476, function(file, field) {\n return formatTarDateTime(file[field[0]], field[1]);\n }, function(buffer, offset, field) {\n return parseTarDateTime(buffer.slice(offset, offset + field[1]));\n }],\n ['createTime', 12, 488, function(file, field) {\n return formatTarDateTime(file[field[0]], field[1]);\n }, function(buffer, offset, field) {\n return parseTarDateTime(buffer.slice(offset, offset + field[1]));\n }]\n];\n\nvar effectiveHeaderSize = (function(header) {\n var last = header[header.length - 1];\n return last[2] + last[1]; // offset + size\n})(posixHeader);\n\nfunction fixUstarMagic(value) {\n if (value.length == 8) {\n var chars = value.split('');\n\n if (chars[5] == constants.NULL_CHAR) {\n // TMAGIC ?\n if ((chars[6] == ' ') || (chars[6] == constants.NULL_CHAR)) {\n chars[6] = '0';\n }\n if ((chars[7] == ' ') || (chars[7] == constants.NULL_CHAR)) {\n chars[7] = '0';\n }\n chars = chars.join('');\n return chars == 
constants.TMAGIC ? chars : value;\n } else if (chars[7] == constants.NULL_CHAR) {\n // OLDGNU_MAGIC ?\n if (chars[5] == constants.NULL_CHAR) {\n chars[5] = ' ';\n }\n if (chars[6] == constants.NULL_CHAR) {\n chars[6] = ' ';\n }\n return chars == constants.OLDGNU_MAGIC ? chars : value;\n }\n }\n return value;\n}\n\nfunction formatTarString(value, length) {\n length -= 1; // preserve space for trailing null-char\n if (utils.isUndefined(value)) {\n value = '';\n }\n value = ('' + value).substr(0, length);\n return value + constants.NULL_CHAR;\n}\n\nfunction formatTarNumber(value, length, defaultValue) {\n defaultValue = parseInt(defaultValue) || 0;\n length -= 1; // preserve space for trailing null-char\n value = (parseInt(value) || defaultValue)\n .toString(8).substr(-length, length);\n while (value.length < length) {\n value = '0' + value;\n }\n return value + constants.NULL_CHAR;\n}\n\nfunction formatTarDateTime(value, length) {\n if (utils.isDateTime(value)) {\n value = Math.floor(1 * value / 1000);\n } else {\n value = parseInt(value, 10);\n if (isFinite(value)) {\n if (value <= 0) {\n return '';\n }\n } else {\n value = Math.floor(1 * new Date() / 1000);\n }\n }\n return formatTarNumber(value, length, 0);\n}\n\nfunction parseTarString(bytes, returnUnprocessed) {\n var result = String.fromCharCode.apply(null, bytes);\n if (returnUnprocessed) {\n return result;\n }\n var index = result.indexOf(constants.NULL_CHAR);\n return index >= 0 ? result.substr(0, index) : result;\n}\n\nfunction parseTarNumber(bytes) {\n var result = String.fromCharCode.apply(null, bytes);\n return parseInt(result.replace(/^0+$/g, ''), 8) || 0;\n}\n\nfunction parseTarDateTime(bytes) {\n if ((bytes.length == 0) || (bytes[0] == 0)) {\n return null;\n }\n return new Date(1000 * parseTarNumber(bytes));\n}\n\nfunction calculateChecksum(buffer, offset, skipChecksum) {\n var from = parseInt(offset, 10) || 0;\n var to = Math.min(from + effectiveHeaderSize, buffer.length);\n var result = 0;\n\n // When calculating checksum, `checksum` field should be\n // threat as filled with space char (byte 32)\n var skipFrom = 0;\n var skipTo = 0;\n if (skipChecksum) {\n posixHeader.every(function(field) {\n if (field[0] == 'checksum') {\n skipFrom = from + field[2];\n skipTo = skipFrom + field[1];\n return false;\n }\n return true;\n });\n }\n\n var whitespace = ' '.charCodeAt(0);\n for (var i = from; i < to; i++) {\n // 262144 = 8^6 - 6 octal digits - maximum possible value for checksum;\n // wrap to avoid numeric overflow\n var byte = (i >= skipFrom) && (i < skipTo) ? 
whitespace : buffer[i];\n result = (result + byte) % 262144;\n }\n return result;\n}\n\nmodule.exports.recordSize = recordSize;\nmodule.exports.defaultFileMode = defaultFileMode;\nmodule.exports.defaultUid = defaultUid;\nmodule.exports.defaultGid = defaultGid;\nmodule.exports.posixHeader = posixHeader;\nmodule.exports.effectiveHeaderSize = effectiveHeaderSize;\n\nmodule.exports.calculateChecksum = calculateChecksum;\nmodule.exports.formatTarString = formatTarString;\nmodule.exports.formatTarNumber = formatTarNumber;\nmodule.exports.formatTarDateTime = formatTarDateTime;\nmodule.exports.parseTarString = parseTarString;\nmodule.exports.parseTarNumber = parseTarNumber;\nmodule.exports.parseTarDateTime = parseTarDateTime;\n\n","'use strict';\n\nvar constants = require('./constants');\nvar utils = require('./utils');\nvar types = require('./types');\n\nfunction headerSize(file) {\n // header has fixed size\n return types.recordSize;\n}\n\nfunction dataSize(file) {\n // align to record boundary\n return Math.ceil(file.data.length / types.recordSize) * types.recordSize;\n}\n\nfunction allocateBuffer(files) {\n var totalSize = 0;\n\n // Calculate space that will be used by each file\n files.forEach(function(file) {\n totalSize += headerSize(file) + dataSize(file);\n });\n\n // TAR must end with two empty records\n totalSize += types.recordSize * 2;\n\n // Array SHOULD be initialized with zeros:\n // from TypedArray constructor docs:\n // > When creating a TypedArray instance (i.e. instance of Int8Array\n // > or similar), an array buffer is created internally\n // from ArrayBuffer constructor docs:\n // > A new ArrayBuffer object of the specified size.\n // > Its contents are initialized to 0.\n return new Uint8Array(totalSize);\n}\n\nfunction writeHeader(buffer, file, offset) {\n offset = parseInt(offset) || 0;\n\n var currentOffset = offset;\n types.posixHeader.forEach(function(field) {\n var value = field[3](file, field);\n var length = value.length;\n for (var i = 0; i < length; i += 1) {\n buffer[currentOffset + i] = value.charCodeAt(i) & 0xFF;\n }\n currentOffset += field[1]; // move to the next field\n });\n\n var field = utils.find(types.posixHeader, function(field) {\n return field[0] == 'checksum';\n });\n\n if (field) {\n // Patch checksum field\n var checksum = types.calculateChecksum(buffer, offset, true);\n var value = types.formatTarNumber(checksum, field[1] - 2) +\n constants.NULL_CHAR + ' ';\n currentOffset = offset + field[2];\n for (var i = 0; i < value.length; i += 1) {\n // put bytes\n buffer[currentOffset] = value.charCodeAt(i) & 0xFF;\n currentOffset++;\n }\n }\n\n return offset + headerSize(file);\n}\n\nfunction writeData(buffer, file, offset) {\n offset = parseInt(offset, 10) || 0;\n buffer.set(file.data, offset);\n return offset + dataSize(file);\n}\n\nfunction tar(files) {\n files = utils.map(files, function(file) {\n return utils.extend({}, file, {\n data: utils.toUint8Array(file.data)\n });\n });\n\n var buffer = allocateBuffer(files);\n\n var offset = 0;\n files.forEach(function(file) {\n offset = writeHeader(buffer, file, offset);\n offset = writeData(buffer, file, offset);\n });\n\n return buffer;\n}\n\nmodule.exports.tar = tar;\n","'use strict';\n\nvar constants = require('./constants');\nvar utils = require('./utils');\nvar types = require('./types');\n\nvar defaultOptions = {\n extractData: true,\n checkHeader: true,\n checkChecksum: true,\n checkFileSize: true\n};\n\nvar excludeFields = {\n size: true,\n checksum: true,\n ustar: true\n};\n\nvar messages = {\n 
unexpectedEndOfFile: 'Unexpected end of file.',\n fileCorrupted: 'File is corrupted.',\n checksumCheckFailed: 'Checksum check failed.'\n};\n\nfunction headerSize(header) {\n // header has fixed size\n return types.recordSize;\n}\n\nfunction dataSize(size) {\n // align to record boundary\n return Math.ceil(size / types.recordSize) * types.recordSize;\n}\n\nfunction isEndOfFile(buffer, offset) {\n var from = offset;\n var to = Math.min(buffer.length, offset + types.recordSize * 2);\n for (var i = from; i < to; i++) {\n if (buffer[i] != 0) {\n return false;\n }\n }\n return true;\n}\n\nfunction readHeader(buffer, offset, options) {\n if (buffer.length - offset < types.recordSize) {\n if (options.checkFileSize) {\n throw new Error(messages.unexpectedEndOfFile);\n }\n return null;\n }\n\n offset = parseInt(offset) || 0;\n\n var result = {};\n var currentOffset = offset;\n types.posixHeader.forEach(function(field) {\n result[field[0]] = field[4](buffer, currentOffset, field);\n currentOffset += field[1];\n });\n\n if (result.type != 0) { // only regular files can have data\n result.size = 0;\n }\n\n if (options.checkHeader) {\n types.posixHeader.forEach(function(field) {\n if (utils.isFunction(field[5]) && !field[5](result, field)) {\n var error = new Error(messages.fileCorrupted);\n error.data = {\n offset: offset + field[2],\n field: field[0]\n };\n throw error;\n }\n });\n }\n\n if (options.checkChecksum) {\n var checksum = types.calculateChecksum(buffer, offset, true);\n if (checksum != result.checksum) {\n var error = new Error(messages.checksumCheckFailed);\n error.data = {\n offset: offset,\n header: result,\n checksum: checksum\n };\n throw error;\n }\n }\n\n return result;\n}\n\nfunction readData(buffer, offset, header, options) {\n if (!options.extractData) {\n return null;\n }\n\n if (header.size <= 0) {\n return new Uint8Array();\n }\n return buffer.slice(offset, offset + header.size);\n}\n\nfunction createFile(header, data) {\n var result = {};\n types.posixHeader.forEach(function(field) {\n var name = field[0];\n if (!excludeFields[name]) {\n result[name] = header[name];\n }\n });\n\n result.isOldGNUFormat = header.ustar == constants.OLDGNU_MAGIC;\n\n if (data) {\n result.data = data;\n }\n\n return result;\n}\n\nfunction untar(buffer, options) {\n options = utils.extend({}, defaultOptions, options);\n\n var result = [];\n var offset = 0;\n var size = buffer.length;\n\n while (size - offset >= types.recordSize) {\n buffer = utils.toUint8Array(buffer);\n var header = readHeader(buffer, offset, options);\n if (!header) {\n break;\n }\n offset += headerSize(header);\n\n var data = readData(buffer, offset, header, options);\n result.push(createFile(header, data));\n offset += dataSize(header.size);\n\n if (isEndOfFile(buffer, offset)) {\n break;\n }\n }\n\n return result;\n}\n\nmodule.exports.untar = untar;\n","'use strict';\n\n// http://www.gnu.org/software/tar/manual/html_node/Standard.html\n\nvar utils = require('./lib/utils');\nvar constants = require('./lib/constants');\nvar tar = require('./lib/tar');\nvar untar = require('./lib/untar');\n\nutils.extend(module.exports, tar, untar, constants);\n","import { BaseFilesystem, ERRNO_CODES, type FsStats } from './base.js'\nimport type { PostgresMod } from '../postgresMod.js'\nimport { PGlite } from '../pglite.js'\n\nexport interface OpfsAhpOptions {\n initialPoolSize?: number\n maintainedPoolSize?: number\n debug?: boolean\n}\n\n// TypeScript doesn't have a built-in type for FileSystemSyncAccessHandle\nexport interface 
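The `tar()` and `untar()` helpers above are what PGlite's data-dir dump support builds on, and they can be exercised on their own. Below is a round-trip sketch based only on the signatures shown above; the require path assumes the standalone `tinytar` package (an untyped CommonJS module) rather than the copy bundled into PGlite:

```ts
const { tar, untar } = require('tinytar') // untyped CommonJS, so `any`

// Pack two in-memory files. `data` may be a string, TypedArray or
// ArrayBuffer; tar() normalises each through toUint8Array() above.
const archive = tar([
  { name: 'hello.txt', data: 'hello world' },
  { name: 'notes/today.txt', data: new Uint8Array([104, 105]) }, // "hi"
])

// `archive` is a Uint8Array of 512-byte records terminated by two zero
// records, ready to be written to disk or gzipped as-is.
for (const file of untar(archive)) {
  // Each entry carries the parsed header fields plus its `data` bytes.
  console.log(file.name, new TextDecoder().decode(file.data))
}
```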
src/fs/opfs-ahp.ts:

```ts
import { BaseFilesystem, ERRNO_CODES, type FsStats } from './base.js'
import type { PostgresMod } from '../postgresMod.js'
import { PGlite } from '../pglite.js'

export interface OpfsAhpOptions {
  initialPoolSize?: number
  maintainedPoolSize?: number
  debug?: boolean
}

// TypeScript doesn't have a built-in type for FileSystemSyncAccessHandle
export interface FileSystemSyncAccessHandle {
  close(): void
  flush(): void
  getSize(): number
  read(buffer: ArrayBuffer, options: { at: number }): number
  truncate(newSize: number): void
  write(buffer: ArrayBuffer, options: { at: number }): number
}

// State

const STATE_FILE = 'state.txt'
const DATA_DIR = 'data'
const INITIAL_MODE = {
  DIR: 16384,
  FILE: 32768,
}

export interface State {
  root: DirectoryNode
  pool: PoolFilenames
}

export type PoolFilenames = Array<string>

// WAL

export interface WALEntry {
  opp: string
  args: any[]
}

// Node tree

export type NodeType = 'file' | 'directory'

interface BaseNode {
  type: NodeType
  lastModified: number
  mode: number
}

export interface FileNode extends BaseNode {
  type: 'file'
  backingFilename: string
}

export interface DirectoryNode extends BaseNode {
  type: 'directory'
  children: { [filename: string]: Node }
}

export type Node = FileNode | DirectoryNode

/**
 * PGlite OPFS access handle pool filesystem.
 * Opens a pool of sync access handles and then allocates them as needed.
 */
export class OpfsAhpFS extends BaseFilesystem {
  declare readonly dataDir: string
  readonly initialPoolSize: number
  readonly maintainedPoolSize: number

  #opfsRootAh!: FileSystemDirectoryHandle
  #rootAh!: FileSystemDirectoryHandle
  #dataDirAh!: FileSystemDirectoryHandle

  #stateFH!: FileSystemFileHandle
  #stateSH!: FileSystemSyncAccessHandle

  #fh: Map<string, FileSystemFileHandle> = new Map()
  #sh: Map<string, FileSystemSyncAccessHandle> = new Map()

  #handleIdCounter = 0
  #openHandlePaths: Map<number, string> = new Map()
  #openHandleIds: Map<string, number> = new Map()

  state!: State
  lastCheckpoint = 0
  checkpointInterval = 1000 * 60 // 1 minute
  poolCounter = 0

  #unsyncedSH = new Set<FileSystemSyncAccessHandle>()

  constructor(
    dataDir: string,
    {
      initialPoolSize = 1000,
      maintainedPoolSize = 100,
      debug = false,
    }: OpfsAhpOptions = {},
  ) {
    super(dataDir, { debug })
    this.initialPoolSize = initialPoolSize
    this.maintainedPoolSize = maintainedPoolSize
  }

  async init(pg: PGlite, opts: Partial<PostgresMod>) {
    await this.#init()
    return super.init(pg, opts)
  }

  async syncToFs(relaxedDurability = false) {
    await this.maybeCheckpointState()
    await this.maintainPool()
    if (!relaxedDurability) {
      this.flush()
    }
  }

  async closeFs(): Promise<void> {
    for (const sh of this.#sh.values()) {
      sh.close()
    }
    this.#stateSH.flush()
    this.#stateSH.close()
    this.pg!.Module.FS.quit()
  }

  async #init() {
    this.#opfsRootAh = await navigator.storage.getDirectory()
    this.#rootAh = await this.#resolveOpfsDirectory(this.dataDir!, {
      create: true,
    })
    this.#dataDirAh = await this.#resolveOpfsDirectory(DATA_DIR, {
      from: this.#rootAh,
      create: true,
    })

    this.#stateFH = await this.#rootAh.getFileHandle(STATE_FILE, {
      create: true,
    })
    this.#stateSH = await (this.#stateFH as any).createSyncAccessHandle()

    const stateAB = new ArrayBuffer(this.#stateSH.getSize())
    this.#stateSH.read(stateAB, { at: 0 })
    let state: State
    const stateLines = new TextDecoder().decode(stateAB).split('\n')
    // Line 1 is a base state object.
    // Lines 1+n are WAL entries.

    let isNewState = false
    try {
      state = JSON.parse(stateLines[0])
    } catch (e) {
      state = {
        root: {
          type: 'directory',
          lastModified: Date.now(),
          mode: INITIAL_MODE.DIR,
          children: {},
        },
        pool: [],
      }
      // write new state to file
      this.#stateSH.truncate(0)
      this.#stateSH.write(new TextEncoder().encode(JSON.stringify(state)), {
        at: 0,
      })
      isNewState = true
    }
    this.state = state

    // Apply WAL entries
    const wal = stateLines
      .slice(1)
      .filter(Boolean)
      .map((line) => JSON.parse(line))
    for (const entry of wal) {
      const methodName = `_${entry.opp}State`
      if (typeof this[methodName as keyof this] === 'function') {
        try {
          const method = this[methodName as keyof this] as any
          method.bind(this)(...entry.args)
        } catch (e) {
          console.warn('Error applying OPFS AHP WAL entry', entry, e)
        }
      }
    }

    // Open all file handles for dir tree
    const walkPromises: Promise<void>[] = []
    const walk = async (node: Node) => {
      if (node.type === 'file') {
        try {
          const fh = await this.#dataDirAh.getFileHandle(node.backingFilename)
          const sh: FileSystemSyncAccessHandle = await (
            fh as any
          ).createSyncAccessHandle()
          this.#fh.set(node.backingFilename, fh)
          this.#sh.set(node.backingFilename, sh)
        } catch (e) {
          console.error('Error opening file handle for node', node, e)
        }
      } else {
        for (const child of Object.values(node.children)) {
          walkPromises.push(walk(child))
        }
      }
    }
    await walk(this.state.root)

    // Open all pool file handles
    const poolPromises: Promise<void>[] = []
    for (const filename of this.state.pool) {
      poolPromises.push(
        // eslint-disable-next-line no-async-promise-executor
        new Promise<void>(async (resolve) => {
          if (this.#fh.has(filename)) {
            console.warn('File handle already exists for pool file', filename)
          }
          const fh = await this.#dataDirAh.getFileHandle(filename)
          const sh: FileSystemSyncAccessHandle = await (
            fh as any
          ).createSyncAccessHandle()
          this.#fh.set(filename, fh)
          this.#sh.set(filename, sh)
          resolve()
        }),
      )
    }

    await Promise.all([...walkPromises, ...poolPromises])

    await this.maintainPool(
      isNewState ? this.initialPoolSize : this.maintainedPoolSize,
    )
  }

  async maintainPool(size?: number) {
    size = size || this.maintainedPoolSize
    const change = size - this.state.pool.length
    const promises: Promise<void>[] = []
    for (let i = 0; i < change; i++) {
      promises.push(
        // eslint-disable-next-line no-async-promise-executor
        new Promise<void>(async (resolve) => {
          ++this.poolCounter
          const filename = `${(Date.now() - 1704063600).toString(16).padStart(8, '0')}-${this.poolCounter.toString(16).padStart(8, '0')}`
          const fh = await this.#dataDirAh.getFileHandle(filename, {
            create: true,
          })
          const sh: FileSystemSyncAccessHandle = await (
            fh as any
          ).createSyncAccessHandle()
          this.#fh.set(filename, fh)
          this.#sh.set(filename, sh)
          this.#logWAL({
            opp: 'createPoolFile',
            args: [filename],
          })
          this.state.pool.push(filename)
          resolve()
        }),
      )
    }
    for (let i = 0; i > change; i--) {
      promises.push(
        // eslint-disable-next-line no-async-promise-executor
        new Promise<void>(async (resolve) => {
          const filename = this.state.pool.pop()!
          this.#logWAL({
            opp: 'deletePoolFile',
            args: [filename],
          })
          const fh = this.#fh.get(filename)!
          const sh = this.#sh.get(filename)
          sh?.close()
          await this.#dataDirAh.removeEntry(fh.name)
          this.#fh.delete(filename)
          this.#sh.delete(filename)
          resolve()
        }),
      )
    }
    await Promise.all(promises)
  }

  _createPoolFileState(filename: string) {
    this.state.pool.push(filename)
  }

  _deletePoolFileState(filename: string) {
    const index = this.state.pool.indexOf(filename)
    if (index > -1) {
      this.state.pool.splice(index, 1)
    }
  }

  async maybeCheckpointState() {
    if (Date.now() - this.lastCheckpoint > this.checkpointInterval) {
      await this.checkpointState()
    }
  }

  async checkpointState() {
    const stateAB = new TextEncoder().encode(JSON.stringify(this.state))
    this.#stateSH.truncate(0)
    this.#stateSH.write(stateAB, { at: 0 })
    this.#stateSH.flush()
    this.lastCheckpoint = Date.now()
  }

  flush() {
    for (const sh of this.#unsyncedSH) {
      try {
        sh.flush()
      } catch (e) {
        // The file may have been closed if it was deleted
      }
    }
    this.#unsyncedSH.clear()
  }

  // Filesystem API:

  chmod(path: string, mode: number): void {
    this.#tryWithWAL({ opp: 'chmod', args: [path, mode] }, () => {
      this._chmodState(path, mode)
    })
  }

  _chmodState(path: string, mode: number): void {
    const node = this.#resolvePath(path)
    node.mode = mode
  }

  close(fd: number): void {
    const path = this.#getPathFromFd(fd)
    this.#openHandlePaths.delete(fd)
    this.#openHandleIds.delete(path)
  }

  fstat(fd: number): FsStats {
    const path = this.#getPathFromFd(fd)
    return this.lstat(path)
  }

  lstat(path: string): FsStats {
    const node = this.#resolvePath(path)
    const size =
      node.type === 'file' ? this.#sh.get(node.backingFilename)!.getSize() : 0
    const blksize = 4096
    return {
      dev: 0,
      ino: 0,
      mode: node.mode,
      nlink: 1,
      uid: 0,
      gid: 0,
      rdev: 0,
      size,
      blksize,
      blocks: Math.ceil(size / blksize),
      atime: node.lastModified,
      mtime: node.lastModified,
      ctime: node.lastModified,
    }
  }

  mkdir(path: string, options?: { recursive?: boolean; mode?: number }): void {
    this.#tryWithWAL({ opp: 'mkdir', args: [path, options] }, () => {
      this._mkdirState(path, options)
    })
  }

  _mkdirState(
    path: string,
    options?: { recursive?: boolean; mode?: number },
  ): void {
    const parts = this.#pathParts(path)
    const newDirName = parts.pop()!
    const currentPath: string[] = []
    let node = this.state.root
    for (const part of parts) {
      currentPath.push(part)
      if (!Object.prototype.hasOwnProperty.call(node.children, part)) {
        if (options?.recursive) {
          this.mkdir(currentPath.join('/'))
        } else {
          throw new FsError('ENOENT', 'No such file or directory')
        }
      }
      if (node.children[part].type !== 'directory') {
        throw new FsError('ENOTDIR', 'Not a directory')
      }
      node = node.children[part] as DirectoryNode
    }
    if (Object.prototype.hasOwnProperty.call(node.children, newDirName)) {
      throw new FsError('EEXIST', 'File exists')
    }
    const newDir: DirectoryNode = {
      type: 'directory',
      lastModified: Date.now(),
      mode: options?.mode || INITIAL_MODE.DIR,
      children: {},
    }
    node.children[newDirName] = newDir
  }

  open(path: string, _flags?: string, _mode?: number): number {
    const node = this.#resolvePath(path)
    if (node.type !== 'file') {
      throw new FsError('EISDIR', 'Is a directory')
    }
    const handleId = this.#nextHandleId()
    this.#openHandlePaths.set(handleId, path)
    this.#openHandleIds.set(path, handleId)
    return handleId
  }

  readdir(path: string): string[] {
    const node = this.#resolvePath(path)
    if (node.type !== 'directory') {
      throw new FsError('ENOTDIR', 'Not a directory')
    }
    return Object.keys(node.children)
  }

  read(
    fd: number,
    buffer: Uint8Array, // Buffer to read into
    offset: number, // Offset in buffer to start writing to
    length: number, // Number of bytes to read
    position: number, // Position in file to read from
  ): number {
    const path = this.#getPathFromFd(fd)
    const node = this.#resolvePath(path)
    if (node.type !== 'file') {
      throw new FsError('EISDIR', 'Is a directory')
    }
    const sh = this.#sh.get(node.backingFilename)!
    return sh.read(new Uint8Array(buffer.buffer, offset, length), {
      at: position,
    })
  }

  rename(oldPath: string, newPath: string): void {
    this.#tryWithWAL({ opp: 'rename', args: [oldPath, newPath] }, () => {
      this._renameState(oldPath, newPath, true)
    })
  }

  _renameState(oldPath: string, newPath: string, doFileOps = false): void {
    const oldPathParts = this.#pathParts(oldPath)
    const oldFilename = oldPathParts.pop()!
    const oldParent = this.#resolvePath(oldPathParts.join('/')) as DirectoryNode
    if (
      !Object.prototype.hasOwnProperty.call(oldParent.children, oldFilename)
    ) {
      throw new FsError('ENOENT', 'No such file or directory')
    }
    const newPathParts = this.#pathParts(newPath)
    const newFilename = newPathParts.pop()!
    const newParent = this.#resolvePath(newPathParts.join('/')) as DirectoryNode
    if (
      doFileOps &&
      Object.prototype.hasOwnProperty.call(newParent.children, newFilename)
    ) {
      // Overwrite, so return the underlying file to the pool
      const node = newParent.children[newFilename]! as FileNode
      const sh = this.#sh.get(node.backingFilename)!
      sh.truncate(0)
      this.state.pool.push(node.backingFilename)
    }
    newParent.children[newFilename] = oldParent.children[oldFilename]!
    delete oldParent.children[oldFilename]
  }

  rmdir(path: string): void {
    this.#tryWithWAL({ opp: 'rmdir', args: [path] }, () => {
      this._rmdirState(path)
    })
  }

  _rmdirState(path: string): void {
    const pathParts = this.#pathParts(path)
    const dirName = pathParts.pop()!
    const parent = this.#resolvePath(pathParts.join('/')) as DirectoryNode
    if (!Object.prototype.hasOwnProperty.call(parent.children, dirName)) {
      throw new FsError('ENOENT', 'No such file or directory')
    }
    const node = parent.children[dirName]!
    if (node.type !== 'directory') {
      throw new FsError('ENOTDIR', 'Not a directory')
    }
    if (Object.keys(node.children).length > 0) {
      throw new FsError('ENOTEMPTY', 'Directory not empty')
    }
    delete parent.children[dirName]
  }

  truncate(path: string, len = 0): void {
    const node = this.#resolvePath(path)
    if (node.type !== 'file') {
      throw new FsError('EISDIR', 'Is a directory')
    }
    const sh = this.#sh.get(node.backingFilename)
    if (!sh) {
      throw new FsError('ENOENT', 'No such file or directory')
    }
    sh.truncate(len)
    this.#unsyncedSH.add(sh)
  }

  unlink(path: string): void {
    this.#tryWithWAL({ opp: 'unlink', args: [path] }, () => {
      this._unlinkState(path, true)
    })
  }

  _unlinkState(path: string, doFileOps = false): void {
    const pathParts = this.#pathParts(path)
    const filename = pathParts.pop()!
    const dir = this.#resolvePath(pathParts.join('/')) as DirectoryNode
    if (!Object.prototype.hasOwnProperty.call(dir.children, filename)) {
      throw new FsError('ENOENT', 'No such file or directory')
    }
    const node = dir.children[filename]!
    if (node.type !== 'file') {
      throw new FsError('EISDIR', 'Is a directory')
    }
    delete dir.children[filename]
    if (doFileOps) {
      const sh = this.#sh.get(node.backingFilename)!
      // We don't delete the file, it's truncated and returned to the pool
      sh?.truncate(0)
      this.#unsyncedSH.add(sh)
      if (this.#openHandleIds.has(path)) {
        this.#openHandlePaths.delete(this.#openHandleIds.get(path)!)
        this.#openHandleIds.delete(path)
      }
    }
    this.state.pool.push(node.backingFilename)
  }

  utimes(path: string, atime: number, mtime: number): void {
    this.#tryWithWAL({ opp: 'utimes', args: [path, atime, mtime] }, () => {
      this._utimesState(path, atime, mtime)
    })
  }

  _utimesState(path: string, _atime: number, mtime: number): void {
    const node = this.#resolvePath(path)
    node.lastModified = mtime
  }

  writeFile(
    path: string,
    data: string | Uint8Array,
    options?: { encoding?: string; mode?: number; flag?: string },
  ): void {
    const pathParts = this.#pathParts(path)
    const filename = pathParts.pop()!
    const parent = this.#resolvePath(pathParts.join('/')) as DirectoryNode

    if (!Object.prototype.hasOwnProperty.call(parent.children, filename)) {
      if (this.state.pool.length === 0) {
        throw new Error('No more file handles available in the pool')
      }
      const node: Node = {
        type: 'file',
        lastModified: Date.now(),
        mode: options?.mode || INITIAL_MODE.FILE,
        backingFilename: this.state.pool.pop()!,
      }
      parent.children[filename] = node
      this.#logWAL({
        opp: 'createFileNode',
        args: [path, node],
      })
    } else {
      const node = parent.children[filename] as FileNode
      node.lastModified = Date.now()
      this.#logWAL({
        opp: 'setLastModified',
        args: [path, node.lastModified],
      })
    }
    const node = parent.children[filename] as FileNode
    const sh = this.#sh.get(node.backingFilename)!
    // Files in pool are empty, only write if data is provided
    if (data.length > 0) {
      sh.write(
        typeof data === 'string'
          ? new TextEncoder().encode(data)
          : new Uint8Array(data),
        { at: 0 },
      )
      if (path.startsWith('/pg_wal')) {
        this.#unsyncedSH.add(sh)
      }
    }
  }

  _createFileNodeState(path: string, node: FileNode): FileNode {
    const pathParts = this.#pathParts(path)
    const filename = pathParts.pop()!
    const parent = this.#resolvePath(pathParts.join('/')) as DirectoryNode
    parent.children[filename] = node
    // remove backingFilename from pool
    const index = this.state.pool.indexOf(node.backingFilename)
    if (index > -1) {
      this.state.pool.splice(index, 1)
    }
    return node
  }

  _setLastModifiedState(path: string, lastModified: number): void {
    const node = this.#resolvePath(path)
    node.lastModified = lastModified
  }

  write(
    fd: number,
    buffer: Uint8Array, // Buffer to read from
    offset: number, // Offset in buffer to start reading from
    length: number, // Number of bytes to write
    position: number, // Position in file to write to
  ): number {
    const path = this.#getPathFromFd(fd)
    const node = this.#resolvePath(path)
    if (node.type !== 'file') {
      throw new FsError('EISDIR', 'Is a directory')
    }
    const sh = this.#sh.get(node.backingFilename)
    if (!sh) {
      throw new FsError('EBADF', 'Bad file descriptor')
    }
    const ret = sh.write(new Uint8Array(buffer, offset, length), {
      at: position,
    })
    if (path.startsWith('/pg_wal')) {
      this.#unsyncedSH.add(sh)
    }
    return ret
  }

  // Internal methods:

  #tryWithWAL(entry: WALEntry, fn: () => void) {
    const offset = this.#logWAL(entry)
    try {
      fn()
    } catch (e) {
      // Rollback WAL entry
      this.#stateSH.truncate(offset)
      throw e
    }
  }

  #logWAL(entry: WALEntry) {
    const entryJSON = JSON.stringify(entry)
    const stateAB = new TextEncoder().encode(`\n${entryJSON}`)
    const offset = this.#stateSH.getSize()
    this.#stateSH.write(stateAB, { at: offset })
    this.#unsyncedSH.add(this.#stateSH)
    return offset
  }

  #pathParts(path: string): string[] {
    return path.split('/').filter(Boolean)
  }

  #resolvePath(path: string, from?: DirectoryNode): Node {
    const parts = this.#pathParts(path)
    let node: Node = from || this.state.root
    for (const part of parts) {
      if (node.type !== 'directory') {
        throw new FsError('ENOTDIR', 'Not a directory')
      }
      if (!Object.prototype.hasOwnProperty.call(node.children, part)) {
        throw new FsError('ENOENT', 'No such file or directory')
      }
      node = node.children[part]!
    }
    return node
  }

  #getPathFromFd(fd: number): string {
    const path = this.#openHandlePaths.get(fd)
    if (!path) {
      throw new FsError('EBADF', 'Bad file descriptor')
    }
    return path
  }

  #nextHandleId(): number {
    const id = ++this.#handleIdCounter
    while (this.#openHandlePaths.has(id)) {
      this.#handleIdCounter++
    }
    return id
  }

  async #resolveOpfsDirectory(
    path: string,
    options?: {
      from?: FileSystemDirectoryHandle
      create?: boolean
    },
  ): Promise<FileSystemDirectoryHandle> {
    const parts = this.#pathParts(path)
    let ah = options?.from || this.#opfsRootAh
    for (const part of parts) {
      ah = await ah.getDirectoryHandle(part, { create: options?.create })
    }
    return ah
  }
}

class FsError extends Error {
  code?: number
  constructor(code: number | keyof typeof ERRNO_CODES | null, message: string) {
    super(message)
    if (typeof code === 'number') {
      this.code = code
    } else if (typeof code === 'string') {
      this.code = ERRNO_CODES[code]
    }
  }
}
```
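`OpfsAhpFS` is not normally constructed by hand; PGlite selects a filesystem from the `dataDir` prefix. The sketch below shows how this class is typically reached, assuming the `opfs-ahp://` prefix documented for PGlite's OPFS access handle pool filesystem; note that `createSyncAccessHandle()`, which the pool relies on, is only available inside a Web Worker:

```ts
// worker.ts — must run in a Web Worker: the sync access handles that
// OpfsAhpFS pools are not exposed on the main thread.
import { PGlite } from '@electric-sql/pglite'

// 'opfs-ahp://' prefix assumed per the PGlite filesystem docs.
const db = new PGlite('opfs-ahp://pglite/my-app')

await db.exec(`
  CREATE TABLE IF NOT EXISTS todo (id serial PRIMARY KEY, task text);
  INSERT INTO todo (task) VALUES ('persisted in OPFS');
`)
```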
src/fs/base.ts:

```ts
import type { PostgresMod } from '../postgresMod.js'
import type { PGlite } from '../pglite.js'
import { dumpTar, type DumpTarCompressionOptions } from './tarUtils.js'

export const WASM_PREFIX = '/tmp/pglite'
export const PGDATA = WASM_PREFIX + '/' + 'base'

export type FsType = 'nodefs' | 'idbfs' | 'memoryfs' | 'opfs-ahp'

/**
 * Filesystem interface.
 * All virtual filesystems that are compatible with PGlite must implement
 * this interface.
 */
export interface Filesystem {
  /**
   * Initiate the filesystem and return the options to pass to the emscripten module.
   */
  init(
    pg: PGlite,
    emscriptenOptions: Partial<PostgresMod>,
  ): Promise<{ emscriptenOpts: Partial<PostgresMod> }>

  /**
   * Sync the filesystem to any underlying storage.
   */
  syncToFs(relaxedDurability?: boolean): Promise<void>

  /**
   * Sync the filesystem from any underlying storage.
   */
  initialSyncFs(): Promise<void>

  /**
   * Dump the PGDATA dir from the filesystem to a gzipped tarball.
   */
  dumpTar(
    dbname: string,
    compression?: DumpTarCompressionOptions,
  ): Promise<File | Blob>

  /**
   * Close the filesystem.
   */
  closeFs(): Promise<void>
}

/**
 * Base class for all emscripten built-in filesystems.
 */
export class EmscriptenBuiltinFilesystem implements Filesystem {
  protected dataDir?: string
  protected pg?: PGlite

  constructor(dataDir?: string) {
    this.dataDir = dataDir
  }

  async init(pg: PGlite, emscriptenOptions: Partial<PostgresMod>) {
    this.pg = pg
    return { emscriptenOpts: emscriptenOptions }
  }

  async syncToFs(_relaxedDurability?: boolean) {}

  async initialSyncFs() {}

  async closeFs() {}

  async dumpTar(dbname: string, compression?: DumpTarCompressionOptions) {
    return dumpTar(this.pg!.Module.FS, PGDATA, dbname, compression)
  }
}

/**
 * Abstract base class for all custom virtual filesystems.
 * Each custom filesystem needs to implement an interface similar to the NodeJS FS API.
 */
export abstract class BaseFilesystem implements Filesystem {
  protected dataDir?: string
  protected pg?: PGlite
  readonly debug: boolean

  constructor(dataDir?: string, { debug = false }: { debug?: boolean } = {}) {
    this.dataDir = dataDir
    this.debug = debug
  }

  async syncToFs(_relaxedDurability?: boolean) {}

  async initialSyncFs() {}

  async closeFs() {}

  async dumpTar(dbname: string, compression?: DumpTarCompressionOptions) {
    return dumpTar(this.pg!.Module.FS, PGDATA, dbname, compression)
  }

  async init(pg: PGlite, emscriptenOptions: Partial<PostgresMod>) {
    this.pg = pg
    const options: Partial<PostgresMod> = {
      ...emscriptenOptions,
      preRun: [
        ...(emscriptenOptions.preRun || []),
        (mod: PostgresMod) => {
          const EMFS = createEmscriptenFS(mod, this)
          mod.FS.mkdir(PGDATA)
          mod.FS.mount(EMFS, {}, PGDATA)
        },
      ],
    }
    return { emscriptenOpts: options }
  }

  // Filesystem API

  abstract chmod(path: string, mode: number): void
  abstract close(fd: number): void
  abstract fstat(fd: number): FsStats
  abstract lstat(path: string): FsStats
  abstract mkdir(
    path: string,
    options?: { recursive?: boolean; mode?: number },
  ): void
  abstract open(path: string, flags?: string, mode?: number): number
  abstract readdir(path: string): string[]
  abstract read(
    fd: number,
    buffer: Uint8Array, // Buffer to read into
    offset: number, // Offset in buffer to start writing to
    length: number, // Number of bytes to read
    position: number, // Position in file to read from
  ): number
  abstract rename(oldPath: string, newPath: string): void
  abstract rmdir(path: string): void
  abstract truncate(
    path: string,
    len: number, // Length to truncate to - defaults to 0
  ): void
  abstract unlink(path: string): void
  abstract utimes(path: string, atime: number, mtime: number): void
  abstract writeFile(
    path: string,
    data: string | Uint8Array,
    options?: { encoding?: string; mode?: number; flag?: string },
  ): void
  abstract write(
    fd: number,
    buffer: Uint8Array, // Buffer to read from
    offset: number, // Offset in buffer to start reading from
    length: number, // Number of bytes to write
    position: number, // Position in file to write to
  ): number
}

export type FsStats = {
  dev: number
  ino: number
  mode: number
  nlink: number
  uid: number
  gid: number
  rdev: number
  size: number
  blksize: number
  blocks: number
  atime: number
  mtime: number
  ctime: number
}

type EmscriptenFileSystem = Emscripten.FileSystemType & {
  createNode: (
    parent: FSNode | null,
    name: string,
    mode: number,
    dev?: any,
  ) => FSNode
  node_ops: FS.NodeOps
  stream_ops: FS.StreamOps & {
    dup: (stream: FSStream) => void
    mmap: (
      stream: FSStream,
      length: number,
      position: number,
      prot: any,
      flags: any,
    ) => { ptr: number; allocated: boolean }
    msync: (
      stream: FSStream,
      buffer: Uint8Array,
      offset: number,
      length: number,
      mmapFlags: any,
    ) => number
  }
} & { [key: string]: any }

type FSNode = FS.FSNode & {
  node_ops: FS.NodeOps
  stream_ops: FS.StreamOps
}

type FSStream = FS.FSStream & {
  node: FSNode
  shared: {
    refcount: number
  }
}

type FSMount = FS.Mount & {
  opts: {
    root: string
  }
}

type EmscriptenFS = PostgresMod['FS'] & {
  createNode: (
    parent: FSNode | null,
    name: string,
    mode: number,
    dev?: any,
  ) => FSNode
}

export const ERRNO_CODES = {
  EBADF: 8,
  EBADFD: 127,
  EEXIST: 20,
  EINVAL: 28,
  EISDIR: 31,
  ENODEV: 43,
  ENOENT: 44,
  ENOTDIR: 54,
  ENOTEMPTY: 55,
} as const

/**
 * Create an emscripten filesystem that uses the BaseFilesystem.
 * @param Module The emscripten module
 * @param baseFS The BaseFilesystem implementation
 * @returns The emscripten filesystem
 */
const createEmscriptenFS = (Module: PostgresMod, baseFS: BaseFilesystem) => {
  const FS = Module.FS as EmscriptenFS
  const log = baseFS.debug ? console.log : null
  const EMFS = {
    tryFSOperation<T>(f: () => T): T {
      try {
        return f()
      } catch (e: any) {
        if (!e.code) throw e
        if (e.code === 'UNKNOWN') throw new FS.ErrnoError(ERRNO_CODES.EINVAL)
        throw new FS.ErrnoError(e.code)
      }
    },
    mount(_mount: FSMount): FSNode {
      return EMFS.createNode(null, '/', 16384 | 511, 0)
    },
    syncfs(
      _mount: FS.Mount,
      _populate: any, // This has the wrong type in @types/emscripten
      _done: (err?: number | null) => unknown,
    ): void {
      // noop
    },
    createNode(
      parent: FSNode | null,
      name: string,
      mode: number,
      _dev?: any,
    ): FSNode {
      if (!FS.isDir(mode) && !FS.isFile(mode)) {
        throw new FS.ErrnoError(28)
```

The embedded copy of `src/fs/base.ts` breaks off at this point, partway through `createEmscriptenFS`.
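Both base classes above delegate `dumpTar()` to the bundled tinytar-based writer, handing it `Module.FS` and `PGDATA`. Through the public client this surfaces as `dumpDataDir()`, with `loadDataDir` as the matching import option; below is a sketch assuming that documented pair:

```ts
import { PGlite } from '@electric-sql/pglite'

const db = new PGlite('idb://backup-demo')
await db.exec(`
  CREATE TABLE IF NOT EXISTS t (v text);
  INSERT INTO t VALUES ('x');
`)

// Snapshot PGDATA as a tarball (a File in the browser, a Blob elsewhere).
const dump = await db.dumpDataDir('gzip')

// A fresh instance can boot straight from the snapshot.
const restored = new PGlite({ loadDataDir: dump })
console.log((await restored.query('SELECT v FROM t')).rows)
```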