UNPKG

fs-zoo

File system abstractions and implementations

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.NodeCrud = void 0; const util_1 = require("memfs/lib/node-to-fsa/util"); const util_2 = require("../crud/util"); const util_3 = require("../fsa-to-crud/util"); class NodeCrud { constructor(options) { this.options = options; this.separator = options.separator ?? '/'; let dir = options.dir; const last = dir[dir.length - 1]; if (last !== this.separator) dir = dir + this.separator; this._root = dir; this.fs = options.fs; } async _dir(collection) { const dir = this._root + (collection.length ? collection.join(this.separator) + this.separator : ''); // Avoid statting the root directory when collection is empty; some adapters // (e.g., FSA-backed) don't accept '/' if (!collection.length) return dir; const fs = this.fs; try { const stats = await fs.promises.stat(dir); if (!stats.isDirectory()) throw (0, util_3.newFolder404Error)(collection); return dir; } catch (error) { if (error && typeof error === 'object') { switch (error.code) { case 'ENOENT': case 'ENOTDIR': throw (0, util_3.newFolder404Error)(collection); } } throw error; } } async _file(collection, id, flags) { (0, util_2.assertType)(collection, 'get', 'crudfs'); (0, util_1.assertName)(id, 'get', 'crudfs'); const dir = await this._dir(collection); const filename = dir + id; return await this.fs.promises.open(filename, flags); } async write(path, options) { const [collection, id] = (0, util_2.parseId)(path); (0, util_2.assertType)(collection, 'put', 'crudfs'); (0, util_1.assertName)(id, 'put', 'crudfs'); const dir = this._root + (collection.length ? collection.join(this.separator) + this.separator : ''); const fs = this.fs; if (dir.length > 1) await fs.promises.mkdir(dir, { recursive: true }); const filename = dir + id; // Handle throwIf options for file existence checks const throwIf = options?.throwIf; const pos = options?.pos; // For positioned writes, we need special handling that Node.js createWriteStream doesn't support well if (typeof pos === 'number') { // Fall back to file handle approach for positioned writes to ensure compatibility return this._createPositionedWritableStream(filename, options); } // Handle existence checks for regular (non-positioned) writes if (throwIf) { try { const stats = await fs.promises.stat(filename); if (throwIf === 'exists') throw (0, util_3.newExistsError)(); if (!stats.isFile()) throw (0, util_1.newNotAllowedError)(); } catch (error) { if (error && typeof error === 'object' && error.code === 'ENOENT') { if (throwIf === 'missing') throw (0, util_3.newMissingError)(); } else throw error; } } // Handle regular writes with createWriteStream let flags = ''; if (throwIf === 'exists') { flags = 'wx'; // Create exclusive, fail if exists } else if (throwIf === 'missing') { flags = 'r+'; // Open existing file, fail if missing } else { flags = 'w'; // Default: create or truncate } const nodeWriteStream = fs.createWriteStream(filename, { flags }); return new WritableStream({ write: async (chunk) => { return new Promise((resolve, reject) => { nodeWriteStream.write(chunk, (error) => { if (error) reject(error); else resolve(); }); }); }, close: async () => { return new Promise((resolve, reject) => { nodeWriteStream.end((error) => { if (error) reject(error); else resolve(); }); }); }, abort: async (reason) => { nodeWriteStream.destroy(); throw reason; }, }); } async _createPositionedWritableStream(filename, options) { const fs = this.fs; const throwIf = options?.throwIf; let pos = options?.pos; let handle; // Open file handle based 
on throwIf option if (throwIf === 'exists') { // Create new file exclusively; fail if it already exists. try { handle = await fs.promises.open(filename, 'wx'); } catch (error) { if (error && typeof error === 'object' && error.code === 'EEXIST') throw (0, util_3.newExistsError)(); throw error; } } else if (throwIf === 'missing') { // Open existing file for read/write; fail if missing. try { handle = await fs.promises.open(filename, 'r+'); } catch (error) { if (error && typeof error === 'object' && error.code === 'ENOENT') throw (0, util_3.newMissingError)(); throw error; } } else { // Default: open for read/write; if missing, create then reopen without truncation. try { handle = await fs.promises.open(filename, 'r+'); } catch (error) { if (error && typeof error === 'object' && error.code === 'ENOENT') { await fs.promises.writeFile(filename, new Uint8Array()); handle = await fs.promises.open(filename, 'r+'); } else { throw error; } } } if (pos === -1) { // Append: compute size without changing file content or position semantics const stats = await handle.stat(); pos = stats.size; } return new WritableStream({ write: async (chunk) => { if (!handle) throw new Error('File handle is not available'); const result = await handle.write(chunk, 0, chunk.byteLength, pos); pos += result.bytesWritten; }, close: async () => { if (handle) { await handle.close(); handle = undefined; } }, abort: async (reason) => { if (handle) { await handle.close(); handle = undefined; } throw reason; }, }); } async dir(path, options) { const [collection, id] = (0, util_2.parseId)(path); (0, util_2.assertType)(collection, 'dir', 'crudfs'); (0, util_1.assertName)(id, 'dir', 'crudfs'); const dir = this._root + (collection.length ? collection.join(this.separator) + this.separator : ''); const fs = this.fs; if (dir.length > 1) await fs.promises.mkdir(dir, { recursive: true }); const filename = dir + id; const throwIf = options?.throwIf; // Determine existence and type let exists = false; try { const st = await fs.promises.stat(filename); exists = st.isDirectory() ? 'dir' : st.isFile() ? 
'file' : false; } catch (e) { if (!(e && typeof e === 'object' && e.code === 'ENOENT')) throw e; } switch (throwIf) { case 'exists': { if (exists) throw (0, util_3.newExistsError)(); await fs.promises.mkdir(filename, { recursive: true }); return; } case 'missing': { if (exists !== 'dir') throw (0, util_3.newMissingError)(); return; // directory already there } default: { if (!exists) { await fs.promises.mkdir(filename, { recursive: true }); } else if (exists === 'file') { // cannot replace file with directory under default behavior throw (0, util_3.newExistsError)(); } return; } } } async put(path, data, options) { // Use the write method for file data const writerStream = await this.write(path, options); const writer = writerStream.getWriter(); try { await writer.write(data); await writer.close(); } catch (error) { await writer.abort(); throw error; } } async read(path) { const [collection, id] = (0, util_2.parseId)(path); (0, util_2.assertType)(collection, 'read', 'crudfs'); (0, util_1.assertName)(id, 'read', 'crudfs'); const dir = await this._dir(collection); const filename = dir + id; try { const reader = this.fs.createReadStream(filename); return new ReadableStream({ start(controller) { reader.on('data', chunk => { controller.enqueue(new Uint8Array(chunk)); }); reader.on('end', () => { try { controller.close(); } finally { reader.destroy(); } }); reader.on('error', error => { if (error && typeof error === 'object') { switch (error.code) { case 'ENOENT': controller.error((0, util_3.newFile404Error)(collection, id)); break; default: controller.error(error); } } }); }, cancel() { reader.destroy(); }, }); } catch (error) { if (error && typeof error === 'object') { switch (error.code) { case 'ENOENT': throw (0, util_3.newFile404Error)(collection, id); } } throw error; } } async file(path) { const [collection, id] = (0, util_2.parseId)(path); const dir = await this._dir(collection); const filename = dir + id; const data = await this.fs.promises.readFile(filename); return new File([new Blob([data])], id); } async del(path, silent) { const [collection, id] = (0, util_2.parseId)(path); (0, util_2.assertType)(collection, 'del', 'crudfs'); (0, util_1.assertName)(id, 'del', 'crudfs'); try { const dir = await this._dir(collection); const filename = dir + id; await this.fs.promises.unlink(filename); } catch (error) { if (!!silent) return; if (error && typeof error === 'object') { switch (error.code) { case 'ENOENT': throw (0, util_3.newFile404Error)(collection, id); } } throw error; } } async info(path) { const [collection, id] = (0, util_2.parseId)(path); const isRootPath = !collection.length && !id; if (!isRootPath) { (0, util_2.assertType)(collection, 'info', 'crudfs'); (0, util_1.assertName)(id, 'info', 'crudfs'); } await this._dir(collection); if (isRootPath) { return { type: 'collection', id: '', readable: true, }; } try { // Build base dir path without introducing a double slash when collection is empty const base = this._root + (collection.length ? collection.join(this.separator) + this.separator : ''); const fullPath = base + id; const stats = await this.fs.promises.stat(fullPath); // Access mode constants (Node: F_OK=0, X_OK=1, W_OK=2, R_OK=4) const R_OK = 4; const check = async (mode) => { try { await this.fs.access(fullPath, mode); return true; } catch { return false; } }; if (stats.isFile()) { // Only perform a non-destructive readability check; some adapters may // implement write-access checks in a destructive way. 
const readable = await check(R_OK); return { type: 'resource', id, size: stats.size, modified: stats.mtimeMs, readable, }; } else if (stats.isDirectory()) { const readable = await check(R_OK); return { type: 'collection', id: '', readable, }; } else { throw (0, util_3.newMissingError)(); } } catch (error) { if (error && typeof error === 'object') { switch (error.code) { case 'ENOENT': throw (0, util_3.newFile404Error)(collection, id); } } throw error; } } async drop(path, silent) { const collection = (0, util_2.parseParts)(path); (0, util_2.assertType)(collection, 'drop', 'crudfs'); try { const dir = await this._dir(collection); const isRoot = dir === this._root; if (isRoot) { const list = (await this.fs.promises.readdir(dir)); for (const entry of list) await this.fs.promises.rm(dir + entry, { recursive: true }); } else { await this.fs.promises.rm(dir, { recursive: true }); } } catch (error) { if (!silent) throw error; } } async *scan(path) { const collection = (0, util_2.parseParts)(path); (0, util_2.assertType)(collection, 'scan', 'crudfs'); const dir = await this._dir(collection); const dirents = (await this.fs.promises.readdir(dir, { withFileTypes: true })); for (const entry of dirents) { if (entry.isFile()) { yield { type: 'resource', id: '' + entry.name, }; } else if (entry.isDirectory()) { yield { type: 'collection', id: '' + entry.name, }; } } } async list(path) { const entries = []; for await (const entry of this.scan(path)) entries.push(entry); return entries; } async from(path) { const collection = (0, util_2.parseParts)(path); (0, util_2.assertType)(collection, 'from', 'crudfs'); const dir = this._root + (collection.length ? collection.join(this.separator) + this.separator : ''); const fs = this.fs; if (dir.length > 1) await fs.promises.mkdir(dir, { recursive: true }); await this._dir(collection); return new NodeCrud({ dir, fs: this.fs, separator: this.separator, }); } } exports.NodeCrud = NodeCrud;
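
Below is a minimal usage sketch of the exported NodeCrud class, inferred from the constructor options and the methods above. It is not part of this file: the require path, the target directory, the string path format passed to put/file/list/del/drop (split into collection and id by parseId/parseParts), and a Node.js 20+ runtime with the global File, Blob, and WritableStream are all assumptions.

// Usage sketch (illustrative; module path, directory, and path format are assumptions).
const fs = require('fs');
const { NodeCrud } = require('./NodeCrud'); // hypothetical path to this compiled module

async function demo() {
  // Root directory of the CRUD store; collection directories are created on write.
  const crud = new NodeCrud({ dir: '/tmp/crud-demo', fs });
  // Store a resource (assumed path format: 'collection/id', split by parseId).
  await crud.put('notes/hello.txt', new TextEncoder().encode('hello world'));
  // Read it back as a File object (global File/Blob, Node.js 20+).
  const file = await crud.file('notes/hello.txt');
  console.log(await file.text()); // "hello world"
  // Enumerate the 'notes' collection.
  console.log(await crud.list('notes')); // e.g. [{ type: 'resource', id: 'hello.txt' }]
  // Delete the resource, then drop the collection.
  await crud.del('notes/hello.txt');
  await crud.drop('notes');
}

demo().catch(console.error);

For streaming access over the same paths, write() returns a WHATWG WritableStream (which put() uses internally) and read() returns a ReadableStream of Uint8Array chunks.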