opfs-mock
Mock all Origin Private File System (OPFS) APIs for your Jest or Vitest tests
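A minimal usage sketch for a Vitest test, assuming Vitest globals (beforeEach, test, expect) are enabled; the file name "notes.txt" is only illustrative:

// importing opfs-mock installs the mock automatically via the mockOPFS() call at the bottom of the module
import { resetMockOPFS } from "opfs-mock";

beforeEach(() => {
  // start each test from a fresh, empty OPFS root
  resetMockOPFS();
});

test("writes and reads back a file", async () => {
  const root = await navigator.storage.getDirectory();
  const handle = await root.getFileHandle("notes.txt", { create: true });
  const writable = await handle.createWritable();
  await writable.write("hello");
  await writable.close();
  const file = await handle.getFile();
  expect(await file.text()).toBe("hello");
});
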
//#region src/utils.ts
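// Type guards that narrow a FileSystemHandle by its kind.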
const isFileHandle = (handle) => {
return handle.kind === "file";
};
const isDirectoryHandle = (handle) => {
return handle.kind === "directory";
};
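// Recursively sums the byte size of every file under the given directory handle.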
const getSizeOfDirectory = async (directory) => {
let totalSize = 0;
for await (const handle of directory.values()) if (isFileHandle(handle)) {
const file = await handle.getFile();
totalSize += file.size;
} else if (isDirectoryHandle(handle)) totalSize += await getSizeOfDirectory(handle);
return totalSize;
};
//#endregion
//#region src/opfs.ts
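// Creates a mock FileSystemFileHandle whose contents live in memory as a Uint8Array (fileData.content).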
const fileSystemFileHandleFactory = (name, fileData) => {
return {
kind: "file",
name,
isSameEntry: async (other) => {
return other.name === name && other.kind === "file";
},
getFile: async () => new File([fileData.content], name),
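// Mock FileSystemWritableFileStream: writes are buffered locally and only committed to fileData.content on close().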
createWritable: async (options) => {
const keepExistingData = options?.keepExistingData;
let abortReason = "";
let isAborted = false;
let isClosed = false;
let content = keepExistingData ? new Uint8Array(fileData.content) : new Uint8Array();
let cursorPosition = keepExistingData ? fileData.content.length : 0;
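// The underlying WritableStream sink is a no-op; the write/close/abort methods assigned below operate on the local buffer instead.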
const writableStream = new WritableStream({
write: () => {},
close: () => {},
abort: () => {}
});
return Object.assign(writableStream, {
// Call the prototype method directly so the own property added by Object.assign does not shadow it and recurse.
getWriter: () => WritableStream.prototype.getWriter.call(writableStream),
write: async function(chunk) {
if (isAborted) throw new Error(abortReason);
if (isClosed) throw new TypeError("Cannot write to a CLOSED writable stream");
if (chunk === void 0) throw new TypeError("Cannot write undefined data to the stream");
if (typeof chunk === "object" && "type" in chunk && chunk.type === "truncate") {
if (typeof chunk.size !== "number" || chunk.size < 0) throw new TypeError("Invalid size value in truncate parameters");
if (chunk.size < content.length) content = content.slice(0, chunk.size);
else {
const extended = new Uint8Array(chunk.size);
extended.set(content);
content = extended;
}
cursorPosition = Math.min(cursorPosition, chunk.size);
return;
}
let encoded;
if (typeof chunk === "string") encoded = new TextEncoder().encode(chunk);
else if (chunk instanceof Blob) {
const text = await chunk.text();
encoded = new TextEncoder().encode(text);
} else if (ArrayBuffer.isView(chunk)) encoded = new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
else if (chunk instanceof ArrayBuffer) encoded = new Uint8Array(chunk);
else if (typeof chunk === "object" && "data" in chunk) {
if (chunk.position !== void 0 && (typeof chunk.position !== "number" || chunk.position < 0)) throw new TypeError("Invalid position value in write parameters");
if (chunk.size !== void 0 && (typeof chunk.size !== "number" || chunk.size < 0)) throw new TypeError("Invalid size value in write parameters");
if (chunk.position !== void 0 && chunk.position !== null) cursorPosition = chunk.position;
const data = chunk.data;
if (data === void 0 || data === null) encoded = new Uint8Array();
else if (typeof data === "string") encoded = new TextEncoder().encode(data);
else if (data instanceof Blob) {
const text = await data.text();
encoded = new TextEncoder().encode(text);
} else if (ArrayBuffer.isView(data)) encoded = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
else if (data instanceof ArrayBuffer) encoded = new Uint8Array(data);
else throw new TypeError("Invalid data in WriteParams");
} else throw new TypeError("Invalid data type written to the file. Data must be of type FileSystemWriteChunkType.");
const requiredSize = cursorPosition + encoded.length;
if (content.length < requiredSize) {
const extended = new Uint8Array(requiredSize);
extended.set(content);
content = extended;
}
content.set(encoded, cursorPosition);
cursorPosition += encoded.length;
},
close: async function() {
if (isClosed) throw new TypeError("Cannot close a CLOSED writable stream");
if (isAborted) throw new TypeError("Cannot close an ERRORED writable stream");
isClosed = true;
fileData.content = content;
},
abort: async function(reason) {
if (isAborted) return;
if (reason && !abortReason) abortReason = reason;
isAborted = true;
return Promise.resolve(void 0);
},
truncate: async function(size) {
if (size < 0) throw new DOMException("Invalid truncate size", "IndexSizeError");
if (size < content.length) content = content.slice(0, size);
else if (size > content.length) {
const newBuffer = new Uint8Array(size);
newBuffer.set(content);
content = newBuffer;
}
cursorPosition = Math.min(cursorPosition, size);
},
seek: async function(position) {
if (position < 0) throw new DOMException("Invalid seek position", "IndexSizeError");
cursorPosition = position;
}
});
},
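// Mock FileSystemSyncAccessHandle: reads and writes act synchronously on fileData.content; getSize/read/write/truncate/flush throw once the handle has been closed.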
createSyncAccessHandle: async () => {
let closed = false;
return {
getSize: () => {
if (closed) throw new DOMException("The access handle is closed", "InvalidStateError");
return fileData.content.byteLength;
},
read: (buffer, { at = 0 } = {}) => {
if (closed) throw new DOMException("The access handle is closed", "InvalidStateError");
const content = fileData.content;
if (at >= content.length) return 0;
const available = content.length - at;
const writable = buffer instanceof DataView ? buffer.byteLength : buffer.length;
const bytesToRead = Math.min(writable, available);
const slice = content.subarray(at, at + bytesToRead);
if (buffer instanceof DataView) for (let i = 0; i < slice.length; i++) buffer.setUint8(i, slice[i]);
else buffer.set(slice, 0);
return bytesToRead;
},
write: (data, { at = 0 } = {}) => {
if (closed) throw new DOMException("The access handle is closed", "InvalidStateError");
const writeLength = data instanceof DataView ? data.byteLength : data.length;
const requiredSize = at + writeLength;
if (fileData.content.length < requiredSize) {
const newBuffer = new Uint8Array(requiredSize);
newBuffer.set(fileData.content);
fileData.content = newBuffer;
}
if (data instanceof DataView) for (let i = 0; i < data.byteLength; i++) fileData.content[at + i] = data.getUint8(i);
else fileData.content.set(data, at);
return writeLength;
},
truncate: (size) => {
if (closed) throw new DOMException("The access handle is closed", "InvalidStateError");
if (size < fileData.content.length) fileData.content = fileData.content.slice(0, size);
else if (size > fileData.content.length) {
const newBuffer = new Uint8Array(size);
newBuffer.set(fileData.content);
fileData.content = newBuffer;
}
},
flush: async () => {
if (closed) throw new DOMException("The access handle is closed", "InvalidStateError");
},
close: async () => {
closed = true;
}
};
}
};
};
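// Creates a mock FileSystemDirectoryHandle that stores child file and directory handles in in-memory Maps.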
const fileSystemDirectoryHandleFactory = (name) => {
const files = new Map();
const directories = new Map();
const getJoinedMaps = () => {
return new Map([...files, ...directories]);
};
return {
kind: "directory",
name,
isSameEntry: async (other) => {
return other.name === name && other.kind === "directory";
},
getFileHandle: async (fileName, options) => {
if (!files.has(fileName) && options?.create) files.set(fileName, fileSystemFileHandleFactory(fileName, { content: new Uint8Array() }));
const fileHandle = files.get(fileName);
if (!fileHandle) throw new DOMException(`File not found: ${fileName}`, "NotFoundError");
return fileHandle;
},
getDirectoryHandle: async (dirName, options) => {
if (!directories.has(dirName) && options?.create) directories.set(dirName, fileSystemDirectoryHandleFactory(dirName));
const directoryHandle = directories.get(dirName);
if (!directoryHandle) throw new DOMException(`Directory not found: ${dirName}`, "NotFoundError");
return directoryHandle;
},
removeEntry: async (entryName, options) => {
if (files.has(entryName)) files.delete(entryName);
else if (directories.has(entryName)) if (options?.recursive) directories.delete(entryName);
else throw new DOMException(`Failed to remove directory: ${entryName}`, "InvalidModificationError");
else throw new DOMException(`No such file or directory: ${entryName}`, "NotFoundError");
},
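// Iteration helpers: the joined maps yield files first, then subdirectories; the async iterator produces [name, handle] pairs.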
[Symbol.asyncIterator]: async function* () {
const entries = getJoinedMaps();
for (const [name$1, handle] of entries) yield [name$1, handle];
return void 0;
},
entries: async function* () {
const joinedMaps = getJoinedMaps();
yield* joinedMaps.entries();
},
keys: async function* () {
const joinedMaps = getJoinedMaps();
yield* joinedMaps.keys();
},
values: async function* () {
const joinedMaps = getJoinedMaps();
yield* joinedMaps.values();
},
resolve: async function(possibleDescendant) {
const traverseDirectory = async (directory, target, path = []) => {
if (await directory.isSameEntry(target)) return path;
for await (const [name$1, handle] of directory.entries()) if (isDirectoryHandle(handle)) {
const result = await traverseDirectory(handle, target, [...path, name$1]);
if (result) return result;
} else if (isFileHandle(handle)) {
if (await handle.isSameEntry(target)) return [...path, name$1];
}
return null;
};
return traverseDirectory(this, possibleDescendant);
}
};
};
//#endregion
//#region src/index.ts
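// Mock StorageManager: estimate() adds the configured baseline usage to the computed size of the mock root directory.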
const storageFactory = ({ usage = 0, quota = 1024 ** 3 } = {}) => {
const root = fileSystemDirectoryHandleFactory("root");
return {
estimate: async () => {
const defaultUsage = usage;
const calculatedUsage = await getSizeOfDirectory(root);
return {
usage: defaultUsage + calculatedUsage,
quota
};
},
getDirectory: async () => {
return root;
},
persist: async () => {
return true;
},
persisted: async () => {
return true;
}
};
};
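// Defines globalThis.navigator and navigator.storage with the mock StorageManager, but only when they are not already present.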
const mockOPFS = () => {
if (!("navigator" in globalThis)) Object.defineProperty(globalThis, "navigator", {
value: {},
writable: true
});
if (!globalThis.navigator.storage) Object.defineProperty(globalThis.navigator, "storage", {
value: storageFactory(),
writable: true
});
};
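// Swaps navigator.storage.getDirectory for one that returns a fresh, empty root, discarding previously mocked files; call it between tests.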
const resetMockOPFS = () => {
const root = fileSystemDirectoryHandleFactory("root");
Object.defineProperty(globalThis.navigator.storage, "getDirectory", {
value: () => root,
writable: true
});
};
if (typeof globalThis !== "undefined") mockOPFS();
//#endregion
export { mockOPFS, resetMockOPFS, storageFactory };