UNPKG

@alwatr/node-fs

Version:

Enhanced file system operations in Node.js with asynchronous queue to prevent parallel writes.

5 lines (4 loc) 5.59 kB
/** 📦 @alwatr/node-fs v5.5.27 */ __dev_mode__: console.debug("📦 @alwatr/node-fs v5.5.27"); "use strict";var __defProp=Object.defineProperty;var __getOwnPropDesc=Object.getOwnPropertyDescriptor;var __getOwnPropNames=Object.getOwnPropertyNames;var __hasOwnProp=Object.prototype.hasOwnProperty;var __export=(target,all)=>{for(var name in all)__defProp(target,name,{get:all[name],enumerable:true})};var __copyProps=(to,from,except,desc)=>{if(from&&typeof from==="object"||typeof from==="function"){for(let key of __getOwnPropNames(from))if(!__hasOwnProp.call(to,key)&&key!==except)__defProp(to,key,{get:()=>from[key],enumerable:!(desc=__getOwnPropDesc(from,key))||desc.enumerable})}return to};var __toCommonJS=mod=>__copyProps(__defProp({},"__esModule",{value:true}),mod);var main_exports={};__export(main_exports,{existsSync:()=>import_node_fs4.existsSync,makeEmptyFile:()=>makeEmptyFile,readFile:()=>readFile,readFileSync:()=>readFileSync,readJson:()=>readJson,resolve:()=>import_node_path3.resolve,unlink:()=>import_promises4.unlink,writeFile:()=>writeFile,writeFileSync:()=>writeFileSync,writeJson:()=>writeJson});module.exports=__toCommonJS(main_exports);var import_node_fs=require("node:fs");var import_promises=require("node:fs/promises");var import_flat_string=require("@alwatr/flat-string");var import_async_queue=require("@alwatr/async-queue");var import_logger=require("@alwatr/logger");var logger=(0,import_logger.createLogger)("@alwatr/node-fs");var asyncQueue=new import_async_queue.AsyncQueue;function readFileSync(path){logger.logMethodArgs?.("readFileSync","..."+path.slice(-32));try{return(0,import_flat_string.flatString)((0,import_node_fs.readFileSync)(path,{encoding:"utf-8",flag:"r"}))}catch(err){logger.error("readFileSync","read_file_failed",{path},err);throw new Error("read_file_failed",{cause:err.cause})}}function readFile(path){logger.logMethodArgs?.("readFile","..."+path.slice(-32));return asyncQueue.push(path,async()=>{try{return(0,import_flat_string.flatString)(await(0,import_promises.readFile)(path,{encoding:"utf-8",flag:"r"}))}catch(err){logger.error("readFile","read_file_failed",{path},err);throw new Error("read_file_failed",{cause:err.cause})}})}var import_node_fs2=require("node:fs");var import_promises2=require("node:fs/promises");var import_node_path=require("node:path");function writeFileSync(path,content){logger.logMethodArgs?.("writeFileSync","..."+path.slice(-32));try{const pathExists=(0,import_node_fs2.existsSync)(path);if(!pathExists){const dir=(0,import_node_path.dirname)(path);if(!(0,import_node_fs2.existsSync)(dir)){(0,import_node_fs2.mkdirSync)(dir,{recursive:true})}}(0,import_node_fs2.writeFileSync)(path+".tmp",content,{encoding:"utf-8",flag:"w"});if(pathExists){(0,import_node_fs2.renameSync)(path,path+".bak")}(0,import_node_fs2.renameSync)(path+".tmp",path);logger.logOther?.("writeFileSync success","..."+path.slice(-32))}catch(err){logger.error("writeFileSync","write_file_failed",{path},err);throw new Error("write_file_failed",{cause:err.cause})}}function writeFile(path,content){logger.logMethodArgs?.("writeFile","..."+path.slice(-32));return asyncQueue.push(path,async()=>{try{logger.logOther?.("writeFile start","..."+path.slice(-32));const pathExists=(0,import_node_fs2.existsSync)(path);if(!pathExists){const dir=(0,import_node_path.dirname)(path);if(!(0,import_node_fs2.existsSync)(dir)){await(0,import_promises2.mkdir)(dir,{recursive:true})}}await(0,import_promises2.writeFile)(path+".tmp",content,{encoding:"utf-8",flag:"w"});if(pathExists){await(0,import_promises2.rename)(path,path+".bak")}await(0,import_promises2.rename)(path+".tmp",path);logger.logOther?.("writeFile success","..."+path.slice(-32))}catch(err){logger.error("writeFile","write_file_failed",{path},err);throw new Error("write_file_failed",{cause:err.cause})}})}function parseJson(content){try{return JSON.parse(content)}catch(err){logger.error("parseJson","invalid_json",err);throw new Error("invalid_json",{cause:err.cause})}}function jsonStringify(data){try{return JSON.stringify(data)}catch(err){logger.error("jsonStringify","stringify_failed",err);throw new Error("stringify_failed",{cause:err.cause})}}function readJson(path,sync=false){logger.logMethodArgs?.("readJson",{path:path.slice(-32),sync});if(sync===true){return parseJson(readFileSync(path))}else{return readFile(path).then(content=>parseJson(content))}}var import_flat_string2=require("@alwatr/flat-string");function writeJson(path,data,sync=false){logger.logMethodArgs?.("writeJson","..."+path.slice(-32));const content=(0,import_flat_string2.flatString)(jsonStringify(data));return sync===true?writeFileSync(path,content):writeFile(path,content)}var import_node_fs3=require("node:fs");var import_promises3=require("node:fs/promises");var import_node_path2=require("node:path");async function makeEmptyFile(path){logger.logMethodArgs?.("makeEmptyFile","..."+path.slice(-32));try{const pathExists=(0,import_node_fs3.existsSync)(path);if(!pathExists){const dir=(0,import_node_path2.dirname)(path);if(!(0,import_node_fs3.existsSync)(dir)){await(0,import_promises3.mkdir)(dir,{recursive:true})}}await(await(0,import_promises3.open)(path,"w")).close()}catch(err){logger.error("makeEmptyFile","make_file_failed",{path},err);throw new Error("make_file_failed",{cause:err.cause})}}var import_node_path3=require("node:path");var import_node_fs4=require("node:fs");var import_promises4=require("node:fs/promises");0&&(module.exports={existsSync,makeEmptyFile,readFile,readFileSync,readJson,resolve,unlink,writeFile,writeFileSync,writeJson}); //# sourceMappingURL=main.cjs.map