ninjs-lodash
/**
 * File System Utils
 */

'use strict'

const _ = require('lodash')
const fs = require('fs-extra')
const glob = require('glob')
const core = require('./core')

exports = module.exports = {
  "scan": scan,
  "scanJs": scanJs,
  "scanMd": scanMd,
  "scanPackages": scanPackages,
  "scanPackageDirs": scanPackageDirs,
  "pkgs": pkgs,
  "pkg": pkg,
  "writepkg": writepkg,
  "ostat": ostat,
  "exists": exists,
  "existsSync": existsSync,
  "stat": stat,
  "statSync": statSync,
  "lstat": lstat,
  "lstatSync": lstatSync,
  "fstat": fstat,
  "fstatSync": fstatSync,
  "copy": copy,
  "rename": rename,
  "remove": remove,
  "move": move,
  "dirPaths": dirPaths,
  "ensureDir": ensureDir,
  "mkdirs": mkdirs,
  "emptyDir": emptyDir,
  "readFile": readFile,
  "readBufferSync": readBufferSync,
  "readFileSync": readFileSync,
  "writeFileSync": writeFileSync,
  "outputFile": outputFile,
  "readJson": readJson,
  "outputJson": outputJson,
  "base64FromFile": base64FromFile,
  "base64ToFile": base64ToFile,
  "treeFilePaths": treeFilePaths,
  "treeFiles": treeFiles,
  "treeStatPaths": treeStatPaths,
  "treeStats": treeStats,
  "tree": tree
}

// _.assign(_, { fs: fs })

_.mixin(exports)

// SCAN VIA GLOB -> TOO SLOW WITH A LOT OF node_modules
// params = _.merge({}, {
//   cwd: process.cwd(),
//   root: process.cwd(),
//   dot: true,
//   nosort: false,
//   stat: true,
//   cache: null,
//   ignore: ['node_modules/**/*'],
//   absolute: true
// }, params)
// return glob(pattern, params, _.cb(callback)) -> too slow if a lot of files (ie. node_modules/**/*)

// Walks the tree and keeps only the stats whose path matches the pattern and none of the excludes
function scan(options, callback) {
  let { pattern = '', params } = options
  let excludes = ['.git', 'node_modules']
  pattern = pattern ? pattern.replace('**/*', '') : ''
  params = _.merge({ path: '', root: undefined, excludes: excludes }, params)
  let isStats = _.get(params, 'stats', false)
  return tree(params, (err, result) => {
    if (err) return _.fail(err, callback)
    let results = treeStats(result, [], (stats) => {
      return _.compact(_.map(stats, (stat) => {
        let path = _.get(stat, 'path')
        let discard = false
        _.each(excludes, (exclude) => {
          if (_.includes(path, exclude)) discard = true
        })
        if (pattern && !_.includes(path, pattern)) discard = true
        return discard ? null : stat
      }))
    })
    results = results || []
    results = isStats ? results : _.map(results, (rslt) => { return rslt.path })
    return _.done(results, callback)
  })
}

function scanJs(options, callback) {
  return scan({ pattern: '**/*.js', params: options }, _.cb(callback))
}

function scanMd(options, callback) {
  return scan({ pattern: '**/*.md', params: options }, _.cb(callback))
}

function scanPackages(options, callback) {
  return scan({ pattern: '**/*package.json', params: options }, _.cb(callback))
}

function scanPackageDirs(options, callback) {
  return scanPackages(options, (err, results) => {
    if (err) return _.fail(err, callback)
    let isStats = _.get(options, 'stats', false)
    return _.done(_.map(results, (result) => {
      return isStats ? _.path.dir(_.get(result, 'path')) : _.path.dir(result)
    }), callback)
  })
}

// Reads every package.json found under the tree and annotates it with its _root dir and _path
function pkgs(options, callback) {
  return scanPackages(_.merge(options, { stats: false }), (err, results) => {
    let tasks = _.map(results, (result) => {
      return _.async.reflect(function (cb) {
        return readJson(result, (err, jresult) => {
          if (err) return _.fail(err, cb)
          _.set(jresult, '_root', _.path.dir(result))
          _.set(jresult, '_path', result)
          return _.done(jresult, cb)
        })
      })
    })
    return _.async.parallel(tasks, (err, results) => {
      if (err) return _.fail(err, callback)
      return _.done(_.compact(_.map(results, (result) => { return _.get(result, 'value') })), callback)
    })
  })
}

function pkg(options, callback) {
  let { name = '' } = options
  if (!name || !_.isString(name)) return _.fail('Invalid name', callback)
  return pkgs(options, (err, results) => {
    let result = _.find(results, { name: name })
    return result ? _.done(result, callback) : _.fail('Package not found', callback)
  })
}

function writepkg(src, data, callback) {
  callback = _.isFunction(callback) ? callback : _.isFunction(data) ? data : _.isFunction(src) ? src : function () {}
  data = _.isPlainObject(data) ? data : _.isPlainObject(src) ? src : null
  src = src && _.isString(src) ? src : data ? _.get(data, '_path') || '' : ''
  if (!src || !_.isString(src)) return _.fail('Invalid src', callback)
  if (!data) return _.fail('Invalid data', callback)
  data = _.omit(data, ['_root', '_path', 'cmd', 'env', 'excludes'])
  // order props by picking
  let obj = _.pick(data, [
    "name", "version", "main", "license", "author", "contributors", "description", "keywords",
    "config", "repository", "homepage", "bugs", "bin", "man", "scripts", "directories", "files",
    "engines", "engineStrict", "dependencies", "devDependencies", "peerDependencies",
    "bundledDependencies", "optionalDependencies", "os", "cpu", "preferGlobal", "private", "publishConfig"
  ])
  let str = _.attempt(_.files('js').toPretty, obj, { wrap: 160, sort: false, align: true, indent: ' ', colors: false })
  str = str && _.isString(str) && !_.isError(str) ? str : ''
  return outputJson(src, str || obj, _.cb(callback))
}

// Builds a single stat object (isDir/isFile/isSym, url, rel, size) for a path, relative to root
function ostat(options, callback) {
  let path = _.get(options, 'path') || ''
  let root = _.get(options, 'root') || ''
  if (!path) return _.fail('Invalid path', callback)
  return fs.lstat(path, (err, stat) => {
    if (err) return _.fail(err, callback)
    let rel = root ? path.replace(root, '') : path
    rel = _.startsWith(rel, _.path.sep) ? rel.substr(1) : rel
    let split = rel ? _.compact(rel.split(_.path.sep)) : []
    let url = '/' + _.join(split, '/')
    // let cleanStat = _._.omit(stat, ['uid', 'gid', 'rdev', 'nlink', 'mode', 'dev', 'ino', 'atime', 'mtime', 'ctime', 'birthtime'])
    let isDir = stat.isDirectory()
    let isSym = stat.isSymbolicLink()
    let isFifo = stat.isFIFO()
    let isSocket = stat.isSocket()
    let size = _.get(stat, 'size') || 0
    let result = _.assign({
      isDir: isDir,
      isFile: !isDir,
      isSym: isSym,
      // isFifo: isFifo,
      // isSocket: isSocket,
      url: url,
      rel: rel,
      root: root,
      path: path,
      size: size,
      sizef: ''
    }, isDir ? { dirs: [], files: [] } : {})
    return _.done(result, callback)
  })
}

function exists(src, callback) {
  return fs.access(src, fs.constants.F_OK, (err) => {
    return err ? _.fail(err, callback) : _.done(src, callback)
  })
}

function existsSync(path) {
  return !_.isError(_.attempt(fs.accessSync, path, fs.constants.F_OK)) ? true : false
}

function stat(path, callback) { return fs.stat(path, _.cb(callback)) }

function statSync(path) { return fs.statSync(path) }

function lstat(path, callback) { return fs.lstat(path, _.cb(callback)) }

function lstatSync(path) { return fs.lstatSync(path) }

function fstat(path, callback) { return fs.fstat(path, _.cb(callback)) }

function fstatSync(path) { return fs.fstatSync(path) }

function copy(src, dest, callback) { return fs.copy(src, dest, _.cb(callback)) }

function rename(src, dest, callback) { return fs.rename(src, dest, _.cb(callback)) }

function remove(src, callback) { return fs.remove(src, _.cb(callback)) }

function move(src, dest, callback) { return fs.move(src, dest, _.cb(callback)) }

function dirPaths(src, callback) {
  return fs.readdir(src, (err, results) => {
    if (err) return _.fail(err, callback)
    return _.done(_.map(results, (result) => { return _.path.join(src, result) }), callback)
  })
}

function ensureDir(src, callback) { return fs.ensureDir(src, _.cb(callback)) }

function mkdirs(src, callback) { return fs.mkdirs(src, _.cb(callback)) }

function emptyDir(src, callback) { return fs.emptyDir(src, _.cb(callback)) }

function readBufferSync(src) {
  // returns Buffer if 'utf8' option not specified
  let data = _.attempt(fs.readFileSync, src)
  return data && !_.isError(data) ? data : null
}

function readFileSync(src) {
  let data = _.attempt(fs.readFileSync, src, 'utf8')
  return data && !_.isError(data) ? data : null
}

function writeFileSync(dest, data) {
  let res = _.attempt(fs.writeFileSync, dest, data)
  return _.isError(res) ? null : dest
}

function readFile(src, callback) { return fs.readFile(src, 'utf8', _.cb(callback)) }

function outputFile(src, data, callback) {
  return fs.outputFile(src, data, (err) => {
    return err ? _.fail(err, callback) : _.done(src, callback)
  })
}

function readJson(src, callback) { return fs.readJson(src, _.cb(callback)) }

function outputJson(src, data, callback) {
  if (!src || !data || _.path.ex(src) !== 'json') return _.fail('Invalid src', callback)
  data = _.isBuffer(data) ? data.toString('utf8') : data
  data = _.isObject(data) ? JSON.stringify(data, null, ' ') : (_.isString(data) ? data || '' : '')
  return outputFile(src, data, callback)
}

/**
 * Base64
 * original: http://www.hacksparrow.com/base64-encoding-decoding-in-node-js.html
 * stackoverflow: http://stackoverflow.com/questions/28834835/readfile-in-base64-nodejs
 */

// function to encode file data to base64 encoded string
function base64FromFile(src) {
  if (!src || !_.isString(src)) return null
  // read binary data
  let bitmap = readBufferSync(src)
  // convert binary data to base64 encoded string
  bitmap = _.isBuffer(bitmap) ? bitmap : _.isString(bitmap) ? Buffer.from(bitmap) : null
  return bitmap ? bitmap.toString('base64') : null
}

// function to create file from base64 encoded string
function base64ToFile(dest, base64str) {
  if (_.notString(dest)) return null
  if (_.notString(base64str)) return null
  // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded
  let bitmap = Buffer.from(base64str, 'base64')
  // write buffer to file
  return writeFileSync(dest, bitmap)
}

function treeFilePaths(dir, memo, filter) {
  let files = treeFiles(dir, memo, filter)
  return files ? _.map(files, (file) => { return _.get(file, 'path') }) : []
}

// Flattens the file stats of a tree result into a single array, optionally filtered
function treeFiles(dir, memo, filter) {
  memo = memo && _.isArray(memo) ? memo : []
  let files = dir.files
  let filtered = _.isFunction(filter) ? _.attempt(filter, files) : undefined
  files = _.isError(filtered) ? files : filtered || files
  memo = memo.concat(files)
  _.each(dir.dirs, (odir) => { memo = treeFiles(odir, memo, filter) })
  return memo
}

function treeStatPaths(dir, memo, filter) {
  let files = treeStats(dir, memo, filter)
  return files ? _.map(files, (file) => { return _.get(file, 'path') }) : []
}

// Flattens both dir and file stats of a tree result into a single array, optionally filtered
function treeStats(dir, memo, filter) {
  memo = memo || []
  let files = _.concat(dir.dirs, dir.files)
  let filtered = _.isFunction(filter) ? _.attempt(filter, files) : undefined
  files = _.isError(filtered) ? files : filtered || files
  memo = memo.concat(files)
  _.each(dir.dirs, (odir) => { memo = treeStats(odir, memo, filter) })
  return memo
}

// Recursively stats a directory, building { dirs: [], files: [] } children and aggregating sizes
function tree(options, callback) {
  let root = _.get(options, 'root')
  let excludes = _.get(options, 'excludes') || []
  ostat(options, (err, result) => {
    if (err) return _.fail(err, callback)
    function allDone() {
      let total = 0;
      _.each(result.files, (file) => { total += file.size })
      _.each(result.dirs, (dir) => { total += dir.size })
      result.size = total
      result.sizef = _.bytes(total)
      _.done(result, callback)
    }
    if (!result.isDir) {
      return _.done(result, callback)
    } else {
      fs.readdir(result.path, (err, list) => {
        if (err) return _.fail(err, callback)
        list = excludes && excludes.length ? _.filter(list, (item) => { return !_.includes(excludes, item) }) : list
        let pending = list.length
        if (!pending) return _.done(result, callback)
        _.each(list, (file) => {
          ostat({ path: _.path.join(result.path, file), root: root }, (err, child) => {
            if (err) return _.fail(err, callback)
            if (child.isDir) {
              tree({ path: child.path, root: root, excludes: excludes }, (err, res) => {
                let total = 0;
                _.each(res.files, (file) => { total += file.size })
                _.each(res.dirs, (dir) => { total += dir.size })
                res.size = total
                res.sizef = _.bytes(total)
                result.dirs.push(res)
                // console.log(res)
                if (!--pending) allDone()
                // return _.done(result, callback)
              })
            } else {
              child.sizef = _.bytes(child.size)
              result.files.push(child)
              if (!--pending) allDone()
            }
          })
        })
      })
    }
  })
}

// ~~~~~ FS ~~~~~

// CONFIG
// _log(fs.spaces)
// _log(fs.jsonfile)
//
// MISC
// gracefulify
//
// TIMESTAMPS
// utimes
// futimes
// lutimes
//
// PATHS
// access
// fs.realpath(__dirname, (err, result) => { _log(result) })
// fstat
// lstat
// stat
// exists
//
// DIRS AND FILES
// copy
// rename
// watch
// remove
// move
//
// DIRS
// rmdir
// fsync
// mkdir
// readdir
// mkdtemp
// mkdirs
// mkdirp
// ensureDir
// emptyDir
// emptydir
// walk
//
// FILES
// readFile
// writeFile
// outputFile
// appendFile
// createFile
// ensureFile
// watchFile
// unwatchFile
// truncate
// ftruncate
//
// LINKS
// readlink
// symlink
// link
// unlink
// createLink
// ensureLink
// createSymlink
// ensureSymlink
//
// PERMS
// fchmod
// chmod
// fchown
// chown
// lchown
// lchmod
//
// STREAMS
// close
// open
// read
// write
// createReadStream
// ReadStream
// FileReadStream
// createWriteStream
// WriteStream
// FileWriteStream
//
// JSON
// readJson
// readJSON
// writeJson
// writeJSON
// outputJson
// outputJSON
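// ~~~~~ USAGE (illustrative) ~~~~~
// A minimal sketch of how these helpers could be called, assuming this file is
// saved as fs.js next to core.js; the paths, variable names, and logging below
// are assumptions for illustration only, not part of the module.
//
// const xfs = require('./fs')
//
// // walk the current directory (skipping .git and node_modules) and print totals
// xfs.tree({ path: process.cwd(), root: process.cwd(), excludes: ['.git', 'node_modules'] }, (err, result) => {
//   if (err) return console.error(err)
//   console.log(result.sizef, result.dirs.length, result.files.length)
// })
//
// // collect the paths of all .js files under the current directory
// xfs.scanJs({ path: process.cwd(), root: process.cwd() }, (err, paths) => {
//   if (err) return console.error(err)
//   console.log(paths)
// })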