node-uglifier-es
Automatically merges and uglifies a whole Node.js project into one file, with an option to keep selected files external. Recompiled from Zsolt Istvan Szabo's node-uglifier using uglify-es instead of uglify-js-harmony.
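Below is the compiled NodeUglifier.js source. For orientation, here is a minimal usage sketch based on the public methods visible in it (the file paths are placeholders):

var NodeUglifier = require("node-uglifier-es");

// merge every file reachable from the entry point into one source string,
// minify it with uglify-es, then write the bundle and its source maps
var nodeUglifier = new NodeUglifier("./app.js");
nodeUglifier.merge().uglify();
nodeUglifier.exportToFile("./release/app.merged.min.js");
nodeUglifier.exportSourceMaps("./release/app.merged.min.js");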
// Generated by CoffeeScript 2.3.1
(function() {
/*!
 * node-uglifier
 * Copyright (c) 2014 Zsolt Szabo Istvan
 * MIT Licensed
 */
var Errors, Graph, NodeUglifier, UGLIFY_SOURCE_MAP_TOKEN, UglifyJS, _, cryptoUtils, fs, fsExtra, packageUtils, path, saltLength, sugar, util;
Errors = require("./libs/Errors");
Graph = require("./libs/js-graph-mod/src/js-graph");
fsExtra = require('fs-extra');
fs = require('fs');
_ = require('underscore');
sugar = require('sugar');
sugar.extend();
path = require('path');
packageUtils = require('./libs/packageUtils');
cryptoUtils = require('./libs/cryptoUtils');
UglifyJS = require('uglify-es');
util = require("util");
saltLength = 20;
UGLIFY_SOURCE_MAP_TOKEN = "UGLIFY_SOURCE_MAP_TOKEN";
//default options below
//options = {mergeFileFilterWithExport: [], containerName: "cachedModules"}
/* mergeFileFilterWithExport */
//if a filtered file still has require statements to merged files, an error is thrown, so filtered files should not depend on your merged project files
//paths are relative to the main file
//an entry can be a directory, in which case none of the files in it are merged
//if no extension is given, all matching files are included (see the example below)
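//a hypothetical options object illustrating these filters (the paths are placeholders):
//
//  new NodeUglifier("./app.js", {
//    mergeFileFilterWithExport: ["./vendor"],   //copied into newFilteredFileDir ("./lib_external") next to the bundle
//    mergeFileFilter: ["./config"],             //copied keeping their path relative to the main file
//    containerName: "cachedModules"
//  });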
NodeUglifier = class NodeUglifier {
constructor(mainFile, options = {}) {
this.getNewRelativePathForFilteredWithExport = this.getNewRelativePathForFilteredWithExport.bind(this);
this.getNewRelativePathForFiltered = this.getNewRelativePathForFiltered.bind(this);
this.getRequireSubstitutionForFilteredWithExport = this.getRequireSubstitutionForFilteredWithExport.bind(this);
//defaults
this.options = {
mergeFileFilterWithExport: [],
mergeFileFilter: [],
newFilteredFileDir: "./lib_external",
containerName: "cachedModules",
rngSeed: null,
licenseFile: null,
fileExtensions: ["js", "coffee", "json"],
suppressFilteredDependentError: false,
packNodeModules: false
};
_.extend(this.options, options);
//path.resolve already resolves relative paths against process.cwd(), so it handles both full and relative paths
this.mainFileAbs = path.resolve(mainFile);
if (!fs.existsSync(this.mainFileAbs)) {
throw new Error("main file not found " + this.mainFileAbs);
} else {
console.log("processing main file: " + this.mainFileAbs);
}
this.salt = cryptoUtils.generateSalt(saltLength);
this.hashAlgorithm = "sha1";
this.wrappedSourceContainerName = this.options.containerName;
//when the array module container is used, the array indices are shuffled for extra obscurity
this.serialMappings = cryptoUtils.shuffleArray((function() {
var results = [];
for (var j = 0; j <= 10000; j++){ results.push(j); }
return results;
}).apply(this), this.options.rngSeed);
this._sourceCodes = {}; //hashes have source,sourceMod,sourceModWrapped
this.statistics = {};
this.filteredOutFiles = packageUtils.getMatchingFiles(this.mainFileAbs, this.options.mergeFileFilter);
this.lastResult = null;
}
getSourceContainer(serial) {
return this.wrappedSourceContainerName + "[" + this.serialMappings[serial] + "]";
}
getRequireSubstitutionForMerge(serial) {
return this.getSourceContainer(serial) + ".exports";
}
getNewRelativePathForFilteredWithExport(pathAbs) {
return path.join(this.options.newFilteredFileDir, path.basename(pathAbs));
}
getNewRelativePathForFiltered(pathAbs) {
var relPath;
relPath = path.relative(path.dirname(this.mainFileAbs), path.dirname(pathAbs));
return path.join(relPath, path.basename(pathAbs));
}
getRequireSubstitutionForFilteredWithExport(pathAbs, relPathFn) {
var relFile, relFileNoExt;
relFile = relPathFn(pathAbs);
relFileNoExt = relFile.replace(path.extname(relFile), "");
return "require('./" + relFileNoExt.replace(/\\/g, "/") + "')";
}
addWrapper(source, serial) {
var firstLine, lastLine, modulesArrayStr, secondLine;
modulesArrayStr = this.getSourceContainer(serial);
firstLine = modulesArrayStr + "={exports:{}};" + "\n";
secondLine = "(function(module,exports) {";
lastLine = "}).call(this," + modulesArrayStr + "," + modulesArrayStr + ".exports);";
return "\n" + firstLine + secondLine + source + lastLine;
}
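//for illustration, a merged module whose container slot is cachedModules[17] (the serial and shuffled index are hypothetical) is emitted roughly as:
//
//  cachedModules[17]={exports:{}};
//  (function(module,exports) { /* module source with its require()s rewritten */ }).call(this,cachedModules[17],cachedModules[17].exports);
//
//and any require() of that module in another merged file is replaced by cachedModules[17].exports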
merge() {
var _this, depGraph, edgeToRemove, filteredOutFilesWithExport, firstLine, iter, me, r, recursiveSourceGrabber, wasCycle;
_this = this;
firstLine = "var " + this.wrappedSourceContainerName + "=[];";
//filteredOutFilesObj: {} holds files by basename, each containing {source, sourceMod, pathRel, serial}, where pathRel is the saved location after exporting, relative to the exported main file
r = {
source: firstLine,
filteredOutFilesObj: {},
sourceMapModules: {},
pathOrder: [],
cycles: []
};
depGraph = new Graph();
filteredOutFilesWithExport = packageUtils.getMatchingFiles(this.mainFileAbs, this.options.mergeFileFilterWithExport);
recursiveSourceGrabber = function(filePath) {
var ast, basename, filteredOutFilesObj, isSourceObjDepFiltered, isSourceObjDepFilteredWithExport, isSourceObjFiltered, isSourceObjFilteredWithExport, j, len, me, msg, otherSerial, pathSaltedHash, relPathFnc, replacement, requireStatement, requireStatements, source, sourceObj, sourceObjDep;
try {
depGraph.addNewVertex(filePath, filePath);
} catch (error) {
me = error;
}
//do nothing, the vertex for this path already exists
source = packageUtils.readFile(filePath).toString();
if (_.isEqual(path.extname(filePath), ".json")) {
source = "module.exports=(" + source + ");";
}
//add source and wrapped source
pathSaltedHash = cryptoUtils.getSaltedHash(filePath, _this.hashAlgorithm, _this.salt);
if (_this._sourceCodes[pathSaltedHash] == null) {
_this._sourceCodes[pathSaltedHash] = {
source,
serial: _.keys(_this._sourceCodes).length,
sourceMod: source //wrappedModifiedSource:packageUtils.substituteRequireWrapperFnc(source,pathSaltedHash)
};
console.log(filePath + " added to sources ");
}
sourceObj = _this._sourceCodes[pathSaltedHash];
isSourceObjFilteredWithExport = filteredOutFilesWithExport.filter(function(fFile) {
return path.normalize(fFile) === path.normalize(filePath);
}).length > 0;
isSourceObjFiltered = _this.filteredOutFiles.filter(function(fFile) {
return path.normalize(fFile) === path.normalize(filePath);
}).length > 0;
ast = packageUtils.getAst(source);
requireStatements = packageUtils.getRequireStatements(ast, filePath, _this.options.fileExtensions, _this.options.packNodeModules);
//add salted hashes of files
requireStatements.forEach(function(o, i) {
return requireStatements[i] = _.extend(o, {
pathSaltedHash: cryptoUtils.getSaltedHash(o.path, _this.hashAlgorithm, _this.salt)
});
});
for (j = 0, len = requireStatements.length; j < len; j++) {
requireStatement = requireStatements[j];
try {
depGraph.addNewVertex(requireStatement.path, null);
} catch (error) {
me = error;
}
try {
//do nothing, the vertex already existed
depGraph.addNewEdge(filePath, requireStatement.path);
} catch (error) {
me = error;
}
//do nothing, the edge already existed
sourceObjDep = _this._sourceCodes[requireStatement.pathSaltedHash];
if (isSourceObjFilteredWithExport && packageUtils.getIfNonNativeNotFilteredNonNpm(requireStatement.path, filteredOutFilesWithExport, _this.options.fileExtensions)) {
//filtered-out files are not allowed to have a dependency on merged files
msg = "filtered files cannot have a dependency on merged files, file: " + filePath + " dependency: " + requireStatement.path;
if (_this.options.suppressFilteredDependentError) {
console.warn(msg);
} else {
throw new Error(msg);
}
}
if (sourceObjDep == null) {
recursiveSourceGrabber(requireStatement.path);
}
sourceObjDep = _this._sourceCodes[requireStatement.pathSaltedHash];
if (sourceObjDep == null) {
throw new Error(" internal should not happen 1");
}
otherSerial = sourceObjDep.serial;
isSourceObjDepFilteredWithExport = filteredOutFilesWithExport.filter(function(fFile) {
return path.normalize(fFile) === path.normalize(requireStatement.path);
}).length > 0;
isSourceObjDepFiltered = _this.filteredOutFiles.filter(function(fFile) {
return path.normalize(fFile) === path.normalize(requireStatement.path);
}).length > 0;
if (isSourceObjDepFilteredWithExport) {
//replace with the new path to the filtered out file
replacement = _this.getRequireSubstitutionForFilteredWithExport(requireStatement.path, _this.getNewRelativePathForFilteredWithExport);
} else if (isSourceObjDepFiltered) {
replacement = _this.getRequireSubstitutionForFilteredWithExport(requireStatement.path, _this.getNewRelativePathForFiltered);
} else {
//replace require with wrappedSourceContainerName
replacement = _this.getRequireSubstitutionForMerge(otherSerial);
r.sourceMapModules[_this.getSourceContainer(otherSerial)] = path.relative(path.dirname(_this.mainFileAbs), requireStatement.path);
}
sourceObj.sourceMod = packageUtils.replaceRequireStatement(sourceObj.sourceMod, requireStatement.text, replacement);
}
if (isSourceObjFilteredWithExport || isSourceObjFiltered) {
//no need to wrap filtered out external files
if (isSourceObjFiltered) {
relPathFnc = _this.getNewRelativePathForFiltered;
basename = relPathFnc(filePath);
} else if (isSourceObjFilteredWithExport) {
relPathFnc = _this.getNewRelativePathForFilteredWithExport;
basename = path.basename(filePath);
}
if (r.filteredOutFilesObj[basename]) {
filteredOutFilesObj = r.filteredOutFilesObj[basename];
if (filteredOutFilesObj.serial !== sourceObj.serial) {
throw new Error(" external files with same filename not supported yet");
}
} else {
r.filteredOutFilesObj[basename] = {
pathRel: relPathFnc(filePath)
};
return _.extend(r.filteredOutFilesObj[basename], sourceObj);
}
} else {
//add wrapped version
if (sourceObj.serial > 0) {
sourceObj.sourceModWrapped = _this.addWrapper(sourceObj.sourceMod, sourceObj.serial);
} else {
sourceObj.sourceModWrapped = sourceObj.sourceMod;
}
r.pathOrder.push(filePath);
return r.source = r.source + sourceObj.sourceModWrapped;
}
};
recursiveSourceGrabber(this.mainFileAbs);
this.lastResult = r;
//check for cyclic dependencies and throw error listing them
wasCycle = true;
iter = 0;
while (wasCycle && iter < 1000) {
iter++;
wasCycle = false;
try {
depGraph.topologically(function(vertex, vertexVal) {});
} catch (error) {
me = error;
wasCycle = true;
if (me.cycle) {
r.cycles.push(me.cycle);
edgeToRemove = [me.cycle.last(2).last(), me.cycle.last(2).first()].reverse();
depGraph.removeEdge.apply(depGraph, edgeToRemove);
}
}
}
if (!_.isEmpty(r.cycles)) {
throw new Errors.CyclicDependencies(r.cycles);
}
return this;
}
//exportDir - the whole dependency structure, relative to the project root, is recreated here
//srcDirMap - an object like {coffee: {src: "lib_compiled"}}: when the value ("lib_compiled") appears in a dependency's directory, a file with the key's extension (.coffee) is searched for in the key's directory ("src") and copied as well (see the example below)
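//a hypothetical call: for every exported dependency found under "lib_compiled", the matching .coffee file is also copied from "src" (directory names are placeholders)
//
//  nodeUglifier.exportDependencies("./release/deps", {coffee: {src: "lib_compiled"}});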
exportDependencies(exportDir, srcDirMap = null) {
var baseDir, baseName, baseNameNoExtension, baseNameOther, exportDirAbs, extension, from, fromToMap, j, k, len, len1, mirrorExt, newFile, newFileOther, otherBaseDir, otherFile, p, projectDir, ref, results, sourceFileDidNotExist, sourceFileDidNotExistArr, to, toFromMap;
sourceFileDidNotExistArr = [];
if (!this.lastResult) {
this.merge();
}
if (!this.lastResult.pathOrder) {
throw new Error("there were no dependencies to export");
}
exportDirAbs = path.resolve(exportDir);
projectDir = process.cwd();
ref = this.lastResult.pathOrder;
//the loop variable is p because path is used as the library name
for (j = 0, len = ref.length; j < len; j++) {
p = ref[j];
//find the subdirectories below the project root
if (p.indexOf(projectDir) !== 0) {
throw new Error(`${p} dependency not found each dependency should be in the project Dir: ${projectDir}`);
}
baseDir = path.dirname(p.slice(projectDir.length + 1));
baseName = path.basename(p);
extension = path.extname(p);
baseNameNoExtension = baseName.slice(0, baseName.length - extension.length);
//copy file part
newFile = path.resolve(path.join(exportDirAbs, baseDir, baseName));
fsExtra.ensureDirSync(path.dirname(newFile));
fs.createReadStream(p).pipe(fs.createWriteStream(newFile));
if (srcDirMap) {
for (mirrorExt in srcDirMap) {
fromToMap = srcDirMap[mirrorExt];
toFromMap = _.invert(fromToMap);
for (to in toFromMap) {
from = toFromMap[to];
otherBaseDir = baseDir.replace(to, from);
if (otherBaseDir === baseDir) {
continue;
}
otherFile = path.join(path.resolve(process.cwd(), otherBaseDir), baseNameNoExtension + "." + mirrorExt);
baseNameOther = path.basename(otherFile);
if (fsExtra.existsSync(otherFile)) {
//file exists do the copy
newFileOther = path.resolve(path.join(exportDirAbs, otherBaseDir, baseNameOther));
//copy file part
fsExtra.ensureDirSync(path.dirname(newFileOther));
fs.createReadStream(otherFile).pipe(fs.createWriteStream(newFileOther));
} else {
sourceFileDidNotExistArr.push(otherFile);
}
console.log(otherFile);
}
}
}
}
results = [];
for (k = 0, len1 = sourceFileDidNotExistArr.length; k < len1; k++) {
sourceFileDidNotExist = sourceFileDidNotExistArr[k];
results.push(console.log("WARNING source file did not exist: " + sourceFileDidNotExist));
}
return results;
}
toString() {
return this.lastResult.source.toString();
}
exportToFile(file) {
var _this, outDirRoot, outFileAbs;
_this = this;
//write the new merged main file
outFileAbs = path.resolve(file);
fsExtra.ensureDirSync(path.dirname(outFileAbs));
fs.writeFileSync(outFileAbs, this.toString());
outDirRoot = path.dirname(outFileAbs);
_.keys(_this.lastResult.filteredOutFilesObj).forEach(function(fileName) {
var copyObj, newFile;
copyObj = _this.lastResult.filteredOutFilesObj[fileName];
newFile = path.resolve(outDirRoot, copyObj.pathRel);
fsExtra.ensureDirSync(path.dirname(newFile));
return fs.writeFileSync(newFile, copyObj.sourceMod);
});
return _this.filteredOutFiles.forEach(function(fileName) {
var newFile, pathRel;
pathRel = _this.getNewRelativePathForFiltered(fileName);
newFile = path.resolve(outDirRoot, pathRel);
fsExtra.ensureDirSync(path.dirname(newFile));
return fs.createReadStream(fileName).pipe(fs.createWriteStream(newFile));
});
}
//writes both the uglify source map and the module map
exportSourceMaps(file) {
var _this, dir, outFileAbs, sourceMapModulesOutFileName, sourceMapOutFileName;
_this = this;
outFileAbs = path.resolve(file);
sourceMapOutFileName = path.basename(outFileAbs) + ".map";
sourceMapModulesOutFileName = path.basename(outFileAbs) + ".modules-map";
dir = path.dirname(outFileAbs);
fsExtra.ensureDirSync(dir);
if (this.lastResult.sourceMapUglify != null) {
fs.writeFileSync(path.join(dir, sourceMapOutFileName), this.lastResult.sourceMapUglify.replace(UGLIFY_SOURCE_MAP_TOKEN, sourceMapOutFileName));
}
return fs.writeFileSync(path.join(dir, sourceMapModulesOutFileName), JSON.stringify(_this.lastResult.sourceMapModules));
}
//pass standard uglify option objects (compress: {}, output: {}) in optionsIn
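//for example (values are illustrative):
//
//  nodeUglifier.uglify({compress: {drop_console: true}, output: {comments: "some"}});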
uglify(optionsIn = {}) {
var ast, options, res, source, uglifyOptions;
if (!this.lastResult) {
this.merge();
}
options = {
mangle: true,
compress: {
drop_console: false,
hoist_funs: true,
loops: true,
evaluate: true,
conditionals: true
},
output: {
comments: false
},
strProtectionLvl: 0 //, reserved:"cachedModules"
};
_.extend(options, optionsIn);
if (!this.lastResult.source) {
return;
}
source = this.toString();
//uglify-es rejects unknown options, so keep the custom strProtectionLvl flag out of the minify call
uglifyOptions = _.omit(options, "strProtectionLvl");
//put the token into the map's "file" field and the sourceMappingURL comment; exportSourceMaps later replaces it with the real map file name
uglifyOptions.sourceMap = {
filename: UGLIFY_SOURCE_MAP_TOKEN,
url: UGLIFY_SOURCE_MAP_TOKEN
};
res = UglifyJS.minify(source, uglifyOptions);
if (res.error) {
throw res.error;
}
this.lastResult.source = res.code;
this.lastResult.sourceMapUglify = res.map;
switch (options.strProtectionLvl) {
case 1:
ast = packageUtils.getAst(this.lastResult.source);
this.lastResult.source = packageUtils.getSourceHexified(ast);
}
return this;
}
};
module.exports = NodeUglifier;
}).call(this);
//# sourceMappingURL=NodeUglifier.js.map