"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createWriteBundleRequest;
var _constants = require("../constants");
function _nullthrows() {
const data = _interopRequireDefault(require("nullthrows"));
_nullthrows = function () {
return data;
};
return data;
}
function _path() {
const data = _interopRequireDefault(require("path"));
_path = function () {
return data;
};
return data;
}
var _Bundle = require("../public/Bundle");
function _utils() {
const data = require("@parcel/utils");
_utils = function () {
return data;
};
return data;
}
function _stream() {
const data = require("stream");
_stream = function () {
return data;
};
return data;
}
var _projectPath = require("../projectPath");
var _ParcelConfigRequest = _interopRequireWildcard(require("./ParcelConfigRequest"));
var _PluginOptions = _interopRequireDefault(require("../public/PluginOptions"));
function _logger() {
const data = require("@parcel/logger");
_logger = function () {
return data;
};
return data;
}
var _DevDepRequest = require("./DevDepRequest");
var _ParcelConfig = _interopRequireDefault(require("../ParcelConfig"));
function _diagnostic() {
const data = _interopRequireWildcard(require("@parcel/diagnostic"));
_diagnostic = function () {
return data;
};
return data;
}
function _profiler() {
const data = require("@parcel/profiler");
_profiler = function () {
return data;
};
return data;
}
var _RequestTracker = require("../RequestTracker");
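// Babel-generated interop helpers for requiring CommonJS and ES modules.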
function _getRequireWildcardCache(e) {
  if ("function" != typeof WeakMap) return null;
  var r = new WeakMap(),
    t = new WeakMap();
  return (_getRequireWildcardCache = function (e) {
    return e ? t : r;
  })(e);
}
function _interopRequireWildcard(e, r) {
  if (!r && e && e.__esModule) return e;
  if (null === e || "object" != typeof e && "function" != typeof e) return { default: e };
  var t = _getRequireWildcardCache(r);
  if (t && t.has(e)) return t.get(e);
  var n = { __proto__: null },
    a = Object.defineProperty && Object.getOwnPropertyDescriptor;
  for (var u in e) {
    if ("default" !== u && {}.hasOwnProperty.call(e, u)) {
      var i = a ? Object.getOwnPropertyDescriptor(e, u) : null;
      i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u];
    }
  }
  return n.default = e, t && t.set(e, n), n;
}
function _interopRequireDefault(e) {
  return e && e.__esModule ? e : { default: e };
}
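// A hash reference is HASH_REF_PREFIX followed by a 32-character hash.
// BOUNDARY_LENGTH is the longest tail of emitted output that could contain
// the start of a reference whose remainder arrives in the next chunk, so
// replaceStream holds that many bytes back between chunks.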
const HASH_REF_PREFIX_LEN = _constants.HASH_REF_PREFIX.length;
const BOUNDARY_LENGTH = _constants.HASH_REF_PREFIX.length + 32 - 1;
/**
* Writes a bundle to the dist directory, replacing hash references with the final content hashes.
*/
function createWriteBundleRequest(input) {
let name = (0, _nullthrows().default)(input.bundle.name);
let nameHash = (0, _nullthrows().default)(input.hashRefToNameHash.get(input.bundle.hashReference));
return {
id: `${input.bundle.id}:${input.info.hash}:${nameHash}:${name}`,
type: _RequestTracker.requestTypes.write_bundle_request,
run,
input
};
}
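/**
 * Computes the bundle's final file name by substituting its content hash for
 * the hash reference, registers deletion invalidations, and writes the bundle
 * (and its source map, if any) from the cache to the dist directory.
 */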
async function run({
input,
options,
api
}) {
let {
bundleGraph,
bundle,
info,
hashRefToNameHash
} = input;
let {
inputFS,
outputFS
} = options;
let name = (0, _nullthrows().default)(bundle.name);
let thisHashReference = bundle.hashReference;
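// If the packaged content's type differs from the bundle's original type,
// swap the file extension to match.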
if (info.type !== bundle.type) {
name = name.slice(0, -_path().default.extname(name).length) + '.' + info.type;
}
if (name.includes(thisHashReference)) {
let thisNameHash = (0, _nullthrows().default)(hashRefToNameHash.get(thisHashReference));
name = name.replace(thisHashReference, thisNameHash);
}
let filePath = (0, _projectPath.joinProjectPath)(bundle.target.distDir, name);
// Watch the bundle and source map for deletion.
// Also watch the dist dir because invalidateOnFileDelete does not currently
// invalidate when a parent directory is deleted.
// TODO: do we want to also watch for file edits?
api.invalidateOnFileDelete(bundle.target.distDir);
api.invalidateOnFileDelete(filePath);
let cacheKeys = info.cacheKeys;
let mapKey = cacheKeys.map;
let fullPath = (0, _projectPath.fromProjectPath)(options.projectRoot, filePath);
if (mapKey && bundle.env.sourceMap && !bundle.env.sourceMap.inline) {
api.invalidateOnFileDelete((0, _projectPath.toProjectPath)(options.projectRoot, fullPath + '.map'));
}
let dir = _path().default.dirname(fullPath);
await outputFS.mkdirp(dir); // ? Removed the dist-exists check; is mkdirp an expensive operation here?
// Use the file mode from the entry asset as the file mode for the bundle.
// Don't do this for browser builds, as the executable bit in particular is unnecessary.
let publicBundle = _Bundle.NamedBundle.get(bundle, bundleGraph, options);
let mainEntry = publicBundle.getMainEntry();
let writeOptions = publicBundle.env.isBrowser() || !mainEntry ? undefined : {
mode: (await inputFS.stat(mainEntry.filePath)).mode
};
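// Large blobs are streamed straight from the cache; smaller ones are read
// into memory and wrapped in a stream.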
let contentStream;
if (info.isLargeBlob) {
contentStream = options.cache.getStream(cacheKeys.content);
} else {
contentStream = (0, _utils().blobToStream)(await options.cache.getBlob(cacheKeys.content));
}
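// Load the resolved Parcel config (used below to look up compressor plugins)
// and invalidate any dev dependencies that changed since the last run.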
let configResult = (0, _nullthrows().default)(await api.runRequest((0, _ParcelConfigRequest.default)()));
let config = (0, _ParcelConfigRequest.getCachedParcelConfig)(configResult, options);
let {
devDeps,
invalidDevDeps
} = await (0, _DevDepRequest.getDevDepRequests)(api);
(0, _DevDepRequest.invalidateDevDeps)(invalidDevDeps, options, config);
let files = await writeFiles(contentStream, info, hashRefToNameHash, options, config, outputFS, filePath, writeOptions, devDeps, api);
if (mapKey && bundle.env.sourceMap && !bundle.env.sourceMap.inline && (await options.cache.has(mapKey))) {
let mapFiles = await writeFiles(
  (0, _utils().blobToStream)(await options.cache.getBlob(mapKey)),
  info,
  hashRefToNameHash,
  options,
  config,
  outputFS,
  (0, _projectPath.toProjectPathUnsafe)((0, _projectPath.fromProjectPathRelative)(filePath) + '.map'),
  writeOptions,
  devDeps,
  api
);
files.push(...mapFiles);
}
api.storeResult(files);
return files;
}
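/**
 * Writes the bundle contents to disk, piping them through the hash-reference
 * replacement stream when needed and running every configured compressor
 * plugin over an independent clone of the stream.
 */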
async function writeFiles(inputStream, info, hashRefToNameHash, options, config, outputFS, filePath, writeOptions, devDeps, api) {
let compressors = await config.getCompressors((0, _projectPath.fromProjectPathRelative)(filePath));
let stream = info.hashReferences.length ? inputStream.pipe(replaceStream(hashRefToNameHash)) : inputStream;
let promises = [];
for (let compressor of compressors) {
promises.push(runCompressor(compressor, info, cloneStream(stream), options, outputFS, filePath, writeOptions, devDeps, api));
}
let results = await Promise.all(promises);
return results.filter(Boolean);
}
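/**
 * Runs a single compressor plugin over the bundle stream and writes the
 * result, appending the type the plugin reports (e.g. `.gz`) as an extra
 * extension. Resolves to the written file's path and stats, or null if the
 * plugin declined to compress this file.
 */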
async function runCompressor(compressor, info, stream, options, outputFS, inputFilePath, writeOptions, devDeps, api) {
let measurement;
try {
measurement = _profiler().tracer.createMeasurement(compressor.name, 'compress', (0, _projectPath.fromProjectPathRelative)(inputFilePath));
let res = await compressor.plugin.compress({
stream,
options: new _PluginOptions.default(options),
logger: new (_logger().PluginLogger)({
origin: compressor.name
}),
tracer: new (_profiler().PluginTracer)({
origin: compressor.name,
category: 'compress'
})
});
let filePath = inputFilePath;
if (res != null) {
if (res.type != null) {
let type = res.type;
filePath = (0, _projectPath.toProjectPathUnsafe)((0, _projectPath.fromProjectPathRelative)(filePath) + '.' + type);
}
let size = 0;
let stream = res.stream.pipe(new (_utils().TapStream)(buf => {
size += buf.length;
}));
let fullPath = (0, _projectPath.fromProjectPath)(options.projectRoot, filePath);
await new Promise((resolve, reject) => (0, _stream().pipeline)(stream, outputFS.createWriteStream(fullPath, writeOptions), err => {
if (err) reject(err);
else resolve();
}));
return {
filePath,
type: info.type,
stats: {
size,
time: info.time ?? 0
}
};
}
return null;
} catch (err) {
throw new (_diagnostic().default)({
diagnostic: (0, _diagnostic().errorToDiagnostic)(err, {
origin: compressor.name
})
});
} finally {
measurement && measurement.end();
// Add dev deps for compressor plugins AFTER running them, to account for lazy require().
let devDepRequest = await (0, _DevDepRequest.createDevDependency)({
specifier: compressor.name,
resolveFrom: compressor.resolveFrom
}, devDeps, options);
await (0, _DevDepRequest.runDevDepRequest)(api, devDepRequest);
}
}
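/**
 * Returns a Transform stream that rewrites hash references (HASH_REF_PREFIX
 * followed by a 32-character hash) to their final name hashes via
 * `hashRefToNameHash`, reusing a scratch buffer for the rewritten output.
 * The last BOUNDARY_LENGTH bytes of each chunk are held back and prepended to
 * the next chunk, so references that straddle a chunk boundary still match.
 *
 * Hypothetical usage sketch (names are illustrative, not from this file):
 *
 *   cacheStream
 *     .pipe(replaceStream(hashRefToNameHash))
 *     .pipe(outputFS.createWriteStream(fullPath));
 */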
function replaceStream(hashRefToNameHash) {
let boundaryStr = Buffer.alloc(0);
let replaced = Buffer.alloc(0);
return new (_stream().Transform)({
transform(chunk, encoding, cb) {
let str = Buffer.concat([boundaryStr, Buffer.from(chunk)]);
let lastMatchI = 0;
if (replaced.length < str.byteLength) {
replaced = Buffer.alloc(str.byteLength);
}
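// Scan for hash references, copying rewritten output into `replaced`.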
let replacedLength = 0;
while (lastMatchI < str.byteLength) {
let matchI = str.indexOf(_constants.HASH_REF_PREFIX, lastMatchI);
if (matchI === -1) {
replaced.set(str.subarray(lastMatchI, str.byteLength), replacedLength);
replacedLength += str.byteLength - lastMatchI;
break;
} else {
let match = str.subarray(matchI, matchI + HASH_REF_PREFIX_LEN + _constants.HASH_REF_HASH_LEN).toString();
let replacement = Buffer.from(hashRefToNameHash.get(match) ?? match);
replaced.set(str.subarray(lastMatchI, matchI), replacedLength);
replacedLength += matchI - lastMatchI;
replaced.set(replacement, replacedLength);
replacedLength += replacement.byteLength;
lastMatchI = matchI + HASH_REF_PREFIX_LEN + _constants.HASH_REF_HASH_LEN;
}
}
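// Hold back the last BOUNDARY_LENGTH bytes in case a reference straddles the
// chunk boundary; flush() emits the held-back tail at end of stream.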
boundaryStr = replaced.subarray(replacedLength - BOUNDARY_LENGTH, replacedLength);
let strUpToBoundary = replaced.subarray(0, replacedLength - BOUNDARY_LENGTH);
cb(null, strUpToBoundary);
},
flush(cb) {
cb(null, boundaryStr);
}
});
}
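/**
 * Returns an independent Readable fed by the source's 'data', 'end', and
 * 'error' events, so each compressor can consume the same contents in
 * parallel. Attaching the 'data' listener switches the source into flowing
 * mode, so all clones must be created before data starts flowing.
 */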
function cloneStream(readable) {
let res = new (_stream().Readable)();
// $FlowFixMe
res._read = () => {};
readable.on('data', chunk => res.push(chunk));
readable.on('end', () => res.push(null));
readable.on('error', err => res.emit('error', err));
return res;
}