sonda
Version: 0.9.0
Universal bundle analyzer and visualizer that works with most popular bundlers and frameworks.
861 lines (845 loc) • 28 kB
JavaScript
import { styleText } from "util";
import { access, mkdir, readFile, readdir, writeFile } from "fs/promises";
import { basename, dirname, extname, format, isAbsolute, join, parse, posix, relative, resolve, win32 } from "path";
import { isBuiltin } from "module";
import open from "open";
import { brotliCompressSync, gzipSync } from "zlib";
import { existsSync, readFileSync, statSync } from "fs";
import remapping from "@ampproject/remapping";
//#region src/config.ts
var Config = class Config {
#options;
constructor(options, defaults) {
if (options instanceof Config) {
this.#options = options.#options;
return;
}
this.#options = Object.assign({
enabled: true,
include: null,
exclude: null,
format: "html",
filename: "sonda_[index]",
outputDir: ".sonda",
open: true,
deep: false,
sources: false,
gzip: false,
brotli: false,
server: false,
sourcesPathNormalizer: null
}, defaults, options);
}
clone() {
return new Config({}, structuredClone(this.#options));
}
get enabled() {
return this.#options.enabled;
}
get include() {
return this.#options.include;
}
get exclude() {
return this.#options.exclude;
}
get format() {
return this.#options.format;
}
get filename() {
return this.#options.filename;
}
get outputDir() {
return this.#options.outputDir;
}
get open() {
return this.#options.open;
}
get deep() {
return this.#options.deep;
}
get sources() {
return this.#options.sources;
}
get gzip() {
return this.#options.gzip;
}
get brotli() {
return this.#options.brotli;
}
get server() {
return this.#options.server;
}
get integration() {
return this.#options.integration;
}
get sourcesPathNormalizer() {
return this.#options.sourcesPathNormalizer;
}
set filename(filename) {
this.#options.filename = filename;
}
set sourcesPathNormalizer(normalizer) {
this.#options.sourcesPathNormalizer = normalizer;
}
};
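/**
* Illustrative sketch (not part of the original bundle): how user options merge with
* the defaults above. The concrete option values are assumptions for the example;
* the keys and defaults come from the constructor.
*
* @example
*   const config = new Config({ gzip: true, format: "json" }, { integration: "vite" });
*
*   config.enabled;     // true (built-in default)
*   config.format;      // "json" (user option overrides the default "html")
*   config.gzip;        // true
*   config.integration; // "vite" (provided by the integration, not the user)
*/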
//#endregion
//#region src/utils.ts
const extensions = {
".js": "script",
".jsx": "script",
".mjs": "script",
".cjs": "script",
".ts": "script",
".tsx": "script",
".cts": "script",
".mts": "script",
".json": "script",
".node": "script",
".wasm": "script",
".css": "style",
".scss": "style",
".sass": "style",
".less": "style",
".styl": "style",
".pcss": "style",
".postcss": "style",
".woff": "font",
".woff2": "font",
".ttf": "font",
".otf": "font",
".eot": "font",
".jpg": "image",
".jpeg": "image",
".png": "image",
".gif": "image",
".svg": "image",
".webp": "image",
".jxl": "image",
".avif": "image",
".ico": "image",
".bmp": "image",
".vue": "component",
".svelte": "component",
".astro": "component",
".marko": "component",
".riot": "component"
};
const ignoredExtensions = [".map", ".d.ts"];
/**
* Normalizes a given path by removing a leading null character and converting it to a relative POSIX path.
*/
function normalizePath(pathToNormalize) {
const normalized = pathToNormalize.replace(/^\0/, "");
const relativized = relative(process.cwd(), normalized);
return relativized.replaceAll(win32.sep, posix.sep);
}
/**
* Returns the type of a given file based on its name.
*/
function getTypeByName(name) {
return extensions[extname(name)] ?? "other";
}
/**
* Sorts an array of objects by a specific key.
*/
function sortByKey(data, key) {
return data.sort((a, b) => a[key].localeCompare(b[key]));
}
/**
* Returns relative paths to all files in the given directory. The files are filtered to exclude source maps and TypeScript declaration files.
*/
async function getAllFiles(dir, recursive = true) {
try {
await access(dir);
const files = await readdir(dir, {
withFileTypes: true,
recursive
});
return files.filter((file) => file.isFile()).filter((file) => !hasIgnoredExtension(file.name)).map((file) => join(relative(process.cwd(), file.parentPath), file.name));
} catch {
return [];
}
}
/**
* Checks if a file name has an ignored extension. Using `endsWith` ensures that extensions like `.d.ts` are
* correctly identified as ignored, even though `extname` would return `.ts`.
*/
function hasIgnoredExtension(name) {
return ignoredExtensions.some((ext) => name.endsWith(ext));
}
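/**
* Illustrative sketch (not part of the original bundle): expected behavior of the
* helpers above. The concrete paths are assumptions for the example.
*
* @example
*   // With process.cwd() === "/home/user/project":
*   normalizePath("\0/home/user/project/src/index.ts"); // "src/index.ts"
*   getTypeByName("styles/main.scss");                  // "style"
*   getTypeByName("README.md");                         // "other"
*   hasIgnoredExtension("types/index.d.ts");            // true
*/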
//#endregion
//#region package.json
var version = "0.9.0";
//#endregion
//#region src/report/formatters/Formatter.ts
var Formatter = class {
config;
constructor(config) {
this.config = config;
}
/**
* Writes the report to the file system and returns the path to the report.
*/
async write(data) {
const path = await this.getOutputPath();
const content = await this.parse(data);
await mkdir(dirname(path), { recursive: true });
await writeFile(path, content);
return path;
}
/**
* Returns the output path for the report based on the configuration. It ensures
* that the path is absolute, has the proper extension, and replaces the [index] placeholder
* with the next available index based on existing files in the output directory.
*/
async getOutputPath() {
const configPath = resolve(process.cwd(), this.config.outputDir, this.config.filename);
const path = format({
...parse(configPath),
base: "",
ext: "." + this.config.format
});
return this.replaceIndex(path);
}
/**
* Returns the path with the [index] placeholder replaced by the next available index.
*/
async replaceIndex(path) {
if (!path.includes("[index]")) return path;
const { dir, base } = parse(path);
const regex = /* @__PURE__ */ new RegExp("^" + base.replace("[index]", "(\\d+)") + "$");
const versions = (await getAllFiles(dir)).map((path$1) => basename(path$1).match(regex)).filter((match) => match !== null).map((match) => parseInt(match[1], 10));
const maxVersion = Math.max(...versions, -1);
const version$1 = String(maxVersion + 1);
return path.replace("[index]", version$1);
}
};
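/**
* Illustrative sketch (not part of the original bundle): how the [index] placeholder
* is resolved. The file names are assumptions for the example; the logic follows
* getOutputPath() and replaceIndex() above.
*
* @example
*   // config: { outputDir: ".sonda", filename: "sonda_[index]", format: "html" }
*   // existing files: .sonda/sonda_0.html and .sonda/sonda_1.html
*   //
*   // getOutputPath() resolves to "<cwd>/.sonda/sonda_2.html" — the highest
*   // existing index plus one; with no matching files it starts at 0.
*/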
//#endregion
//#region src/report/formatters/HtmlFormatter.ts
var HtmlFormatter = class extends Formatter {
extension = ".html";
async parse(data) {
const template = await readFile(resolve(import.meta.dirname, "./index.html"), "utf-8");
return template.replace("__REPORT_DATA__", gzipSync(JSON.stringify(data)).toString("base64"));
}
};
//#endregion
//#region src/report/formatters/JsonFormatter.ts
var JsonFormatter = class extends Formatter {
extension = ".json";
parse(data) {
return JSON.stringify(data, null, 2);
}
};
//#endregion
//#region ../load-source-map/dist/index.js
/**
* Strips any JSON XSSI-avoidance prefix from the string (as documented in the source map specification)
* and parses the string as JSON.
*
* https://github.com/mozilla/source-map/blob/3cb92cc3b73bfab27c146bae4ef2bc09dbb4e5ed/lib/util.js#L162-L164
*/
function parseSourceMapInput(str) {
return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, ""));
}
/**
sourceMappingURL=data:application/json;charset=utf-8;base64,data
sourceMappingURL=data:application/json;base64,data
sourceMappingURL=data:application/json;uri,data
sourceMappingURL=map-file-comment.css.map
sourceMappingURL=map-file-comment.css.map?query=value
*/
const sourceMappingRegExp = /[@#]\s*sourceMappingURL=(\S+)\b/g;
/**
* Checks if the given path is a file.
*/
function isFile(path) {
try {
return statSync(path).isFile();
} catch {
return false;
}
}
/**
* Default path normalizer that resolves the path relative to the source root.
*/
function defaultPathNormalizer(path, sourceRoot) {
return isAbsolute(path) ? path : resolve(sourceRoot, path);
}
function loadCodeAndMap(codePath, sourcesPathNormalizer) {
if (!isFile(codePath)) return null;
const code = readFileSync(codePath, "utf-8");
const maybeMap = loadMap(codePath, code);
if (!maybeMap) return { code };
const { map, mapPath } = maybeMap;
const sourceRoot = resolve(dirname(mapPath), map.sourceRoot ?? ".");
const normalizer = sourcesPathNormalizer || defaultPathNormalizer;
map.sources = map.sources.map((source) => source && normalizer(source, sourceRoot));
map.sourcesContent = loadMissingSourcesContent(map);
delete map.sourceRoot;
return {
code,
map
};
}
function loadMap(codePath, code) {
/**
* Because in most cases the source map has the same name as the code file,
* we can try to append `.map` to the code path and check if the file exists.
*/
try {
const possibleMapPath = codePath + ".map";
const map = readFileSync(possibleMapPath, "utf-8");
return {
map: parseSourceMapInput(map),
mapPath: possibleMapPath
};
} catch {}
/**
* If the source map is not found by file name, we can try to extract it from the code.
* The path to the source map is usually in a comment at the end of the file, but it can
* also be inlined in the code itself.
*/
const extractedComment = code.includes("sourceMappingURL") && Array.from(code.matchAll(sourceMappingRegExp)).at(-1);
if (!extractedComment || !extractedComment.length) return null;
const sourceMappingURL = extractedComment[1];
if (sourceMappingURL.startsWith("data:")) {
const map = parseDataUrl(sourceMappingURL);
return {
map: parseSourceMapInput(map),
mapPath: codePath
};
}
const sourceMapFilename = new URL(sourceMappingURL, "file://").pathname;
const mapPath = join(dirname(codePath), sourceMapFilename);
if (!existsSync(mapPath)) return null;
return {
map: parseSourceMapInput(readFileSync(mapPath, "utf-8")),
mapPath
};
}
function parseDataUrl(url) {
const [prefix, payload] = url.split(",");
const encoding = prefix.split(";").at(-1);
switch (encoding) {
case "base64": return Buffer.from(payload, "base64").toString();
case "uri": return decodeURIComponent(payload);
default: throw new Error("Unsupported source map encoding: " + encoding);
}
}
/**
* Loops through the sources and tries to load missing `sourcesContent` from the file system.
*/
function loadMissingSourcesContent(map) {
return map.sources.map((source, index) => {
if (map.sourcesContent?.[index]) return map.sourcesContent[index];
if (source && existsSync(source)) return readFileSync(source, "utf-8");
return null;
});
}
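/**
* Illustrative sketch (not part of the original bundle): typical use of
* loadCodeAndMap(). The bundle path is an assumption for the example.
*
* @example
*   const result = loadCodeAndMap("dist/index.js");
*
*   // `result` is null when the file does not exist, `{ code }` when no source map
*   // can be found (neither "dist/index.js.map" nor a sourceMappingURL comment),
*   // and `{ code, map }` otherwise — with `map.sources` resolved against the
*   // source root and missing `sourcesContent` loaded from disk where possible.
*/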
//#endregion
//#region src/report/processors/sourcemap.ts
const UNASSIGNED = "[unassigned]";
function getBytesPerSource(code, map, assetSizes, config) {
const contributions = getContributions(map.sources);
const codeLines = code.split(/(?<=\r?\n)/);
for (let lineIndex = 0; lineIndex < codeLines.length; lineIndex++) {
const lineCode = codeLines[lineIndex];
const mappings = map.mappings[lineIndex] || [];
let currentColumn = 0;
for (let i = 0; i <= mappings.length; i++) {
const mapping = mappings[i];
const startColumn = mapping?.[0] ?? lineCode.length;
const endColumn = mappings[i + 1]?.[0] ?? lineCode.length;
if (startColumn > currentColumn) contributions.set(UNASSIGNED, contributions.get(UNASSIGNED) + lineCode.slice(currentColumn, startColumn));
if (mapping) {
const sourceIndex = mapping?.[1];
const codeSlice = lineCode.slice(startColumn, endColumn);
const source = sourceIndex !== void 0 && map.sources[sourceIndex] || UNASSIGNED;
contributions.set(source, contributions.get(source) + codeSlice);
currentColumn = endColumn;
} else currentColumn = startColumn;
}
}
const sourceSizes = /* @__PURE__ */ new Map();
const contributionsSum = {
uncompressed: 0,
gzip: 0,
brotli: 0
};
for (const [source, codeSegment] of contributions) {
const sizes = getSizes(codeSegment, config);
contributionsSum.uncompressed += sizes.uncompressed;
contributionsSum.gzip += sizes.gzip;
contributionsSum.brotli += sizes.brotli;
sourceSizes.set(source, sizes);
}
return adjustSizes(sourceSizes, assetSizes, contributionsSum, config);
}
/**
* Returns the sizes of the given code based on the configuration.
*/
function getSizes(code, config) {
return {
uncompressed: Buffer.byteLength(code),
gzip: config.gzip ? gzipSync(code).length : 0,
brotli: config.brotli ? brotliCompressSync(code).length : 0
};
}
function getContributions(sources) {
const contributions = /* @__PURE__ */ new Map();
sources.filter((source) => source !== null).forEach((source) => contributions.set(source, ""));
contributions.set(UNASSIGNED, "");
return contributions;
}
/**
* Compression efficiency improves with the size of the file.
*
* However, what we have is the compressed size of the entire asset (`asset`),
* the sum of all sources compressed individually (`sums`), and the compressed
* size of each individual source (the entries of `sources`). The last value is
* essentially a “worst-case” scenario, and the actual size of the source in the
* bundle is likely to be smaller.
*
* We use this information to estimate the actual size of the file in the bundle
* after compression.
*/
function adjustSizes(sources, asset, sums, config) {
const gzipDelta = config.gzip ? asset.gzip / sums.gzip : 0;
const brotliDelta = config.brotli ? asset.brotli / sums.brotli : 0;
for (const [source, sizes] of sources) sources.set(source, {
uncompressed: sizes.uncompressed,
gzip: config.gzip ? Math.round(sizes.gzip * gzipDelta) : 0,
brotli: config.brotli ? Math.round(sizes.brotli * brotliDelta) : 0
});
return sources;
}
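/**
* Illustrative arithmetic (not part of the original bundle), using assumed numbers:
* if the whole asset gzips to 30 000 B but the per-source gzip sizes sum to
* 40 000 B, every source's gzip size is scaled by 30 000 / 40 000 = 0.75 and
* rounded — so a source measured at 8 000 B on its own is reported as 6 000 B.
*/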
//#endregion
//#region src/report/processors/outputs.ts
const RESOURCE_TYPES_TO_ANALYZE = ["script", "style"];
const parentMap = {};
/**
* Update the report with the output assets and their sources from the source map.
*/
function updateOutput(report, path, entrypoints) {
const type = getTypeByName(path);
RESOURCE_TYPES_TO_ANALYZE.includes(type) ? addAnalyzableType(report, path, entrypoints, type) : addNonAnalyzableType(report, path, type);
}
/**
* Adds simple assets like fonts, images, etc. to the report without analyzing their content or dependencies.
*/
function addNonAnalyzableType(report, path, type) {
const content = readFileSync(path);
const sizes = getSizes(content, report.config);
report.addResource({
kind: "asset",
name: normalizePath(path),
type,
...sizes
});
}
/**
* Adds code assets like scripts and styles to the report and analyzes their content
* to find their sources and dependencies.
*/
function addAnalyzableType(report, path, entrypoints, type) {
const assetName = normalizePath(path);
const codeMap = getSource(path, report.config);
if (!codeMap) return addNonAnalyzableType(report, path, type);
const { code, map } = codeMap;
const sizes = getSizes(code, report.config);
const sourcesSizes = getBytesPerSource(code, map, sizes, report.config);
report.addResource({
kind: "asset",
name: assetName,
type,
...sizes
});
if (report.config.sources) report.addSourceMap(assetName, normalizeSourceMap(map));
entrypoints?.forEach((entry) => report.addConnection({
kind: "entrypoint",
source: assetName,
target: normalizePath(entry),
original: null
}));
for (const [source, sizes$1] of sourcesSizes) {
const name = normalizePath(source);
const type$1 = getTypeByName(source);
const parent = parentMap[source] ? normalizePath(parentMap[source]) : null;
const existingSource = report.resources.find((resource) => resource.name === name && resource.kind === "filesystem");
if (!existingSource) {
const index = map.sources.indexOf(source);
const { uncompressed } = getSizes(map.sourcesContent?.[index] || "", {
gzip: false,
brotli: false
});
report.addResource({
kind: "sourcemap",
name,
type: type$1,
format: "other",
uncompressed,
parent: parent || null
});
}
report.addResource({
kind: "chunk",
name,
type: type$1,
format: existingSource?.format || "other",
...sizes$1,
parent: assetName
});
if (parent) report.addConnection({
kind: "sourcemap",
source: parent,
target: name,
original: null
});
}
}
/**
* Normalize the source map to a format expected by the report.
*/
function normalizeSourceMap(map) {
return {
mappings: map.mappings,
sources: map.sources.map((source) => source && normalizePath(source)),
sourcesContent: map.sourcesContent
};
}
/**
* Load the code and source map from the given path. If the `deep` option
* is enabled, it will recursively load the source maps of the sources
* until it finds the original source.
*/
function getSource(path, config) {
const codeMap = loadCodeAndMap(path, config.sourcesPathNormalizer);
if (!codeMap || !codeMap.map) return null;
return {
code: codeMap.code,
map: parseSourceMap(codeMap.map, config.deep)
};
}
/**
* Parse the source map. If `options.deep` is set to `true`, it will
* recursively load the source maps of the sources until it finds
* the original source. Otherwise, it will only decode the source map.
*/
function parseSourceMap(map, deep) {
const alreadyRemapped = /* @__PURE__ */ new Set();
return remapping(map, (file, ctx) => {
if (!deep || alreadyRemapped.has(file)) return;
alreadyRemapped.add(file);
const codeMap = loadCodeAndMap(resolve(process.cwd(), file));
if (!codeMap) return;
ctx.content ??= codeMap.code;
codeMap.map?.sources.filter((source) => source !== null && file !== source).forEach((source) => parentMap[source] = file);
return codeMap.map;
}, { decodedMappings: true });
}
//#endregion
//#region src/report/processors/dependencies.ts
const packageNameRegExp = /(.*)(?:.*node_modules\/)(@[^\/]+\/[^\/]+|[^\/]+)/;
/**
* Finds all external dependencies based on the report's resources
* and returns them together with the paths under which they appear.
*/
function updateDependencies(report) {
const dependencies = {};
report.resources.map((file) => packageNameRegExp.exec(file.name)).filter((match) => match !== null).forEach(([path, , name]) => {
const paths = dependencies[name] ??= [];
if (!paths.includes(path)) paths.push(path);
});
return Object.entries(dependencies).map(([name, paths]) => ({
name,
paths
}));
}
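/**
* Illustrative sketch (not part of the original bundle): what packageNameRegExp
* extracts. The resource paths are assumptions for the example.
*
* @example
*   packageNameRegExp.exec("node_modules/@ampproject/remapping/dist/remapping.mjs");
*   // match[0] (used as the path): "node_modules/@ampproject/remapping"
*   // match[2] (used as the name): "@ampproject/remapping"
*
*   packageNameRegExp.exec("packages/app/node_modules/open/index.js");
*   // match[0]: "packages/app/node_modules/open"
*   // match[2]: "open"
*/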
//#endregion
//#region src/report/report.ts
const formatters = {
"html": HtmlFormatter,
"json": JsonFormatter
};
var Report = class {
config;
resources = [];
connections = [];
assets = {};
metadata;
dependencies = [];
issues = [];
sourcemaps = [];
constructor(config) {
this.config = config;
this.metadata = {
version,
integration: config.integration,
sources: config.sources,
gzip: config.gzip,
brotli: config.brotli
};
}
addResource(resource) {
if (resource.name.startsWith("data:") || hasIgnoredExtension(resource.name)) return;
const existing = this.resources.find((r) => r.kind === resource.kind && r.name === resource.name && r.parent === resource.parent);
if (existing) return;
this.resources.push(resource);
}
addConnection(connection) {
if (connection.target.startsWith("data:") || hasIgnoredExtension(connection.source) || hasIgnoredExtension(connection.target) || isBuiltin(connection.target)) return;
const existing = this.connections.find((c) => {
return c.kind === connection.kind && c.source === connection.source && c.target === connection.target;
});
if (!existing) {
this.connections.push(connection);
return;
}
/**
* If a connection already exists, update the `original` property if either connection has it.
* If both connections have the `original` property, prioritize the shorter one because it is
* more likely to be the original source than the absolute path.
*/
existing.original = [connection.original, existing.original].filter((original) => original !== null).sort((a, b) => a.length - b.length)[0] || null;
}
addAsset(name, entrypoints) {
if (hasIgnoredExtension(name)) return;
const normalizedName = normalizePath(name);
if (this.config.exclude?.some((pattern) => pattern.test(normalizedName))) return;
if (this.config.include && !this.config.include.some((pattern) => pattern.test(normalizedName))) return;
this.assets[name] = entrypoints;
}
async generate() {
for (const [path$1, entrypoints] of Object.entries(this.assets)) updateOutput(this, path$1, entrypoints);
this.dependencies = updateDependencies(this);
const formatter = new formatters[this.config.format](this.config);
const path = await formatter.write(this.#getFormattedData());
if (this.config.open) await open(path);
return path;
}
addSourceMap(asset, sourcemap) {
if (this.sourcemaps.some((sm) => sm.name === asset)) return;
this.sourcemaps.push({
name: asset,
map: JSON.stringify(sourcemap)
});
}
#getFormattedData() {
return {
metadata: this.metadata,
resources: sortByKey(this.resources, "name"),
connections: this.connections,
dependencies: sortByKey(this.dependencies, "name"),
issues: this.issues,
sourcemaps: this.sourcemaps
};
}
};
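/**
* Illustrative sketch (not part of the original bundle): programmatic use of the
* Report class, mirroring what the integrations below do. The paths, sizes, and
* entry points are assumptions for the example.
*
* @example
*   const report = new Report(new Config({ gzip: true, open: false }, { integration: "esbuild" }));
*
*   report.addResource({
*     kind: "filesystem",
*     name: "src/index.ts",
*     type: "script",
*     format: "esm",
*     uncompressed: 1024
*   });
*   report.addAsset("dist/index.js", ["src/index.ts"]);
*
*   const reportPath = await report.generate(); // writes the report and returns its path
*/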
//#endregion
//#region src/integrations/esbuild.ts
function SondaEsbuildPlugin(userOptions = {}) {
const options = new Config(userOptions, { integration: "esbuild" });
return {
name: "sonda-esbuild",
setup(build) {
if (!options.enabled) return;
build.initialOptions.metafile = true;
build.onEnd((result) => processEsbuildMetafile(result.metafile, options));
}
};
}
async function processEsbuildMetafile(metafile, options) {
const report = new Report(options);
for (const [path, input] of Object.entries(metafile.inputs)) {
const name = normalizePath(path);
report.addResource({
kind: "filesystem",
name,
type: getTypeByName(path),
format: input.format || "other",
uncompressed: input.bytes
});
input.imports.forEach((imp) => {
report.addConnection({
kind: connectionKindMapper$1(imp.kind),
source: name,
target: normalizePath(imp.path),
original: imp.original || null
});
});
}
for (const [path, output] of Object.entries(metafile.outputs)) {
report.addAsset(path, output.entryPoint ? [output.entryPoint] : void 0);
if (output.entryPoint) report.addConnection({
kind: "entrypoint",
source: normalizePath(output.entryPoint),
target: normalizePath(path),
original: null
});
}
const reportPath = await report.generate();
console.info(styleText("green", `📝 Sonda report generated: ${reportPath}`));
}
/**
* Maps esbuild's ImportKind to Sonda's ConnectionKind.
*/
function connectionKindMapper$1(kind) {
switch (kind) {
case "entry-point": return "entrypoint";
case "import-statement":
case "import-rule": return "import";
case "require-call":
case "require-resolve": return "require";
case "dynamic-import": return "dynamic-import";
default: return "import";
}
}
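/**
* Illustrative sketch (not part of the original bundle): registering the esbuild
* integration. The import specifiers and the build options other than `plugins`
* are assumptions for the example; the plugin itself enables `metafile` and
* generates the report in its `onEnd` hook.
*
* @example
*   import { build } from "esbuild";
*   import { SondaEsbuildPlugin } from "sonda";
*
*   await build({
*     entryPoints: ["src/index.ts"],
*     bundle: true,
*     outdir: "dist",
*     sourcemap: true, // source maps let Sonda attribute bytes to individual sources
*     plugins: [SondaEsbuildPlugin({ gzip: true, brotli: true })]
*   });
*/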
//#endregion
//#region src/integrations/rollup.ts
function SondaRollupPlugin(userOptions = {}) {
const options = new Config(userOptions, { integration: "rollup" });
if (!options.enabled) return { name: "sonda/rollup" };
const report = new Report(options);
return {
name: "sonda/rollup",
async resolveId(source, importer, options$1) {
if (!importer) return;
const resolved = await this.resolve(source, importer, {
...options$1,
skipSelf: true
});
if (resolved) report.addConnection({
kind: "import",
source: normalizePath(importer),
target: normalizePath(resolved.id),
original: source
});
return resolved;
},
moduleParsed(module) {
const name = normalizePath(module.id);
report.addResource({
kind: "filesystem",
name,
type: getTypeByName(name),
format: getModuleFormat(name, module),
uncompressed: module.code ? Buffer.byteLength(module.code) : 0
});
},
async writeBundle({ dir, file }, bundle) {
const outputDir = resolve(process.cwd(), dir ?? dirname(file));
for (const [path, asset] of Object.entries(bundle)) report.addAsset(resolve(outputDir, path), asset.type === "chunk" && asset.facadeModuleId ? [asset.facadeModuleId] : void 0);
const reportPath = await report.generate();
this.info(styleText("green", `📝 Sonda report generated: ${reportPath}`));
}
};
}
function getModuleFormat(name, module) {
if (getTypeByName(name) !== "script") return "other";
const ext = extname(module.id);
return module.meta.commonjs?.isCommonJS === true || ext === ".cjs" || ext === ".cts" ? "cjs" : "esm";
}
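/**
* Illustrative sketch (not part of the original bundle): registering the Rollup
* integration. The import specifier and the surrounding config are assumptions for
* the example; the plugin records modules in `moduleParsed` and writes the report
* in `writeBundle`.
*
* @example
*   // rollup.config.js
*   import { SondaRollupPlugin } from "sonda";
*
*   export default {
*     input: "src/index.js",
*     output: { dir: "dist", format: "esm", sourcemap: true },
*     plugins: [SondaRollupPlugin({ format: "json", open: false })]
*   };
*/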
//#endregion
//#region src/integrations/vite.ts
function SondaVitePlugin(userOptions = {}) {
const options = new Config(userOptions, { integration: "vite" });
if (!options.enabled) return { name: "sonda/vite" };
return {
...SondaRollupPlugin(options),
name: "sonda/vite",
enforce: "pre",
apply: "build"
};
}
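/**
* Illustrative sketch (not part of the original bundle): the Vite integration wraps
* the Rollup plugin and only applies during `vite build`. The import specifier and
* the surrounding config are assumptions for the example.
*
* @example
*   // vite.config.js
*   import { defineConfig } from "vite";
*   import { SondaVitePlugin } from "sonda";
*
*   export default defineConfig({
*     build: { sourcemap: true },
*     plugins: [SondaVitePlugin({ deep: true })]
*   });
*/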
//#endregion
//#region src/integrations/webpack.ts
var SondaWebpackPlugin = class {
options;
constructor(userOptions = {}) {
this.options = new Config(userOptions, { integration: "webpack" });
}
apply(compiler) {
if (!this.options.enabled) return;
const report = new Report(this.options);
const namespace = compiler.options.output.devtoolNamespace || compiler.options.output.library?.name || "[^/]+/";
/**
* Regex that matches the default Webpack source map filename format
* (https://webpack.js.org/configuration/output/#outputdevtoolmodulefilenametemplate).
*
* Examples:
* - webpack://[namespace]/[path]?[loaders]
* - webpack://[namespace]?[loaders]
* - [namespace]/[path]?[loaders]
* - [path]?[loaders]
* - All of the above without `?[loaders]`
*
* While it doesn't cover all possible cases, it should be enough for now.
*
* Regex explanation:
* - (?:webpack://)? - Non-capturing group that matches the optional "webpack://" prefix
* - (?:${ namespace })? - Non-capturing group that matches the optional namespace
* - ([^?]*) - Matches the path, which is everything up to the first "?" (if present)
*/
const sourceMapFilenameRegex = /* @__PURE__ */ new RegExp(`(?:webpack://)?(?:${namespace})?([^?]*)`);
compiler.hooks.afterEmit.tapPromise("SondaWebpackPlugin", async (compilation) => {
for (const mod of compilation.modules) {
const name = mod.nameForCondition();
if (!name) continue;
const module = mod.modules?.find((module$1) => module$1.nameForCondition() === name) || mod;
const normalizedName = normalizePath(name);
report.addResource({
kind: "filesystem",
name: normalizedName,
type: getTypeByName(normalizedName),
format: getFormat(normalizedName, module),
uncompressed: module.size()
});
Array.from(compilation.moduleGraph.getOutgoingConnections(module)).filter((connection) => {
const target = connection.module?.nameForCondition();
return !!target && target !== name;
}).map((connection) => ({
kind: connectionKindMapper(connection),
target: normalizePath(connection.module?.nameForCondition()),
original: connection.dependency?.request
})).forEach(({ kind, target, original }) => report.addConnection({
kind,
source: normalizedName,
target,
original
}));
}
for (const name of Object.keys(compilation.assets)) {
let entry = void 0;
for (const chunk of compilation.chunks) {
if (!chunk.files.has(name)) continue;
entry = Array.from(compilation.chunkGraph.getChunkEntryModulesIterable(chunk)).map((module) => module.nameForCondition());
}
report.addAsset(join(compilation.outputOptions.path, name), entry);
}
this.options.sourcesPathNormalizer = (path) => {
if (!path.startsWith("webpack://")) return resolve(process.cwd(), path);
const [, filePath] = path.match(sourceMapFilenameRegex);
return filePath ? resolve(process.cwd(), filePath) : UNASSIGNED;
};
const reportPath = await report.generate();
compilation.getLogger("SondaWebpackPlugin").info(styleText("green", `📝 Sonda report generated: ${reportPath}`));
});
}
};
function getFormat(name, module) {
if (getTypeByName(name) !== "script") return "other";
return module.type === "javascript/esm" ? "esm" : "cjs";
}
/**
* Maps a Webpack module graph connection to Sonda's ConnectionKind.
*/
function connectionKindMapper(connection) {
if (!connection.dependency) return "import";
const { category, type } = connection.dependency;
if (category === "esm" && type === "import()") return "dynamic-import";
if (category === "esm" || category === "css-import") return "import";
if (category === "commonjs") return "require";
return "import";
}
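/**
* Illustrative sketch (not part of the original bundle): registering the Webpack
* integration. The import specifier and the surrounding config are assumptions for
* the example; the plugin hooks into `afterEmit` and resolves `webpack://` source
* map paths against the current working directory.
*
* @example
*   // webpack.config.mjs
*   import { SondaWebpackPlugin } from "sonda";
*
*   export default {
*     entry: "./src/index.js",
*     devtool: "source-map",
*     plugins: [new SondaWebpackPlugin({ format: "html", sources: true })]
*   };
*/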
//#endregion
export { Config, Report, SondaEsbuildPlugin, SondaRollupPlugin, SondaVitePlugin, SondaWebpackPlugin, getTypeByName, normalizePath, processEsbuildMetafile };