/*
 * UNPKG — vitest (bundled distribution file)
 *
 * Next generation testing framework powered by Vite.
 *
 * 1,288 lines (1,276 loc) · 493 kB
 *
 * NOTE: this is machine-generated bundler output. Do not hand-edit;
 * make changes in the vitest source repository instead.
 */
import fs, { promises, existsSync, mkdirSync, readFileSync, statSync, readdirSync, writeFileSync } from 'node:fs'; import { relative, resolve, dirname, join, extname, normalize, basename, isAbsolute } from 'pathe'; import { C as CoverageProviderMap } from './coverage.CTzCuANN.js'; import path, { resolve as resolve$1 } from 'node:path'; import { unique, createDefer, slash, withTrailingSlash, cleanUrl, wrapId, isExternalUrl, unwrapId, toArray, noop, deepMerge, nanoid, deepClone, isPrimitive, notNullish } from '@vitest/utils/helpers'; import { a as any, p as prompt } from './index.og1WyBLx.js'; import { i as isPackageExists, r as resolveModule } from './index.BCY_7LL2.js'; import * as vite from 'vite'; import { parseAst, searchForWorkspaceRoot, fetchModule, version, createServer, isFileLoadingAllowed, normalizePath, isRunnableDevEnvironment, mergeConfig } from 'vite'; import { A as API_PATH, c as configFiles, d as defaultBrowserPort, b as defaultPort } from './constants.CPYnjOGj.js'; import * as nodeos from 'node:os'; import nodeos__default, { tmpdir } from 'node:os'; import { generateHash as generateHash$1, createTaskName, validateTags, calculateSuiteHash, someTasksAreOnly, interpretTaskModes, hasFailed, generateFileHash, limitConcurrency, createFileTask as createFileTask$1, getTasks, isTestCase } from '@vitest/runner/utils'; import { SnapshotManager } from '@vitest/snapshot/manager'; import { serializeValue } from '@vitest/utils/serialize'; import { v as version$1 } from './cac.DRKYQDPl.js'; import { rootDir, distDir } from '../path.js'; import { f as createIndexLocationsMap, g as TraceMap, o as originalPositionFor, h as ancestor, s as stringify, p as parse, i as printError, j as Typechecker, k as generateCodeFrame, l as escapeRegExp, m as createDefinesScript, a as BenchmarkReportsMap, R as ReportersMap, n as groupBy, q as BlobReporter, r as readBlobs, t as convertTasksToEvents, H as HangingProcessReporter, w as wildcardPatternToRegExp, u as stdout } from 
'./index.0GYC6HAu.js'; import { N as NativeModuleRunner } from './nativeModuleRunner.BIakptoF.js'; import { T as Traces } from './traces.DT5aQ62U.js'; import { createDebug } from 'obug'; import { h as hash, R as RandomSequencer, i as isBrowserEnabled, r as resolveConfig, g as getCoverageProvider, b as resolveApiServerConfig } from './coverage.kqM80boz.js'; import { rm, readFile, writeFile, rename, stat, unlink, mkdir, copyFile } from 'node:fs/promises'; import c from 'tinyrainbow'; import { VitestModuleEvaluator } from '#module-evaluator'; import { ModuleRunner } from 'vite/module-runner'; import { Console } from 'node:console'; import { g as getDefaultExportFromCjs } from './_commonjsHelpers.D26ty3Ew.js'; import { g as formatProjectName, w as withLabel, e as errorBanner, d as divider } from './utils.DK8FXp4T.js'; import { createRequire, builtinModules, isBuiltin as isBuiltin$1 } from 'node:module'; import url, { fileURLToPath, pathToFileURL } from 'node:url'; import { i as isTTY, a as isWindows } from './env.D4Lgay0q.js'; import { isatty } from 'node:tty'; import crypto, { createHash } from 'node:crypto'; import EventEmitter$1, { EventEmitter } from 'node:events'; import { c as createBirpc } from './index.Chj8NDwU.js'; import { i as isBuiltin, t as toBuiltin } from './modules.BJuCwlRJ.js'; import { fork } from 'node:child_process'; import { Worker } from 'node:worker_threads'; import pm from 'picomatch'; import { glob, isDynamicPattern } from 'tinyglobby'; import { performance as performance$1 } from 'node:perf_hooks'; import require$$0$3 from 'events'; import require$$1$1 from 'https'; import require$$2 from 'http'; import require$$3 from 'net'; import require$$4 from 'tls'; import require$$1 from 'crypto'; import require$$0$2 from 'stream'; import require$$7 from 'url'; import require$$0 from 'zlib'; import require$$0$1 from 'buffer'; import { c as configDefaults } from './defaults.9aQKnqFk.js'; import { isAgent } from 'std-env'; import MagicString from 
'magic-string'; import { hoistMocksPlugin, automockPlugin } from '@vitest/mocker/node'; import { KNOWN_ASSET_RE } from '@vitest/utils/constants'; import { findNearestPackageData } from '@vitest/utils/resolver'; import * as esModuleLexer from 'es-module-lexer'; import assert$1 from 'node:assert'; import { parseErrorStacktrace } from '@vitest/utils/source-map'; import { extractSourcemapFromFile } from '@vitest/utils/source-map/node'; import { x } from 'tinyexec'; import readline from 'node:readline'; import { stripVTControlCharacters } from 'node:util'; function createDebugger(namespace) { const debug = createDebug(namespace); if (debug.enabled) return debug; } async function getSpecificationsOptions(specifications) { const environments = /* @__PURE__ */ new WeakMap(); const cache = /* @__PURE__ */ new Map(); const tags = /* @__PURE__ */ new WeakMap(); await Promise.all(specifications.map(async (spec) => { const { moduleId: filepath, project, pool } = spec; // browser pool handles its own environment if (pool === "browser") return; // reuse if projects have the same test files let code = cache.get(filepath); if (!code) { code = await promises.readFile(filepath, "utf-8").catch(() => ""); cache.set(filepath, code); } const { env = project.config.environment || "node", envOptions, tags: specTags = [] } = detectCodeBlock(code); tags.set(spec, specTags); const environment = { name: env, options: envOptions ? { [env === "happy-dom" ? 
"happyDOM" : env]: envOptions } : null }; environments.set(spec, environment); })); return { environments, tags }; } function detectCodeBlock(content) { const env = content.match(/@(?:vitest|jest)-environment\s+([\w-]+)\b/)?.[1]; let envOptionsJson = content.match(/@(?:vitest|jest)-environment-options\s+(.+)/)?.[1]; if (envOptionsJson?.endsWith("*/")) // Trim closing Docblock characters the above regex might have captured envOptionsJson = envOptionsJson.slice(0, -2); const envOptions = JSON.parse(envOptionsJson || "null"); const tags = []; let tagMatch; // eslint-disable-next-line no-cond-assign while (tagMatch = content.match(/(\/\/|\*)\s*@module-tag\s+([\w\-/]+)\b/)) { tags.push(tagMatch[2]); content = content.slice(tagMatch.index + tagMatch[0].length); } return { env, envOptions, tags }; } const debug$1 = createDebugger("vitest:ast-collect-info"); const verbose = createDebugger("vitest:ast-collect-verbose"); function isTestFunctionName(name) { return name === "it" || name === "test" || name.startsWith("test") || name.endsWith("Test"); } function isVitestFunctionName(name) { return name === "describe" || name === "suite" || isTestFunctionName(name); } function astParseFile(filepath, code) { const ast = parseAst(code); if (verbose) verbose("Collecting", filepath, code); else debug$1?.("Collecting", filepath); const definitions = []; const getName = (callee) => { if (!callee) return null; if (callee.type === "Identifier") return callee.name; if (callee.type === "CallExpression") return getName(callee.callee); if (callee.type === "TaggedTemplateExpression") return getName(callee.tag); if (callee.type === "MemberExpression") { if (callee.object?.type === "Identifier" && isVitestFunctionName(callee.object.name)) return callee.object?.name; if (callee.object?.name?.startsWith("__vite_ssr_") || callee.object?.object?.name?.startsWith("__vite_ssr_") && callee.object?.property?.name === "Vitest") return getName(callee.property); // call as `__vite_ssr__.test.skip()` or 
`describe.concurrent.each()` return getName(callee.object); } // unwrap (0, ...) if (callee.type === "SequenceExpression" && callee.expressions.length === 2) { const [e0, e1] = callee.expressions; if (e0.type === "Literal" && e0.value === 0) return getName(e1); } return null; }; const getProperties = (callee) => { if (!callee) return []; if (callee.type === "Identifier") return []; if (callee.type === "CallExpression") return getProperties(callee.callee); if (callee.type === "TaggedTemplateExpression") return getProperties(callee.tag); if (callee.type === "MemberExpression") { const props = getProperties(callee.object); if (callee.property?.name) props.push(callee.property.name); return props; } return []; }; ancestor(ast, { CallExpression(node) { const { callee } = node; const name = getName(callee); if (!name) return; if (!isVitestFunctionName(name)) { verbose?.(`Skipping ${name} (unknown call)`); return; } const properties = getProperties(callee); const property = callee?.property?.name; // intermediate calls like .each(), .for() will be picked up in the next iteration if (property && [ "each", "for", "skipIf", "runIf", "extend", "scoped", "override" ].includes(property)) return; // skip properties on return values of calls - e.g., test('name', fn).skip() if (callee.type === "MemberExpression" && callee.object?.type === "CallExpression") return; // derive mode from the full chain (handles any order like .skip.concurrent or .concurrent.skip) let mode = "run"; for (const prop of properties) if (prop === "skip" || prop === "only" || prop === "todo") mode = prop; else if (prop === "skipIf" || prop === "runIf") mode = "skip"; let isConcurrent = properties.includes("concurrent"); let isSequential = properties.includes("sequential"); let start; const end = node.end; // .each or (0, __vite_ssr_exports_0__.test)() if (callee.type === "CallExpression" || callee.type === "SequenceExpression" || callee.type === "TaggedTemplateExpression") start = callee.end; else start = 
node.start; const messageNode = node.arguments?.[0]; if (messageNode == null) { verbose?.(`Skipping node at ${node.start} because it doesn't have a name`); return; } let message; if (messageNode?.type === "Literal" || messageNode?.type === "TemplateLiteral") message = code.slice(messageNode.start + 1, messageNode.end - 1); else { message = code.slice(messageNode.start, messageNode.end); if (message.endsWith(".name")) message = message.slice(0, -5); } if (message.startsWith("0,")) message = message.slice(2); message = message.replace(/\(0\s?,\s?__vite_ssr_import_\d+__.(\w+)\)/g, "$1").replace(/__(vite_ssr_import|vi_import)_\d+__\./g, "").replace(/__vi_import_\d+__\./g, ""); const parentCalleeName = typeof callee?.callee === "object" && callee?.callee.type === "MemberExpression" && callee?.callee.property?.name; let isDynamicEach = parentCalleeName === "each" || parentCalleeName === "for"; if (!isDynamicEach && callee.type === "TaggedTemplateExpression") { const property = callee.tag?.property?.name; isDynamicEach = property === "each" || property === "for"; } // Extract options from the second argument if it's an options object const tags = []; const secondArg = node.arguments?.[1]; if (secondArg?.type === "ObjectExpression") for (const prop of secondArg.properties || []) { if (prop.type !== "Property" || prop.key?.type !== "Identifier") continue; const keyName = prop.key.name; if (keyName === "tags") { const tagsValue = prop.value; if (tagsValue?.type === "Literal" && typeof tagsValue.value === "string") tags.push(tagsValue.value); else if (tagsValue?.type === "ArrayExpression") { for (const element of tagsValue.elements || []) if (element?.type === "Literal" && typeof element.value === "string") tags.push(element.value); } } else if (prop.value?.type === "Literal" && prop.value.value === true) { if (keyName === "skip" || keyName === "only" || keyName === "todo") mode = keyName; else if (keyName === "concurrent") isConcurrent = true; else if (keyName === 
"sequential") isSequential = true; } } debug$1?.("Found", name, message, `(${mode})`, tags.length ? `[${tags.join(", ")}]` : ""); definitions.push({ start, end, name: message, type: isTestFunctionName(name) ? "test" : "suite", mode, task: null, dynamic: isDynamicEach, concurrent: isConcurrent, sequential: isSequential, tags }); } }); return { ast, definitions }; } function createFailedFileTask(project, filepath, error) { const testFilepath = relative(project.config.root, filepath); const file = { filepath, type: "suite", id: /* @__PURE__ */ generateHash$1(`${testFilepath}${project.config.name || ""}`), name: testFilepath, fullName: testFilepath, mode: "run", tasks: [], start: 0, end: 0, projectName: project.name, meta: {}, pool: project.browser ? "browser" : project.config.pool, file: null, result: { state: "fail", errors: serializeError(project, error) } }; file.file = file; return file; } function serializeError(ctx, error) { if ("errors" in error && "pluginCode" in error) return error.errors.map((e) => { return { name: error.name, message: e.text, stack: e.location ? 
`${error.name}: ${e.text}\n at ${relative(ctx.config.root, e.location.file)}:${e.location.line}:${e.location.column}` : "" }; }); return [{ name: error.name, stack: error.stack, message: error.message }]; } function createFileTask(testFilepath, code, requestMap, config, filepath, fileTags) { const { definitions, ast } = astParseFile(testFilepath, code); const file = { filepath, type: "suite", id: /* @__PURE__ */ generateHash$1(`${testFilepath}${config.name || ""}`), name: testFilepath, fullName: testFilepath, mode: "run", tasks: [], start: ast.start, end: ast.end, projectName: config.name, meta: {}, pool: "browser", file: null, tags: fileTags || [] }; file.file = file; const indexMap = createIndexLocationsMap(code); const map = requestMap && new TraceMap(requestMap); let lastSuite = file; const updateLatestSuite = (index) => { while (lastSuite.suite && lastSuite.end < index) lastSuite = lastSuite.suite; return lastSuite; }; definitions.sort((a, b) => a.start - b.start).forEach((definition) => { const latestSuite = updateLatestSuite(definition.start); let mode = definition.mode; if (latestSuite.mode !== "run") // inherit suite mode, if it's set mode = latestSuite.mode; const processedLocation = indexMap.get(definition.start); let location; if (map && processedLocation) { const originalLocation = originalPositionFor(map, { line: processedLocation.line, column: processedLocation.column }); if (originalLocation.column != null) { verbose?.(`Found location for`, definition.type, definition.name, `${processedLocation.line}:${processedLocation.column}`, "->", `${originalLocation.line}:${originalLocation.column}`); location = { line: originalLocation.line, column: originalLocation.column }; } else debug$1?.("Cannot find original location for", definition.type, definition.name, `${processedLocation.column}:${processedLocation.line}`); } else debug$1?.("Cannot find original location for", definition.type, definition.name, `${definition.start}`); const taskTags = 
unique([...latestSuite.tags || [], ...definition.tags]); // resolve concurrent/sequential: sequential cancels inherited concurrent const concurrent = definition.sequential ? void 0 : definition.concurrent || latestSuite.concurrent || void 0; if (definition.type === "suite") { const task = { type: definition.type, id: "", suite: latestSuite, file, tasks: [], mode, each: definition.dynamic, concurrent, name: definition.name, fullName: createTaskName([latestSuite.fullName, definition.name]), fullTestName: createTaskName([latestSuite.fullTestName, definition.name]), end: definition.end, start: definition.start, location, dynamic: definition.dynamic, meta: {}, tags: taskTags }; definition.task = task; latestSuite.tasks.push(task); lastSuite = task; return; } validateTags(config, taskTags); const task = { type: definition.type, id: "", suite: latestSuite, file, each: definition.dynamic, concurrent, mode, context: {}, name: definition.name, fullName: createTaskName([latestSuite.fullName, definition.name]), fullTestName: createTaskName([latestSuite.fullTestName, definition.name]), end: definition.end, start: definition.start, location, dynamic: definition.dynamic, meta: {}, timeout: 0, annotations: [], artifacts: [], tags: taskTags }; definition.task = task; latestSuite.tasks.push(task); }); calculateSuiteHash(file); const hasOnly = someTasksAreOnly(file); interpretTaskModes(file, config.testNamePattern, void 0, void 0, void 0, hasOnly, false, config.allowOnly); markDynamicTests(file.tasks); if (!file.tasks.length) file.result = { state: "fail", errors: [{ name: "Error", message: `No test suite found in file ${filepath}` }] }; return file; } async function astCollectTests(project, filepath) { const request = await transformSSR(project, filepath); const testFilepath = relative(project.config.root, filepath); if (!request) { debug$1?.("Cannot parse", testFilepath, "(vite didn't return anything)"); return createFailedFileTask(project, filepath, /* @__PURE__ */ new 
Error(`Failed to parse ${testFilepath}. Vite didn't return anything.`)); } return createFileTask(testFilepath, request.code, request.map, project.serializedConfig, filepath, request.fileTags); } async function transformSSR(project, filepath) { const { env: pragmaEnv, tags: fileTags } = detectCodeBlock(await promises.readFile(filepath, "utf-8").catch(() => "")); // Use environment from pragma if defined, otherwise fall back to config const environment = pragmaEnv || project.config.environment; const transformResult = await (environment === "jsdom" || environment === "happy-dom" ? project.vite.environments.client : project.vite.environments.ssr).transformRequest(filepath); return transformResult ? { ...transformResult, fileTags } : null; } function markDynamicTests(tasks) { for (const task of tasks) { if (task.dynamic) task.id += "-dynamic"; if ("tasks" in task) markDynamicTests(task.tasks); } } function escapeRegex(str) { return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); } const kReplacers = new Map([ ["%i", "\\d+?"], ["%#", "\\d+?"], ["%d", "[\\d.eE+-]+?"], ["%f", "[\\d.eE+-]+?"], ["%s", ".+?"], ["%j", ".+?"], ["%o", ".+?"], ["%%", "%"] ]); function escapeTestName(label, dynamic) { if (!dynamic) return escapeRegex(label); // Replace object access patterns ($value, $obj.a) with %s first let pattern = label.replace(/\$[a-z_.]+/gi, "%s"); pattern = escapeRegex(pattern); // Replace percent placeholders with their respective regex pattern = pattern.replace(/%[i#dfsjo%]/g, (m) => kReplacers.get(m) || m); return pattern; } class BrowserSessions { sessions = /* @__PURE__ */ new Map(); sessionIds = /* @__PURE__ */ new Set(); getSession(sessionId) { return this.sessions.get(sessionId); } destroySession(sessionId) { this.sessions.delete(sessionId); } createSession(sessionId, project, pool) { // this promise only waits for the WS connection with the orchestrator to be established const defer = createDefer(); const timeout = setTimeout(() => { defer.reject(/* @__PURE__ */ new 
Error(`Failed to connect to the browser session "${sessionId}" [${project.name}] within the timeout.`)); }, project.vitest.config.browser.connectTimeout ?? 6e4).unref(); this.sessions.set(sessionId, { project, connected: () => { defer.resolve(); clearTimeout(timeout); }, fail: (error) => { defer.resolve(); clearTimeout(timeout); pool.reject(error); } }); return defer; } } class FilesStatsCache { cache = /* @__PURE__ */ new Map(); getStats(key) { return this.cache.get(key); } async populateStats(root, specs) { const promises = specs.map((spec) => { const key = `${spec.project.name}:${relative(root, spec.moduleId)}`; return this.updateStats(spec.moduleId, key); }); await Promise.all(promises); } async updateStats(fsPath, key) { if (!fs.existsSync(fsPath)) return; const stats = await fs.promises.stat(fsPath); this.cache.set(key, { size: stats.size }); } removeStats(fsPath) { this.cache.forEach((_, key) => { if (key.endsWith(fsPath)) this.cache.delete(key); }); } } class ResultsCache { cache = /* @__PURE__ */ new Map(); workspacesKeyMap = /* @__PURE__ */ new Map(); cachePath = null; version; root = "/"; constructor(logger) { this.logger = logger; this.version = Vitest.version; } getCachePath() { return this.cachePath; } setConfig(root, config) { this.root = root; if (config) this.cachePath = resolve(config.dir, "results.json"); } getResults(key) { return this.cache.get(key); } async clearCache() { if (this.cachePath && existsSync(this.cachePath)) { await rm(this.cachePath, { force: true, recursive: true }); this.logger.log("[cache] cleared results cache at", this.cachePath); } } async readFromCache() { if (!this.cachePath) return; if (!fs.existsSync(this.cachePath)) return; const resultsCache = await fs.promises.readFile(this.cachePath, "utf8"); const { results, version } = JSON.parse(resultsCache || "[]"); const [major, minor] = version.split("."); // handling changed in 0.30.0 if (major > 0 || Number(minor) >= 30) { this.cache = new Map(results); this.version = 
version; results.forEach(([spec]) => { const [projectName, relativePath] = spec.split(":"); const keyMap = this.workspacesKeyMap.get(relativePath) || []; keyMap.push(projectName); this.workspacesKeyMap.set(relativePath, keyMap); }); } } updateResults(files) { files.forEach((file) => { const result = file.result; if (!result) return; const duration = result.duration || 0; // store as relative, so cache would be the same in CI and locally const relativePath = relative(this.root, file.filepath); this.cache.set(`${file.projectName || ""}:${relativePath}`, { duration: duration >= 0 ? duration : 0, failed: result.state === "fail" }); }); } removeFromCache(filepath) { this.cache.forEach((_, key) => { if (key.endsWith(filepath)) this.cache.delete(key); }); } async writeToCache() { if (!this.cachePath) return; const results = Array.from(this.cache.entries()); const cacheDirname = dirname(this.cachePath); if (!fs.existsSync(cacheDirname)) await fs.promises.mkdir(cacheDirname, { recursive: true }); const cache = JSON.stringify({ version: this.version, results }); await fs.promises.writeFile(this.cachePath, cache); } } class VitestCache { results; stats = new FilesStatsCache(); constructor(logger) { this.results = new ResultsCache(logger); } getFileTestResults(key) { return this.results.getResults(key); } getFileStats(key) { return this.stats.getStats(key); } static resolveCacheDir(root, dir, projectName) { return resolve(root, slash(dir || "node_modules/.vite"), "vitest", hash("sha1", projectName || "", "hex")); } } const debugFs = createDebugger("vitest:cache:fs"); const debugMemory = createDebugger("vitest:cache:memory"); const cacheComment = "\n//# vitestCache="; const cacheCommentLength = 17; const METADATA_FILE = "_metadata.json"; const parallelFsCacheRead = /* @__PURE__ */ new Map(); /** * @experimental */ class FileSystemModuleCache { /** * Even though it's possible to override the folder of project's caches * We still keep a single metadata file for all projects 
because * - they can reference files between each other * - lockfile changes are reflected for the whole workspace, not just for a single project */ rootCache; metadataFilePath; version = "1.0.0-beta.4"; fsCacheRoots = /* @__PURE__ */ new WeakMap(); fsEnvironmentHashMap = /* @__PURE__ */ new WeakMap(); fsCacheKeyGenerators = /* @__PURE__ */ new Set(); // this exists only to avoid the perf. cost of reading a file and generating a hash again // surprisingly, on some machines this has negligible effect fsCacheKeys = /* @__PURE__ */ new WeakMap(); constructor(vitest) { this.vitest = vitest; const workspaceRoot = searchForWorkspaceRoot(vitest.vite.config.root); this.rootCache = vitest.config.experimental.fsModuleCachePath || join(workspaceRoot, "node_modules", ".experimental-vitest-cache"); this.metadataFilePath = join(this.rootCache, METADATA_FILE); } defineCacheKeyGenerator(callback) { this.fsCacheKeyGenerators.add(callback); } async clearCache(log = true) { const fsCachePaths = this.vitest.projects.map((r) => { return r.config.experimental.fsModuleCachePath || this.rootCache; }); const uniquePaths = Array.from(new Set(fsCachePaths)); await Promise.all(uniquePaths.map((directory) => rm(directory, { force: true, recursive: true }))); if (log) this.vitest.logger.log(`[cache] cleared fs module cache at ${uniquePaths.join(", ")}`); } readCachedFileConcurrently(cachedFilePath) { if (!parallelFsCacheRead.has(cachedFilePath)) parallelFsCacheRead.set(cachedFilePath, readFile(cachedFilePath, "utf-8").then((code) => { const matchIndex = code.lastIndexOf(cacheComment); if (matchIndex === -1) { debugFs?.(`${c.red("[empty]")} ${cachedFilePath} exists, but doesn't have a ${cacheComment} comment, transforming by vite instead`); return; } return { code, meta: this.fromBase64(code.slice(matchIndex + cacheCommentLength)) }; }).finally(() => { parallelFsCacheRead.delete(cachedFilePath); })); return parallelFsCacheRead.get(cachedFilePath); } async getCachedModule(cachedFilePath) { if 
(!existsSync(cachedFilePath)) { debugFs?.(`${c.red("[empty]")} ${cachedFilePath} doesn't exist, transforming by vite first`); return; } const fileResult = await this.readCachedFileConcurrently(cachedFilePath); if (!fileResult) return; const { code, meta } = fileResult; debugFs?.(`${c.green("[read]")} ${meta.id} is cached in ${cachedFilePath}`); return { id: meta.id, url: meta.url, file: meta.file, code, importedUrls: meta.importedUrls, mappings: meta.mappings }; } async saveCachedModule(cachedFilePath, fetchResult, importedUrls = [], mappings = false) { if ("code" in fetchResult) { const result = { file: fetchResult.file, id: fetchResult.id, url: fetchResult.url, importedUrls, mappings }; debugFs?.(`${c.yellow("[write]")} ${fetchResult.id} is cached in ${cachedFilePath}`); await atomicWriteFile(cachedFilePath, `${fetchResult.code}${cacheComment}${this.toBase64(result)}`); } } toBase64(obj) { const json = stringify(obj); return Buffer.from(json).toString("base64"); } fromBase64(obj) { return parse(Buffer.from(obj, "base64").toString("utf-8")); } invalidateCachePath(environment, id) { debugFs?.(`cache for ${id} in ${environment.name} environment is invalidated`); this.fsCacheKeys.get(environment)?.delete(id); } invalidateAllCachePaths(environment) { debugFs?.(`the ${environment.name} environment cache is invalidated`); this.fsCacheKeys.get(environment)?.clear(); } getMemoryCachePath(environment, id) { const result = this.fsCacheKeys.get(environment)?.get(id); if (result != null) debugMemory?.(`${c.green("[read]")} ${id} was cached in ${result}`); else if (result === null) debugMemory?.(`${c.green("[read]")} ${id} was bailed out`); return result; } generateCachePath(vitestConfig, environment, id, fileContent) { // bail out if file has import.meta.glob because it depends on other files // TODO: figure out a way to still support it if (fileContent.includes("import.meta.glob(")) { this.saveMemoryCache(environment, id, null); debugMemory?.(`${c.yellow("[write]")} ${id} 
was bailed out because it has "import.meta.glob"`); return null; } let hashString = ""; for (const generator of this.fsCacheKeyGenerators) { const result = generator({ environment, id, sourceCode: fileContent }); if (typeof result === "string") hashString += result; if (result === false) { this.saveMemoryCache(environment, id, null); debugMemory?.(`${c.yellow("[write]")} ${id} was bailed out by a custom generator`); return null; } } const config = environment.config; // coverage provider is dynamic, so we also clear the whole cache if // vitest.enableCoverage/vitest.disableCoverage is called const coverageAffectsCache = String(this.vitest.config.coverage.enabled && this.vitest.coverageProvider?.requiresTransform?.(id)); let cacheConfig = this.fsEnvironmentHashMap.get(environment); if (!cacheConfig) { cacheConfig = JSON.stringify({ root: config.root, base: config.base, mode: config.mode, consumer: config.consumer, resolve: config.resolve, plugins: config.plugins.filter((p) => p.api?.vitest?.experimental?.ignoreFsModuleCache !== true).map((p) => p.name), configFileDependencies: config.configFileDependencies.map((file) => tryReadFileSync(file)), environment: environment.name, css: vitestConfig.css }, (_, value) => { if (typeof value === "function" || value instanceof RegExp) return value.toString(); return value; }); this.fsEnvironmentHashMap.set(environment, cacheConfig); } hashString += id + fileContent + (process.env.NODE_ENV ?? 
"") + this.version + cacheConfig + coverageAffectsCache; const cacheKey = hash("sha1", hashString, "hex"); let cacheRoot = this.fsCacheRoots.get(vitestConfig); if (cacheRoot == null) { cacheRoot = vitestConfig.experimental.fsModuleCachePath || this.rootCache; this.fsCacheRoots.set(vitestConfig, cacheRoot); if (!existsSync(cacheRoot)) mkdirSync(cacheRoot, { recursive: true }); } const fsResultPath = join(cacheRoot, cacheKey); debugMemory?.(`${c.yellow("[write]")} ${id} generated a cache in ${fsResultPath}`); this.saveMemoryCache(environment, id, fsResultPath); return fsResultPath; } saveMemoryCache(environment, id, cache) { let environmentKeys = this.fsCacheKeys.get(environment); if (!environmentKeys) { environmentKeys = /* @__PURE__ */ new Map(); this.fsCacheKeys.set(environment, environmentKeys); } environmentKeys.set(id, cache); } async readMetadata() { // metadata is shared between every projects in the workspace, so we ignore project's fsModuleCachePath if (!existsSync(this.metadataFilePath)) return; try { const content = await readFile(this.metadataFilePath, "utf-8"); return JSON.parse(content); } catch {} } // before vitest starts running tests, we check that the lockfile wasn't updated // if it was, we nuke the previous cache in case a custom plugin was updated // or a new version of vite/vitest is installed // for the same reason we also cache config file content, but that won't catch changes made in external plugins async ensureCacheIntegrity() { if (![this.vitest.getRootProject(), ...this.vitest.projects].some((p) => p.config.experimental.fsModuleCache)) return; const metadata = await this.readMetadata(); const currentLockfileHash = getLockfileHash(this.vitest.vite.config.root); // no metadata found, just store a new one, don't reset the cache if (!metadata) { if (!existsSync(this.rootCache)) mkdirSync(this.rootCache, { recursive: true }); debugFs?.(`fs metadata file was created with hash ${currentLockfileHash}`); await writeFile(this.metadataFilePath, 
JSON.stringify({ lockfileHash: currentLockfileHash }, null, 2), "utf-8"); return; } // if lockfile didn't change, don't do anything if (metadata.lockfileHash === currentLockfileHash) return; // lockfile changed, let's clear all caches await this.clearCache(false); this.vitest.vite.config.logger.info(`fs cache was cleared because lockfile has changed`, { timestamp: true, environment: c.yellow("[vitest]") }); debugFs?.(`fs cache was cleared because lockfile has changed`); } } /** * Performs an atomic write operation using the write-then-rename pattern. * * Why we need this: * - Ensures file integrity by never leaving partially written files on disk * - Prevents other processes from reading incomplete data during writes * - Particularly important for test files where incomplete writes could cause test failures * * The implementation writes to a temporary file first, then renames it to the target path. * This rename operation is atomic on most filesystems (including POSIX-compliant ones), * guaranteeing that other processes will only ever see the complete file. 
 *
 * Added in https://github.com/vitest-dev/vitest/pull/7531
 */
async function atomicWriteFile(realFilePath, data) {
  // Unique sibling temp file: staying in the same directory keeps the rename
  // on one filesystem, where rename is atomic.
  const tmpFilePath = join(dirname(realFilePath), `.tmp-${Date.now()}-${Math.random().toString(36).slice(2)}`);
  try {
    await writeFile(tmpFilePath, data, "utf-8");
    await rename(tmpFilePath, realFilePath);
  } finally {
    // Best-effort cleanup. If the rename succeeded, stat() throws and the
    // unlink is skipped; any cleanup failure is deliberately ignored.
    try {
      if (await stat(tmpFilePath)) await unlink(tmpFilePath);
    } catch {}
  }
}
// lockfile hash resolution taken from vite
// since this is experimental, we don't ask to expose it
//
// Known lockfile locations per package manager. `checkPatchesDir`, when set,
// names a patches directory whose mtime is folded into the hash as well, so
// patched dependencies also invalidate the cache.
const lockfileFormats = [
  { path: "node_modules/.package-lock.json", checkPatchesDir: "patches", manager: "npm" },
  { path: "node_modules/.yarn-state.yml", checkPatchesDir: false, manager: "yarn" },
  { path: ".pnp.cjs", checkPatchesDir: ".yarn/patches", manager: "yarn" },
  { path: ".pnp.js", checkPatchesDir: ".yarn/patches", manager: "yarn" },
  { path: "node_modules/.yarn-integrity", checkPatchesDir: "patches", manager: "yarn" },
  { path: "node_modules/.pnpm/lock.yaml", checkPatchesDir: false, manager: "pnpm" },
  { path: ".rush/temp/shrinkwrap-deps.json", checkPatchesDir: false, manager: "pnpm" },
  { path: "bun.lock", checkPatchesDir: "patches", manager: "bun" },
  { path: "bun.lockb", checkPatchesDir: "patches", manager: "bun" }
].sort((_, { manager }) => {
  // Move the formats of the package manager that launched this process to the
  // front so its lockfile is found first.
  return process.env.npm_config_user_agent?.startsWith(manager) ? 1 : -1;
});
const lockfilePaths = lockfileFormats.map((l) => l.path);
/**
 * Computes a short (8-character) hash that identifies the current dependency
 * state of `root`. Returns a padded constant-length value even when no
 * lockfile is found (empty content hashed).
 */
function getLockfileHash(root) {
  const lockfilePath = lookupFile(root, lockfilePaths);
  let content = lockfilePath ? fs.readFileSync(lockfilePath, "utf-8") : "";
  if (lockfilePath) {
    const normalizedLockfilePath = lockfilePath.replaceAll("\\", "/");
    const lockfileFormat = lockfileFormats.find((f) => normalizedLockfilePath.endsWith(f.path));
    if (lockfileFormat.checkPatchesDir) {
      // Fold the patches directory's mtime into the hashed content so applying
      // or changing patches invalidates the hash too.
      const stat = tryStatSync(join(lockfilePath.slice(0, -lockfileFormat.path.length), lockfileFormat.checkPatchesDir));
      if (stat?.isDirectory()) content += stat.mtimeMs.toString();
    }
  }
  return hash("sha256", content, "hex").substring(0, 8).padEnd(8, "_");
}
/**
 * Walks from `dir` up to the filesystem root looking for the first existing
 * file among `fileNames`. Returns its full path, or undefined if none exists.
 */
function lookupFile(dir, fileNames) {
  while (dir) {
    for (const fileName of fileNames) {
      const fullPath = join(dir, fileName);
      if (tryStatSync(fullPath)?.isFile()) return fullPath;
    }
    const parentDir = dirname(dir);
    // dirname of the root is the root itself — stop there.
    if (parentDir === dir) return;
    dir = parentDir;
  }
}
// Reads a file as UTF-8, returning "" instead of throwing on any error.
function tryReadFileSync(file) {
  try {
    return readFileSync(file, "utf-8");
  } catch {
    return "";
  }
}
// Stats a path, returning undefined instead of throwing when it does not exist.
function tryStatSync(file) {
  try {
    // The "throwIfNoEntry" is a performance optimization for cases where the file does not exist
    return fs.statSync(file, { throwIfNoEntry: false });
  } catch {}
}
// this is copy pasted from vite
function normalizeResolvedIdToUrl(environment, resolvedId) {
  const root = environment.config.root;
  const depsOptimizer = environment.depsOptimizer;
  let url;
  // normalize all imports into resolved URLs
  // e.g. `import 'foo'` -> `import '/@fs/.../node_modules/foo/index.js'`
  if (resolvedId.startsWith(withTrailingSlash(root)))
    // in root: infer short absolute path from root
    url = resolvedId.slice(root.length);
  else if (depsOptimizer?.isOptimizedDepFile(resolvedId) || resolvedId !== "/@react-refresh" && path.isAbsolute(resolvedId) && existsSync(cleanUrl(resolvedId)))
    // an optimized deps may not yet exist in the filesystem, or
    // a regular file exists but is out of root: rewrite to absolute /@fs/ paths
    url = path.posix.join("/@fs/", resolvedId);
  else
    url = resolvedId;
  // if the resolved id is not a valid browser import specifier,
  // prefix it to make it valid. We will strip this before feeding it
  // back into the transform pipeline
  if (url[0] !== "." && url[0] !== "/") url = wrapId(resolvedId);
  return url;
}
// Module-level dedup maps shared by all ModuleFetcher instances:
// in-flight cache writes keyed by cache path, and in-flight file reads keyed
// by file path.
const saveCachePromises = /* @__PURE__ */ new Map();
const readFilePromises = /* @__PURE__ */ new Map();
/**
 * Fetches and transforms modules through a Vite environment, layering several
 * caches on top of the plain transform:
 * - the environment module graph's in-memory `transformResult`,
 * - an optional experimental fs cache (`experimental.fsModuleCache`),
 * - per-environment tmp copies of transformed code when `makeTmpCopies` is
 *   set, so pools can read the code from disk instead of receiving it over
 *   IPC (see the forks-pool note in `fetch`).
 */
class ModuleFetcher {
  tmpDirectories = /* @__PURE__ */ new Set();
  fsCacheEnabled;
  constructor(resolver, config, fsCache, tmpProjectDir) {
    this.resolver = resolver;
    this.config = config;
    this.fsCache = fsCache;
    this.tmpProjectDir = tmpProjectDir;
    // Strict equality: the fs module cache is opt-in only when explicitly `true`.
    this.fsCacheEnabled = config.experimental?.fsModuleCache === true;
  }
  /**
   * Resolves `url` (relative to `importer`) in `environment` and returns one
   * of several result shapes: an `{ externalize, type }` record for modules
   * that should not be transformed, `{ cache: true }` when the caller's
   * cached copy is still valid, a `{ tmp }`-backed record pointing at code on
   * disk, or a full transform result with inline `code`.
   * `trace` is a span that receives diagnostic attributes along the way.
   */
  async fetch(trace, url, importer, environment, makeTmpCopies, options) {
    // data: URLs are evaluated by the runtime directly.
    if (url.startsWith("data:")) {
      trace.setAttribute("vitest.module.external", url);
      return { externalize: url, type: "builtin" };
    }
    if (url === "/@vite/client" || url === "@vite/client") {
      trace.setAttribute("vitest.module.external", url);
      return { externalize: "/@vite/client", type: "module" };
    }
    // file:// URLs are NOT treated as external — they go through the pipeline.
    const isFileUrl = url.startsWith("file://");
    if (isExternalUrl(url) && !isFileUrl) {
      trace.setAttribute("vitest.module.external", url);
      return { externalize: url, type: "network" };
    }
    // handle unresolved id of dynamic import skipped by Vite import analysis
    if (url[0] !== "/") {
      const resolved = await environment.pluginContainer.resolveId(url, importer);
      if (resolved) url = normalizeResolvedIdToUrl(environment, resolved.id);
    }
    const moduleGraphModule = await environment.moduleGraph.ensureEntryFromUrl(unwrapId(url));
    const cached = !!moduleGraphModule.transformResult;
    if (moduleGraphModule.file) trace.setAttribute("code.file.path", moduleGraphModule.file);
    // The caller declared it already holds a cached copy; confirm it is valid.
    if (options?.cached && cached) return { cache: true };
    const externalize = await this.resolver.shouldExternalize(moduleGraphModule.id);
    if (externalize) return { externalize, type: "module" };
    // null cache path means fs caching is disabled (or not applicable).
    const cachePath = await this.getCachePath(environment, moduleGraphModule);
    // full fs caching is disabled, but we still want to keep tmp files if makeTmpCopies is enabled
    // this is primarily used by the forks pool to avoid using process.send(bigBuffer)
    if (cachePath == null) {
      const result = await this.fetchAndProcess(environment, url, importer, moduleGraphModule, options);
      this.recordResult(trace, result);
      if (!makeTmpCopies || !("code" in result)) return result;
      const transformResult = moduleGraphModule.transformResult;
      // A previous fetch may have already written a tmp copy; reuse its path.
      const tmpPath = transformResult && Reflect.get(transformResult, "_vitest_tmp");
      if (typeof tmpPath === "string") return getCachedResult(result, tmpPath);
      const tmpDir = join(this.tmpProjectDir, environment.name);
      if (!this.tmpDirectories.has(tmpDir)) {
        if (!existsSync(tmpDir)) mkdirSync(tmpDir, { recursive: true });
        this.tmpDirectories.add(tmpDir);
      }
      const tmpFile = join(tmpDir, hash("sha1", result.id, "hex"));
      return this.cacheResult(result, tmpFile).then((result) => {
        // Remember the tmp path on the transform result for later fetches.
        if (transformResult) Reflect.set(transformResult, "_vitest_tmp", tmpFile);
        return result;
      });
    }
    // Another fetch is already writing this cache entry — piggyback on it.
    if (saveCachePromises.has(cachePath)) return saveCachePromises.get(cachePath).then((result) => {
      this.recordResult(trace, result);
      return result;
    });
    const cachedModule = await this.getCachedModule(cachePath, environment, moduleGraphModule, importer);
    if (cachedModule) {
      this.recordResult(trace, cachedModule);
      return cachedModule;
    }
    const result = await this.fetchAndProcess(environment, url, importer, moduleGraphModule, options);
    const importedUrls = this.getSerializedImports(moduleGraphModule);
    const map = moduleGraphModule.transformResult?.map;
    // A map without a `version` and with empty `mappings` is Rollup's special
    // "no source map" marker (see getCachedModule below).
    const mappings = map && !("version" in map) && map.mappings === "";
    return this.cacheResult(result, cachePath, importedUrls, !!mappings);
  }
  // we need this for UI to be able to show a module graph
  getSerializedImports(node) {
    const imports = [];
    node.importedModules.forEach((importer) => {
      imports.push(importer.url);
    });
    return imports;
  }
  // Mirrors the fetched-module metadata onto the trace span attributes.
  recordResult(trace, result) {
    if ("externalize" in result) trace.setAttributes({
      "vitest.fetched_module.external": result.externalize,
      "vitest.fetched_module.type": result.type
    });
    if ("id" in result) {
      trace.setAttributes({
        "vitest.fetched_module.invalidate": result.invalidate,
        "vitest.fetched_module.id": result.id,
        "vitest.fetched_module.url": result.url,
        "vitest.fetched_module.cache": false
      });
      if (result.file) trace.setAttribute("code.file.path", result.file);
    }
    if ("code" in result) trace.setAttribute("vitest.fetched_module.code_length", result.code.length);
  }
  /**
   * Returns the fs-cache path for a module, or null when the module should
   * not be fs-cached (feature disabled, or the cache says so).
   */
  async getCachePath(environment, moduleGraphModule) {
    if (!this.fsCacheEnabled) return null;
    const moduleId = moduleGraphModule.id;
    const memoryCacheKey = this.fsCache.getMemoryCachePath(environment, moduleId);
    // undefined means there is no key in memory
    // null means the file should not be cached
    if (memoryCacheKey !== void 0) return memoryCacheKey;
    const fileContent = await this.readFileContentToCache(environment, moduleGraphModule);
    return this.fsCache.generateCachePath(this.config, environment, moduleGraphModule.id, fileContent);
  }
  /**
   * Reads the module's source content used as input for the cache key:
   * the file on disk when the module is a real file, otherwise whatever the
   * plugin container's load hook produces ("" if nothing loads it).
   */
  async readFileContentToCache(environment, moduleGraphModule) {
    // "\0"- and "virtual:"-prefixed ids are virtual modules with no real file.
    if (moduleGraphModule.file && !moduleGraphModule.file.startsWith("\0") && !moduleGraphModule.file.startsWith("virtual:")) {
      const result = await this.readFileConcurrently(moduleGraphModule.file);
      if (result != null) return result;
    }
    const loadResult = await environment.pluginContainer.load(moduleGraphModule.id);
    if (typeof loadResult === "string") return loadResult;
    if (loadResult != null) return loadResult.code;
    return "";
  }
  /**
   * Looks up the fs cache for a previously transformed module. On a hit,
   * restores the module graph state (transformResult, importers, imported
   * modules) and returns a tmp-backed result record; returns undefined on a
   * miss.
   */
  async getCachedModule(cachePath, environment, moduleGraphModule, importer) {
    // __vitestTmp on the transform result marks a module already restored
    // from the fs cache in this process.
    if (moduleGraphModule.transformResult?.__vitestTmp) return {
      cached: true,
      file: moduleGraphModule.file,
      id: moduleGraphModule.id,
      tmp: moduleGraphModule.transformResult.__vitestTmp,
      url: moduleGraphModule.url,
      invalidate: false
    };
    const cachedModule = await this.fsCache.getCachedModule(cachePath);
    if (!cachedModule) return;
    // keep the module graph in sync
    let map = extractSourceMap(cachedModule.code);
    if (map && cachedModule.file) map.file = cachedModule.file;
    // mappings is a special source map identifier in rollup
    if (!map && cachedModule.mappings) map = { mappings: "" };
    moduleGraphModule.transformResult = {
      code: cachedModule.code,
      map,
      ssr: true,
      __vitestTmp: cachePath
    };
    // we populate the module graph to make the watch mode work because it relies on importers
    if (importer) {
      const environmentNode = environment.moduleGraph.getModuleById(importer);
      if (environmentNode) moduleGraphModule.importers.add(environmentNode);
    }
    await Promise.all(cachedModule.importedUrls.map(async (url) => {
      const moduleNode = await environment.moduleGraph.ensureEntryFromUrl(url).catch(() => null);
      if (moduleNode) {
        moduleNode.importers.add(moduleGraphModule);
        moduleGraphModule.importedModules.add(moduleNode);
      }
    }));
    return {
      cached: true,
      file: cachedModule.file,
      id: cachedModule.id,
      tmp: cachePath,
      url: cachedModule.url,
      invalidate: false
    };
  }
  // Runs Vite's fetchModule (source maps kept external) and post-processes
  // the result; Rollup errors are re-thrown in a serializable shape.
  async fetchAndProcess(environment, url, importer, moduleGraphModule, options) {
    return processResultSource(environment, await fetchModule(environment, url, importer, {
      ...options,
      inlineSourceMap: false
    }).catch(handleRollupError));
  }
  /**
   * Persists a transform result to `cachePath`, deduplicating concurrent
   * writes to the same path via the module-level saveCachePromises map.
   * Returns a tmp-backed record when the result carries code.
   */
  async cacheResult(result, cachePath, importedUrls = [], mappings = false) {
    const returnResult = "code" in result ? getCachedResult(result, cachePath) : result;
    if (saveCachePromises.has(cachePath)) {
      await saveCachePromises.get(cachePath);
      return returnResult;
    }
    const savePromise = this.fsCache.saveCachedModule(cachePath, result, importedUrls, mappings).then(() => result).finally(() => {
      saveCachePromises.delete(cachePath);
    });
    saveCachePromises.set(cachePath, savePromise);
    await savePromise;
    return returnResult;
  }
  // Deduplicates concurrent reads of the same file; resolves to null when the
  // read fails.
  readFileConcurrently(file) {
    if (!readFilePromises.has(file)) readFilePromises.set(
      file,
      // virtual file can have a "file" property
      readFile(file, "utf-8").catch(() => null).finally(() => {
        readFilePromises.delete(file);
      })
    );
    return readFilePromises.get(file);
  }
}
/**
 * Builds the fetch-module function shared by the pools: wraps a
 * ModuleFetcher so that every call runs inside a "vitest.module.transform"
 * trace span, optionally continuing a trace context extracted from
 * `otelCarrier`.
 */
function createFetchModuleFunction(resolver, config, fsCache, traces, tmpProjectDir) {
  const fetcher = new ModuleFetcher(resolver, config, fsCache, tmpProjectDir);
  return async (url, importer, environment, cacheFs, options, otelCarrier) => {
    await traces.waitInit();
    const context = otelCarrier ? traces.getContextFromCarrier(otelCarrier) : void 0;
    return traces.$("vitest.module.transform", context ? { context } : {}, (span) => fetcher.fetch(span, url, importer, environment, cacheFs, options));
  };
}
// Built in two pieces so this bundled file itself does not contain the
// literal "sourceMappingURL" marker (which tooling scanning for source map
// annotations could misinterpret).
let SOURCEMAPPING_URL = "sourceMa";
SOURCEMAPPING_URL += "ppingURL";
const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
// Inlines the source map into the module-graph transform result (if any) and
// returns the fetch result with that code.
function processResultSource(environment, result) {
  if (!("code" in result)) return result;
  const node = environment.moduleGraph.getModuleById(result.id);
  if (node?.transformResult)
    // this also overrides node.transformResult.code which is also what the module
    // runner does under the hood by default (we disable source maps inlining)
    inlineSourceMap(node.transformResult);
  return {
    ...result,
    code: node?.transformResult?.code || result.code
  };
}
const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
// we have to inline the source map ourselves, because
// - we don't need //# sourceURL since we are running code in VM
// - important in stack traces and the V8 coverage
// - we need to inject an empty line for --inspect-brk
function inlineSourceMap(result) {
  const map = result.map;
  let code = result.code;
  // Skip when there is no real (versioned) map or the code was already inlined.
  if (!map || !("version" in map) || code.includes(MODULE_RUNNER_SOURCEMAPPING_SOURCE)) return result;
  // to reduce the payload size, we only inline vite node source map, because it's also the only one we use
  // (reset lastIndex: the regexp is /g and test() advances it statefully)
  OTHER_SOURCE_MAP_REGEXP.lastIndex = 0;
  if (OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
  const sourceMap = { ...map };
  // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
  // so that debuggers can be set to break on first line
  if (sourceMap.mappings[0] === ";") sourceMap.mappings = `AAAA,CAAA${sourceMap.mappings}`;
  result.code = `${code.trimEnd()}\n${MODULE_RUNNER_SOURCEMAPPING_SOURCE}\n//# ${SOURCEMAPPING_URL}=${genSourceMapUrl(sourceMap)}\n`;
  return result;
}
// Serializes a source map (object or string) into a base64 data: URL.
function genSourceMapUrl(map) {
  if (typeof map !== "string") map = JSON.stringify(map);
  return `data:application/json;base64,${Buffer.from(map).toString("base64")}`;
}
// Shapes a full transform result into the lightweight tmp-file-backed record
// that is sent to the pools instead of the code itself.
function getCachedResult(result, tmp) {
  return {
    cached: true,
    file: result.file,
    id: result.id,
    tmp,
    url: result.url,
    invalidate: result.invalidate
  };
}
const MODULE_RUNNER_SOURCEMAPPING_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json;base64,(.+)`);
// Extracts the last inlined base64 source map from `code`, undoing the
// first-line mapping added by inlineSourceMap. Returns null when absent.
function extractSourceMap(code) {
  const pattern = `//# ${SOURCEMAPPING_URL}=data:application/json;base64,`;
  const lastIndex = code.lastIndexOf(pattern);
  if (lastIndex === -1) return null;
  const mapString = MODULE_RUNNER_SOURCEMAPPING_REGEXP.exec(code.slice(lastIndex))?.[1];
  if (!mapString) return null;
  const sourceMap = JSON.parse(Buffer.from(mapString, "base64").toString("utf-8"));
  // remove source map mapping added by "inlineSourceMap" to keep the original behaviour of transformRequest
  if (sourceMap.mappings.startsWith("AAAA,CAAA;"))
    // 9 because we want to only remove "AAAA,CAAA", but keep ; at the start
    sourceMap.mappings = sourceMap.mappings.slice(9);
  return sourceMap;
}
// serialize rollup error on server to preserve details as a test error
function handleRollupError(e) {
  if (e instanceof Error && ("plugin" in e || "frame" in e || "id" in e))
    // eslint-disable-next-line no-throw-literal
    throw {
      name: e.name,
      message: e.message,
      stack: e.stack,
      cause: e.cause,
      __vitest_rollup_error__: {
        plugin: e.plugin,
        id: e.id,
        loc: e.loc,
        frame: e.frame
      }
    };
  throw e;
}
/**
 * Module runner used on the Vitest server itself. HMR is disabled; the
 * transport only answers "fetchModule" (delegated to the fetcher, reading
 * tmp-backed results from disk) and "getBuiltins" (delegated to the
 * environment's hot channel).
 */
class ServerModuleRunner extends ModuleRunner {
  constructor(environment, fetcher, config) {
    super({
      hmr: false,
      transport: {
        async invoke(event) {
          if (event.type !== "custom") throw new Error(`Vitest Module Runner doesn't support Vite HMR events.`);
          const { name, data } = event.data;
          if (name === "getBuiltins") return await environment.hot.handleInvoke(event);
          if (name !== "fetchModule") return { error: /* @__PURE__ */ new Error(`Unknown method: ${name}. Expected "fetchModule".`) };
          try {
            const result = await fetcher(data[0], data[1], environment, false, data[2]);
            // tmp-backed result: read the transformed code from disk before
            // handing it to the evaluator.
            if ("tmp" in result) {
              const code = await readFile(result.tmp);
              return { result: { ...result, code } };
            }
            return { result };
          } catch (error) {
            return { error };
          }
        }
      }
    }, new VitestModuleEvaluator());
    this.environment = environment;
    this.config = config;
  }
  async import(rawId) {
    const resolved = await this.environment.pluginContainer.resolveId(rawId, this.config.root);
    if (!resolved) return super.import(rawId);
    // Vite will make "@vitest/coverage-v8" into "@vitest/coverage-v8.js" url
    // instead of using an actual file path-like URL, so we resolve it here first
    const url = normalizeResolvedIdToUrl(this.environment, resolved.id);
    return super.import(url);
  }
}
// Thrown when no test/benchmark files match; `code` lets callers detect it.
class FilesNotFoundError extends Error {
  code = "VITEST_FILES_NOT_FOUND";
  constructor(mode) {
    super(`No ${mode} files found`);
  }
}
class GitNotFoundError extends Error {
  code = "VITEST_GIT_NOT_FOUND";
  constructor() {
    super("Could not find Git root. Have you initialized git wi