/*
cspell
Version:
A Spelling Checker for Code!
1,473 lines (1,450 loc) • 93.9 kB
JavaScript
*/
import { createRequire } from "node:module";
import chalk, { Chalk } from "chalk";
import { isAsyncIterable, opFilter, opMap, opTap, operators, pipeAsync, pipeAsync as asyncPipe, toAsyncIterable, toAsyncIterable as mergeAsyncIterables } from "@cspell/cspell-pipe";
import * as cspell from "cspell-lib";
import { ENV_CSPELL_GLOB_ROOT, IncludeExcludeFlag, SuggestionError, Text, checkTextDocument, combineTextAndLanguageSettings, createPerfTimer, extractDependencies, extractImportErrors, fileToDocument, getDefaultSettings, getDictionary, getGlobalSettingsAsync, getSystemFeatureFlags, isBinaryFile, isSpellingDictionaryLoadError, mergeSettings, setLogger, shouldCheckDocument, spellCheckDocument, suggestionsForWords, traceWordsAsync } from "cspell-lib";
import assert from "node:assert";
import { format, formatWithOptions } from "node:util";
import { isUrlLike, toFileDirURL, toFilePathOrHref, toFileURL, urlRelative } from "@cspell/url";
import { makeTemplate } from "chalk-template";
import fs, { stat } from "node:fs/promises";
import { MutableCSpellConfigFile, createReaderWriter, cspellConfigFileSchema, isCfgArrayNode } from "cspell-config-lib";
import { promises } from "node:fs";
import { fileURLToPath } from "node:url";
import * as path$1 from "node:path";
import path, { isAbsolute, posix, relative, resolve, sep } from "node:path";
import { opMap as opMap$1, pipe } from "@cspell/cspell-pipe/sync";
import { IssueType, MessageTypes, unknownWordsChoices } from "@cspell/cspell-types";
import { _debug } from "cspell-dictionary";
import { GitIgnore, findRepoRoot } from "cspell-gitignore";
import { GlobMatcher, fileOrGlobToGlob, workaroundPicomatchBug } from "cspell-glob";
import crypto from "node:crypto";
import streamConsumers from "node:stream/consumers";
import { readFileText, toURL } from "cspell-io";
import { glob } from "tinyglobby";
import * as readline from "node:readline";
import { parse, stringify } from "flatted";
import { dynamicImport } from "@cspell/dynamic-import";
//#region src/console.ts
/**
 * Wraps a writable stream (stdout/stderr) with small output helpers.
 * All methods are arrow-function instance fields so they remain bound to the
 * instance even when the channel object is spread/copied.
 */
var ImplChannel = class {
	constructor(stream) {
		this.stream = stream;
	}
	/** Write raw text to the underlying stream. */
	write = (msg) => this.stream.write(msg);
	/** Write text followed by a newline. */
	writeLine = (msg) => this.write(`${msg}\n`);
	/** Clear the current terminal line when supported; otherwise return false. */
	clearLine = (dir, callback) => {
		return this.stream.clearLine?.(dir, callback) ?? false;
	};
	/** printf-style line output honoring the stream's color support; no args prints a blank line. */
	printLine = (...params) => {
		const text = params.length ? formatWithOptions({ colors: this.stream.hasColors?.() }, ...params) : "";
		return this.writeLine(text);
	};
	/** Chalk color level (0-3) supported by the stream. */
	getColorLevel = () => getColorLevel(this.stream);
};
/**
 * Console facade used by the CLI; routes log/info to stdout and
 * error/warn to stderr through ImplChannel wrappers.
 */
var Console = class {
	stderrChannel;
	stdoutChannel;
	/**
	 * @param stdout - output stream (defaults to process.stdout).
	 * @param stderr - error stream (defaults to process.stderr).
	 */
	constructor(stdout = process.stdout, stderr = process.stderr) {
		this.stdout = stdout;
		this.stderr = stderr;
		this.stderrChannel = new ImplChannel(stderr);
		this.stdoutChannel = new ImplChannel(stdout);
	}
	log = (...p) => this.stdoutChannel.printLine(...p);
	error = (...p) => this.stderrChannel.printLine(...p);
	info = this.log;
	warn = this.error;
};
const console = new Console();
/**
 * Translate a stream's reported color depth into a chalk color level.
 * @param stream - stream that may implement `getColorDepth()`.
 * @returns 0 (no color), 1 (16 colors), 2 (256 colors), or 3 (truecolor).
 */
function getColorLevel(stream) {
	const depth = stream.getColorDepth?.() || 0;
	if (depth === 1) return 1;
	if (depth === 4) return 2;
	if (depth === 24) return 3;
	return 0;
}
//#endregion
//#region src/util/errors.ts
/** Error raised when the spell check ran but found problems; carries the exit code. */
var CheckFailed = class extends Error {
	/** Process exit code to report (default 1). */
	exitCode;
	constructor(message, exitCode = 1) {
		super(message);
		this.exitCode = exitCode;
	}
};
/** General application failure; carries an exit code and an optional cause. */
var ApplicationError = class extends Error {
	/** Process exit code to report (default 1). */
	exitCode;
	constructor(message, exitCode = 1, cause) {
		super(message);
		this.exitCode = exitCode;
		this.cause = cause;
	}
};
/**
 * ApplicationError specialized for file-system failures; exposes the
 * errno-style `code` of the causing error.
 */
var IOError = class extends ApplicationError {
	constructor(message, cause) {
		super(message, void 0, cause);
		this.cause = cause;
	}
	/** The errno-style code (e.g. "ENOENT") from the underlying error. */
	get code() {
		return this.cause.code;
	}
	/** True when the underlying failure was a missing file or directory. */
	isNotFound() {
		return this.code === "ENOENT";
	}
};
/** Coerce an arbitrary thrown value into an Error instance. */
function toError(e) {
	if (isError(e)) return e;
	if (isErrorLike(e)) {
		// Wrap error-like objects, preserving the message and errno-style code.
		const err = new Error(e.message, { cause: e });
		if (e.code !== void 0) err.code = e.code;
		return err;
	}
	return new Error(format(e));
}
/** Type guard: is the value an actual Error instance? */
function isError(e) {
	return e instanceof Error;
}
/** Type guard: does the value look like an error (object with string `message`)? */
function isErrorLike(e) {
	if (e instanceof Error) return true;
	if (!e || typeof e !== "object") return false;
	return typeof e.message === "string";
}
/**
 * Wrap any thrown value in an ApplicationError.
 * An existing ApplicationError passes through unless a new message is given.
 */
function toApplicationError(e, message) {
	if (!message && e instanceof ApplicationError) return e;
	const err = toError(e);
	return new ApplicationError(message ?? err.message, void 0, err);
}
//#endregion
//#region src/util/util.ts
/**
 * Create a filter function that passes a value only the first time its
 * extracted key is seen.
 * @param extractFn - optional key extractor; defaults to identity.
 */
function uniqueFilterFnGenerator(extractFn) {
	const seen = /* @__PURE__ */ new Set();
	const toKey = extractFn || ((a) => a);
	return (v) => {
		const key = toKey(v);
		if (seen.has(key)) return false;
		seen.add(key);
		return true;
	};
}
/** Short alias for uniqueFilterFnGenerator. */
const uniqueFn = uniqueFilterFnGenerator;
/**
 * Removes all properties with a value of `undefined` from the object (in place).
 * @param src - the object to clean.
 * @returns the same object instance with `undefined`-valued properties removed.
 */
function clean(src) {
	for (const [key, value] of Object.entries(src)) {
		if (value === void 0) delete src[key];
	}
	return src;
}
//#endregion
//#region src/cli-reporter.ts
// Templates used to render a single reported issue. `$name` placeholders are
// substituted by `formatIssue`; `{color ...}` spans are chalk-template markup.
const templateIssue = `{green $filename}:{yellow $row:$col} - $message ({red $text}) $quickFix`;
const templateIssueNoFix = `{green $filename}:{yellow $row:$col} - $message ({red $text})`;
const templateIssueWithSuggestions = `{green $filename}:{yellow $row:$col} - $message ({red $text}) Suggestions: {yellow [$suggestions]}`;
const templateIssueWithContext = `{green $filename}:{yellow $row:$col} $padRowCol- $message ({red $text})$padContext -- {gray $contextLeft}{red {underline $text}}{gray $contextRight}`;
const templateIssueWithContextWithSuggestions = `{green $filename}:{yellow $row:$col} $padRowCol- $message ({red $text})$padContext -- {gray $contextLeft}{red {underline $text}}{gray $contextRight}\n\t Suggestions: {yellow [$suggestions]}`;
const templateIssueLegacy = `{green $filename}[$row, $col]: $message: {red $text}`;
const templateIssueWordsOnly = "$text";
// No-op assertion — likely a bundler artifact; kept as-is.
assert(true);
/**
 * Create an emitter that formats and writes a single spelling issue.
 * @param stdIO - channel (with chalk) used to write the issue to stdout.
 * @param errIO - channel (with chalk) used to format issues stored in the collection.
 * @param template - The template to use for the issue.
 * @param uniqueIssues - If true, only unique issues (by word text) will be reported.
 * @param reportedIssuesCollection - optional collection to store reported issues.
 * @returns issueEmitter function
 */
function genIssueEmitter(stdIO, errIO, template, uniqueIssues, reportedIssuesCollection) {
const uniqueFilter = uniqueIssues ? uniqueFilterFnGenerator((issue) => issue.text) : () => true;
const defaultWidth = 10;
let maxWidth = defaultWidth;
let uri;
return function issueEmitter(issue) {
if (!uniqueFilter(issue)) return;
// Reset the padding width when moving on to a new file.
if (uri !== issue.uri) {
maxWidth = defaultWidth;
uri = issue.uri;
}
// Decay the alignment width slowly (0.1% per issue) while tracking the
// widest recent word, with a floor of 10 columns.
maxWidth = Math.max(maxWidth * .999, issue.text.length, 10);
const issueText = formatIssue(stdIO, template, issue, Math.ceil(maxWidth));
reportedIssuesCollection?.push(formatIssue(errIO, template, issue, Math.ceil(maxWidth)));
stdIO.writeLine(issueText);
};
}
/** Shared no-op emitter used when a category of output is disabled. */
function nullEmitter() {}
/**
 * Render a file URI relative to `rootURL`; fall back to the full
 * path/href when the file lies outside the root.
 */
function relativeUriFilename(uri, rootURL) {
	const url = toFileURL(uri);
	const rel = urlRelative(rootURL, url);
	return rel.startsWith("..") ? toFilePathOrHref(url) : rel;
}
/** Dispatch a progress notification to the matching renderer. */
function reportProgress(io, p, cwdURL, options) {
	switch (p.type) {
		case "ProgressFileComplete":
			return reportProgressFileComplete(io, p, cwdURL, options);
		case "ProgressFileBegin":
			return reportProgressFileBegin(io, p, cwdURL);
	}
}
/**
 * Build the right-aligned "fileNum/fileCount" index string and the
 * gray-styled relative filename for progress output.
 */
function determineFilename(io, p, cwd) {
	const total = "" + p.fileCount;
	const padded = (" ".repeat(total.length) + p.fileNum).slice(-total.length);
	const idx = padded + "/" + total;
	const filename = io.chalk.gray(relativeUriFilename(p.filename, cwd));
	return { idx, filename };
}
/**
 * Show "n/total <filename>" (carriage-return terminated so it can be
 * overwritten) as a file begins, only on color/TTY-capable streams.
 */
function reportProgressFileBegin(io, p, cwdURL) {
	const { idx, filename } = determineFilename(io, p, cwdURL);
	if (io.getColorLevel() > 0) {
		io.clearLine?.(0);
		// Fix: interpolate the filename — the output previously contained the
		// literal text "$(unknown)" and `filename` was never used.
		io.write(`${idx} ${filename}\r`);
	}
}
/**
 * Show completion status for a file: index, filename, elapsed time, and
 * skipped/error markers. Ends with "\n" when the line should persist
 * (skipped+verbose, errors, slow, or no color support), otherwise "\r".
 */
function reportProgressFileComplete(io, p, cwd, options) {
	const { idx, filename } = determineFilename(io, p, cwd);
	const { verbose, debug } = options;
	const time = reportTime(io, p.elapsedTimeMs, !!p.cached);
	const skipped = p.processed === false ? " skipped" : "";
	const hasErrors = p.numErrors ? io.chalk.red` X` : "";
	const newLine = skipped && (verbose || debug) || hasErrors || isSlow(p.elapsedTimeMs) || io.getColorLevel() < 1 ? "\n" : "";
	// Fix: interpolate the filename — the output previously contained the
	// literal text "$(unknown)" and `filename` was never used.
	const msg = `${idx} ${filename} ${time}${skipped}${hasErrors}${newLine || "\r"}`;
	io.write(msg);
}
/**
 * Format elapsed time for progress output: green "cached", "-" when
 * unknown, and white/yellow/bright-red depending on slowness.
 */
function reportTime(io, elapsedTimeMs, cached) {
	if (cached) return io.chalk.green("cached");
	if (elapsedTimeMs === void 0) return "-";
	const slowness = isSlow(elapsedTimeMs);
	const color = slowness === 0 ? io.chalk.white : slowness === 1 ? io.chalk.yellow : io.chalk.redBright;
	return color(elapsedTimeMs.toFixed(2) + "ms");
}
/**
 * Classify elapsed time: 0 = fast (missing or < 1s), 1 = slow (1-2s),
 * 2 = very slow (>= 2s).
 */
function isSlow(elapsedTimeMs) {
	if (!elapsedTimeMs) return 0;
	if (elapsedTimeMs < 1e3) return 0;
	if (elapsedTimeMs < 2e3) return 1;
	return 2;
}
/**
 * Create the reporter used by the lint command: issue, info, debug,
 * progress, and result emitters wired to stdout/stderr with color support.
 * @param options - CLI options (verbosity, color, templates, root, ...).
 * @param config - reporter configuration (unique, issueTemplate, console).
 * @returns the reporter object consumed by the lint pipeline.
 */
function getReporter(options, config) {
// Running totals for the optional performance summary.
const perfStats = {
filesProcessed: 0,
filesSkipped: 0,
filesCached: 0,
elapsedTimeMs: 0,
perf: Object.create(null)
};
const noColor = options.color === false;
const forceColor = options.color === true;
const uniqueIssues = config?.unique || false;
// Pick the issue template from the CLI flags (wordsOnly/legacy/context/suggestions).
const defaultIssueTemplate = options.wordsOnly ? templateIssueWordsOnly : options.legacy ? templateIssueLegacy : options.showContext ? options.showSuggestions ? templateIssueWithContextWithSuggestions : templateIssueWithContext : options.showSuggestions ? templateIssueWithSuggestions : options.showSuggestions === false ? templateIssueNoFix : templateIssue;
const { fileGlobs, silent, summary, issues, progress: showProgress, verbose, debug } = options;
const issueTemplate = config?.issueTemplate || defaultIssueTemplate;
// Fail fast on an invalid template before any files are processed.
assertCheckTemplate(issueTemplate);
const console$1 = config?.console || console;
// Forced color uses chalk level 2; otherwise follow the stream's capability.
const colorLevel = noColor ? 0 : forceColor ? 2 : console$1.stdoutChannel.getColorLevel();
// Channel copies with a chalk instance attached. Spreading works because the
// channel methods are bound arrow-function fields.
const stdio = {
...console$1.stdoutChannel,
chalk: new Chalk({ level: colorLevel })
};
const stderr = {
...console$1.stderrChannel,
chalk: new Chalk({ level: colorLevel })
};
const consoleError = (msg) => stderr.writeLine(msg);
function createInfoLog(wrap) {
return (msg) => console$1.info(wrap(msg));
}
// Per message-type emitters; Debug/Info are no-ops unless enabled.
const emitters = {
Debug: !silent && debug ? createInfoLog(stdio.chalk.cyan) : nullEmitter,
Info: !silent && verbose ? createInfoLog(stdio.chalk.yellow) : nullEmitter,
Warning: createInfoLog(stdio.chalk.yellow)
};
function infoEmitter(message, msgType) {
emitters[msgType]?.(message);
}
const rootURL = toFileDirURL(options.root || process.cwd());
// Wrap an issue emitter so each issue carries filename/fullFilename,
// relative to root when --relative is requested.
function relativeIssue(fn) {
const fnFilename = options.relative ? (uri) => relativeUriFilename(uri, rootURL) : (uri) => toFilePathOrHref(toFileURL(uri, rootURL));
return (i) => {
const fullFilename = i.uri ? toFilePathOrHref(toFileURL(i.uri, rootURL)) : "";
const filename = i.uri ? fnFilename(i.uri) : "";
const r = {
...i,
filename,
fullFilename
};
fn(r);
};
}
// NOTE(review): repeatIssues is hard-coded off, so issuesCollection stays
// undefined and the "Issues found:" replay below never runs — presumably a
// disabled feature toggle from the original source; confirm upstream.
const repeatIssues = false;
const issuesCollection = void 0;
const errorCollection = [];
function errorEmitter(message, error) {
// Surface the underlying cause of dictionary load failures.
if (isSpellingDictionaryLoadError(error)) error = error.cause;
const errorText = formatWithOptions({ colors: stderr.stream.hasColors?.() }, stderr.chalk.red(message), debug ? error : error.toString());
errorCollection?.push(errorText);
consoleError(errorText);
}
// Emit the run summary (and optional perf stats) to stderr.
const resultEmitter = (result) => {
if (!fileGlobs.length && !result.files) return;
const { files, issues: issues$1, cachedFiles, filesWithIssues, errors } = result;
const numFilesWithIssues = filesWithIssues.size;
// Clear any progress line still showing on the terminal.
if (stderr.getColorLevel() > 0) {
stderr.write("\r");
stderr.clearLine(0);
}
if (issuesCollection?.length || errorCollection?.length) consoleError("-------------------------------------------");
if (issuesCollection?.length) {
consoleError("Issues found:");
issuesCollection.forEach((issue) => consoleError(issue));
}
const cachedFilesText = cachedFiles ? ` (${cachedFiles} from cache)` : "";
const withErrorsText = errors ? ` with ${errors} error${errors === 1 ? "" : "s"}` : "";
const numFilesWidthIssuesText = numFilesWithIssues === 1 ? "1 file" : `${numFilesWithIssues} files`;
const summaryMessage = `CSpell\u003A Files checked: ${files}${cachedFilesText}, Issues found: ${issues$1} in ${numFilesWidthIssuesText}${withErrorsText}.`;
consoleError(summaryMessage);
// Replay collected errors when many issues may have scrolled them away.
if (errorCollection?.length && issues$1 > 5) {
consoleError("-------------------------------------------");
consoleError("Errors:");
errorCollection.forEach((error) => consoleError(error));
}
if (options.showPerfSummary) {
consoleError("-------------------------------------------");
consoleError("Performance Summary:");
consoleError(` Files Processed: ${perfStats.filesProcessed.toString().padStart(6)}`);
consoleError(` Files Skipped : ${perfStats.filesSkipped.toString().padStart(6)}`);
consoleError(` Files Cached : ${perfStats.filesCached.toString().padStart(6)}`);
consoleError(` Processing Time: ${perfStats.elapsedTimeMs.toFixed(2).padStart(9)}ms`);
consoleError("Stats:");
const stats = Object.entries(perfStats.perf).filter((p) => !!p[1]).map(([key, value]) => [key, value.toFixed(2)]);
const padName = Math.max(...stats.map((s) => s[0].length));
const padValue = Math.max(...stats.map((s) => s[1].length));
stats.sort((a, b) => a[0].localeCompare(b[0]));
for (const [key, value] of stats) value && consoleError(` ${key.padEnd(padName)}: ${value.padStart(padValue)}ms`);
}
};
// Accumulate counters from a completed file's progress record.
function collectPerfStats(p) {
if (p.cached) {
perfStats.filesCached++;
return;
}
perfStats.filesProcessed += p.processed ? 1 : 0;
perfStats.filesSkipped += !p.processed ? 1 : 0;
perfStats.elapsedTimeMs += p.elapsedTimeMs || 0;
if (!p.perf) return;
for (const [key, value] of Object.entries(p.perf)) if (typeof value === "number") perfStats.perf[key] = (perfStats.perf[key] || 0) + value;
}
function progress(p) {
if (!silent && showProgress) reportProgress(stderr, p, rootURL, options);
if (p.type === "ProgressFileComplete") collectPerfStats(p);
}
return {
issue: relativeIssue(silent || !issues ? nullEmitter : genIssueEmitter(stdio, stderr, issueTemplate, uniqueIssues, issuesCollection)),
error: silent ? nullEmitter : errorEmitter,
info: infoEmitter,
debug: emitters.Debug,
progress,
result: !silent && summary ? resultEmitter : nullEmitter,
features: void 0
};
}
/**
 * Render one issue through the chalk template with `$var` substitutions.
 * @param io - channel providing a chalk instance.
 * @param templateStr - issue template (see the templateIssue* constants).
 * @param issue - issue to render; `context` defaults to the issue's line.
 * @param maxIssueTextWidth - width used for `$padContext` alignment.
 */
function formatIssue(io, templateStr, issue, maxIssueTextWidth) {
	// Collapse whitespace runs so context renders on one line.
	// Fix: added the global flag — previously only the FIRST whitespace run
	// was collapsed (`/\s+/` without `g`).
	function clean$1(t$1) {
		return t$1.replace(/\s+/g, " ");
	}
	const { uri = "", filename, row, col, text, context = issue.line, offset } = issue;
	const contextLeft = clean$1(context.text.slice(0, offset - context.offset));
	const contextRight = clean$1(context.text.slice(offset + text.length - context.offset));
	const contextFull = clean$1(context.text);
	const padContext = " ".repeat(Math.max(maxIssueTextWidth - text.length, 0));
	const rowText = row.toString();
	const colText = col.toString();
	const padRowCol = " ".repeat(Math.max(1, 8 - (rowText.length + colText.length)));
	const suggestions$1 = formatSuggestions(io, issue);
	const msg = issue.message || (issue.isFlagged ? "Forbidden word" : "Unknown word");
	const messageColored = issue.isFlagged ? `{yellow ${msg}}` : msg;
	const substitutions = {
		$col: colText,
		$contextFull: contextFull,
		$contextLeft: contextLeft,
		$contextRight: contextRight,
		$filename: filename,
		$padContext: padContext,
		$padRowCol: padRowCol,
		$row: rowText,
		$suggestions: suggestions$1,
		$text: text,
		$uri: uri,
		$quickFix: formatQuickFix(io, issue),
		$message: msg,
		$messageColored: messageColored
	};
	// $messageColored may itself contain chalk markup, so expand it before
	// compiling the template.
	const t = templateStr.replaceAll("$messageColored", messageColored);
	const chalkTemplate = makeTemplate(io.chalk);
	return substitute(chalkTemplate(t), substitutions).trimEnd();
}
/**
 * Join an issue's suggestions into a comma-separated string.
 * `suggestionsEx` entries win; preferred ones render bold-italic with a `*`.
 */
function formatSuggestions(io, issue) {
	const { suggestionsEx, suggestions } = issue;
	if (suggestionsEx) {
		const parts = suggestionsEx.map((sug) => {
			const word = sug.wordAdjustedToMatchCase || sug.word;
			return sug.isPreferred ? io.chalk.italic(io.chalk.bold(word)) + "*" : word;
		});
		return parts.join(", ");
	}
	return suggestions ? suggestions.join(", ") : "";
}
/**
 * Render the preferred suggestions as a `fix: (...)` hint.
 * Returns "" when there are no preferred suggestions.
 */
function formatQuickFix(io, issue) {
	const suggestions = issue.suggestionsEx;
	if (!suggestions?.length) return "";
	const fixes = [];
	for (const sug of suggestions) {
		if (!sug.isPreferred) continue;
		fixes.push(io.chalk.italic(io.chalk.yellow(sug.wordAdjustedToMatchCase || sug.word)));
	}
	if (!fixes.length) return "";
	return `fix: (${fixes.join(", ")})`;
}
/**
 * Replace `$variable` placeholders in `text` with their substitution values.
 * A placeholder only matches when followed by a word boundary, so `$row`
 * does not consume the front of a longer name like `$rowText`.
 */
function substitute(text, substitutions) {
// Collected replacements as [start, end, replacementText] triples.
const subs = [];
for (const [match, replaceWith] of Object.entries(substitutions)) {
const len = match.length;
for (let i$1 = text.indexOf(match); i$1 >= 0; i$1 = text.indexOf(match, i$1)) {
const end = i$1 + len;
// Sticky word-boundary check anchored right after the match.
const reg = /\b/y;
reg.lastIndex = end;
if (reg.test(text)) subs.push([
i$1,
end,
replaceWith
]);
i$1 = end;
}
}
subs.sort((a, b) => a[0] - b[0]);
let i = 0;
// Stitch the output: the untouched text before each replacement, then the
// replacement itself; `i` tracks how far the input has been consumed.
function sub(r) {
const [a, b, t] = r;
const prefix = text.slice(i, a);
i = b;
return prefix + t;
}
const parts = subs.map(sub);
return parts.join("") + text.slice(i);
}
/** Validate an issue template, throwing the resulting error when invalid. */
function assertCheckTemplate(template) {
	const result = checkTemplate(template);
	if (result instanceof Error) throw result;
}
/**
 * Check that an issue template is renderable: the chalk-template markup must
 * parse and every `$variable` must be a known substitution.
 * @returns `true` when valid, otherwise an ApplicationError describing the problem.
 */
function checkTemplate(template) {
	const chalkTemplate = makeTemplate(new Chalk());
	const substitutions = {
		$col: "<col>",
		$contextFull: "<contextFull>",
		$contextLeft: "<contextLeft>",
		$contextRight: "<contextRight>",
		$filename: "<filename>",
		$padContext: "<padContext>",
		$padRowCol: "<padRowCol>",
		$row: "<row>",
		$suggestions: "<suggestions>",
		$text: "<text>",
		$uri: "<uri>",
		$quickFix: "<quickFix>",
		$message: "<message>",
		$messageColored: "<messageColored>"
	};
	try {
		const rendered = chalkTemplate(template);
		const result = substitute(rendered, substitutions);
		// Anything still looking like `$name` is an unknown variable.
		const unresolved = [...result.matchAll(/\$[a-z]+/gi)].map((m) => m[0]);
		if (unresolved.length) {
			const plural = unresolved.length > 1 ? "s" : "";
			throw new Error(`Unresolved template variable${plural}: ${unresolved.map((v) => `'${v}'`).join(", ")}`);
		}
		return true;
	} catch (e) {
		return new ApplicationError(e instanceof Error ? e.message : `${e}`);
	}
}
//#endregion
//#region src/config/adjustConfig.ts
/**
 * Determine whether a URL points at an existing regular file.
 * Non-`file:` URLs yield false; a missing path yields false; any other
 * stat failure is re-thrown.
 */
async function fileExists(url) {
	if (url.protocol !== "file:") return false;
	try {
		return (await promises.stat(url)).isFile();
	} catch (e) {
		if (toError(e).code === "ENOENT") return false;
		throw e;
	}
}
/**
 * Normalize a list of config imports relative to the config file:
 * - non-file URLs pass through unchanged;
 * - existing files become paths relative to the config dir (prefixed "./");
 * - otherwise, resolvable package names pass through;
 * - anything else raises an error.
 */
async function resolveImports(configFile, imports) {
	const fromConfigDir = new URL("./", configFile.url);
	const fromCurrentDir = toFileDirURL("./");
	const require = createRequire(fromConfigDir);
	// Can `name$1` be resolved as a package from the config directory?
	function isPackageName(name$1) {
		try {
			require.resolve(name$1, { paths: [fileURLToPath(fromConfigDir)] });
			return true;
		} catch {
			return false;
		}
	}
	const _imports = [];
	for (const imp of imports) {
		const url = new URL(imp, fromCurrentDir);
		if (url.protocol !== "file:") {
			_imports.push(imp);
			continue;
		}
		if (await fileExists(url)) {
			let rel = urlRelative(fromConfigDir, url);
			if (!(rel.startsWith("./") || rel.startsWith("../"))) rel = "./" + rel;
			_imports.push(rel);
			continue;
		}
		// Fix: removed an unreachable second `url.protocol !== "file:"` check
		// here — the identical condition already `continue`d above.
		if (isPackageName(imp)) {
			_imports.push(imp);
			continue;
		}
		throw new Error(`Cannot resolve import: ${imp}`);
	}
	return _imports;
}
/**
 * Append imports to a mutable config file's "import" node, promoting a
 * scalar value to an array and skipping duplicates.
 */
function addImportsToMutableConfigFile(configFile, resolvedImports, comment) {
	let importNode = configFile.getNode("import", []);
	if (importNode.type === "scalar") {
		// Promote a single string import to a one-element array node.
		configFile.setValue("import", [importNode.value]);
		importNode = configFile.getNode("import", []);
	}
	assert(isCfgArrayNode(importNode));
	const existing = new Set(importNode.value);
	for (const imp of resolvedImports) {
		if (!existing.has(imp)) importNode.push(imp);
	}
	if (comment) configFile.setComment("import", comment);
}
/**
 * Add imports to a config file (mutable or plain-settings based),
 * avoiding duplicates and optionally attaching a comment on first creation.
 */
async function addImportsToConfigFile(configFile, imports, comment) {
	const resolvedImports = await resolveImports(configFile, imports);
	if (configFile instanceof MutableCSpellConfigFile) {
		return addImportsToMutableConfigFile(configFile, resolvedImports, comment);
	}
	const settings = configFile.settings;
	let importNode = settings.import;
	if (!Array.isArray(importNode)) {
		// Promote a string (or missing) import field to an array.
		importNode = typeof importNode === "string" ? [importNode] : [];
		settings.import = importNode;
		if (comment) configFile.setComment("import", comment);
	}
	assert(Array.isArray(importNode));
	const existing = new Set(importNode);
	for (const imp of resolvedImports) {
		if (!existing.has(imp)) importNode.push(imp);
	}
}
/** Set a config field's value and, when provided, its associated comment. */
function setConfigFieldValue(configFile, key, value, comment) {
	configFile.setValue(key, value);
	if (comment === void 0) return;
	configFile.setComment(key, comment);
}
/**
 * Enable dictionaries in a config file (mutable or plain-settings based),
 * skipping names already present.
 */
function addDictionariesToConfigFile(configFile, dictionaries, comment) {
	if (configFile instanceof MutableCSpellConfigFile) {
		const hadValue = configFile.getValue("dictionaries");
		const node = configFile.getNode("dictionaries", []);
		assert(isCfgArrayNode(node));
		const existing = new Set(node.value);
		for (const dict of dictionaries) {
			if (existing.has(dict)) continue;
			node.push(dict);
			existing.add(dict);
		}
		// Only attach the comment when the field did not exist before.
		if (!hadValue && comment) configFile.setComment("dictionaries", comment);
		return;
	}
	const settings = configFile.settings;
	const dicts = settings.dictionaries || [];
	const existing = new Set(dicts);
	for (const dict of dictionaries) {
		if (existing.has(dict)) continue;
		dicts.push(dict);
		existing.add(dict);
	}
	setConfigFieldValue(configFile, "dictionaries", dicts, comment);
}
//#endregion
//#region src/config/config.ts
/**
 * Apply settings to a config file, falling back to defaults.
 * A field is written when explicitly provided in `settings`, or when it is
 * absent from the file and a default exists; an existing value is never
 * overwritten by a default alone. Comments attach only to newly-set fields.
 * @returns the same config object.
 */
function applyValuesToConfigFile(config, settings, defaultValues, addComments) {
const currentSettings = config.settings || {};
for (const [k, entry] of Object.entries(defaultValues)) {
const { value: defaultValue, comment } = entry;
const key = k;
const newValue = settings[key];
const oldValue = currentSettings[key];
// Precedence: explicit setting > existing file value > default.
const value = newValue ?? oldValue ?? defaultValue;
// Skip when there is nothing to write, or when only a default would
// overwrite an existing value.
if (newValue === void 0 && oldValue !== void 0 || value === void 0) continue;
const useComment = addComments && oldValue === void 0 && comment || void 0;
setConfigFieldValue(config, key, value, useComment);
}
return config;
}
//#endregion
//#region src/config/constants.ts
/**
 * Default values and generated-file comments for `cspell init`.
 * Entries with `value: void 0` exist only to carry a comment for fields a
 * user may fill in later; they are not written unless a value is supplied.
 */
const defaultConfig = {
	$schema: {
		value: void 0,
		comment: " The schema for the configuration file."
	},
	version: {
		value: "0.2",
		comment: " The version of the configuration file format."
	},
	name: {
		value: void 0,
		comment: " The name of the configuration. Use for display purposes only."
	},
	description: {
		value: void 0,
		comment: " A description of the configuration."
	},
	language: {
		value: "en",
		// Fix: the example list was missing its closing parenthesis.
		comment: " The locale to use when spell checking. (e.g., en, en-GB, de-DE)"
	},
	import: {
		value: void 0,
		comment: " Configuration or packages to import."
	},
	dictionaryDefinitions: {
		value: void 0,
		comment: " Define user dictionaries."
	},
	dictionaries: {
		value: void 0,
		comment: " Enable the dictionaries."
	},
	ignorePaths: {
		value: void 0,
		comment: " Glob patterns of files to be skipped."
	},
	files: {
		value: void 0,
		comment: " Glob patterns of files to be included."
	},
	words: {
		value: void 0,
		comment: " Words to be considered correct."
	},
	ignoreWords: {
		value: void 0,
		comment: " Words to be ignored."
	},
	flagWords: {
		value: void 0,
		comment: " Words to be flagged as incorrect."
	},
	overrides: {
		value: void 0,
		comment: " Set configuration based upon file globs."
	},
	languageSettings: {
		value: void 0,
		comment: " Define language specific settings."
	},
	enabledFileTypes: {
		value: void 0,
		comment: " Enable for specific file types."
	},
	caseSensitive: {
		value: void 0,
		comment: " Enable case sensitive spell checking."
	},
	patterns: {
		value: void 0,
		comment: " Regular expression patterns."
	},
	ignoreRegExpList: {
		value: void 0,
		comment: " Regular expressions / patterns of text to be ignored."
	},
	includeRegExpList: {
		value: void 0,
		comment: " Regular expressions / patterns of text to be included."
	}
};
//#endregion
//#region src/config/configInit.ts
// JSON-schema reference written into generated config files.
const schemaRef = cspellConfigFileSchema;
// Default content for new JSON/JSONC config files ("{\n}\n" — the trailing
// backslash on the first line is a template-literal line continuation).
const defaultConfigJson = `\
{
}
`;
// Default content for new YAML config files (a single newline).
const defaultConfigYaml = `
`;
/**
 * `cspell init`: create or update a cspell configuration file, then either
 * print it to stdout or write it to disk.
 */
async function configInit(options) {
	const rw = createReaderWriter();
	const url = determineFileNameURL(options);
	const configFile = await createConfigFile(rw, url, options);
	await applyOptionsToConfigFile(configFile, options);
	// Make sure the destination directory exists before writing.
	await fs.mkdir(new URL("./", configFile.url), { recursive: true });
	if (options.stdout) {
		console.stdoutChannel.write(rw.serialize(configFile));
	} else {
		await rw.writeConfig(configFile);
	}
}
/**
 * Apply `cspell init` command-line options to a config file: comment policy,
 * schema reference, locale, default fields, imports, and dictionaries.
 * @returns the updated config file.
 */
async function applyOptionsToConfigFile(configFile, options) {
const settings = {};
// Comments are added when explicitly requested, or by default for
// non-JSON targets unless removal was requested.
const addComments = options.comments || options.comments === void 0 && !options.removeComments && !configFile.url.pathname.endsWith(".json");
if (options.comments === false) configFile.removeAllComments();
if (options.schema ?? true) configFile.setSchema(schemaRef);
if (options.locale) settings.language = options.locale;
applyValuesToConfigFile(configFile, settings, defaultConfig, addComments);
if (options.import) await addImportsToConfigFile(configFile, options.import, addComments && defaultConfig.import?.comment || void 0);
if (options.dictionary) addDictionariesToConfigFile(configFile, options.dictionary, addComments && defaultConfig.dictionaries?.comment || void 0);
return configFile;
}
/**
 * Determine the output URL for the config file.
 * Explicit `--config` wins; otherwise `--output` is used when it has a
 * supported extension, JS/TS targets are rejected, and anything else gets
 * the default filename appended as a directory entry.
 * @throws when the output path has a JS/TS extension (unsupported for init).
 */
function determineFileNameURL(options) {
	if (options.config) return toFileURL(options.config);
	const defaultFileName = determineDefaultFileName(options);
	const outputUrl = toFileURL(options.output || defaultFileName);
	const pathname = outputUrl.pathname;
	if (pathname.endsWith(".json") || pathname.endsWith(".jsonc") || pathname.endsWith(".yaml") || pathname.endsWith(".yml")) return outputUrl;
	// Fix: match .js/.mjs/.cjs/.ts/.mts/.cts — the previous pattern
	// /\.{m,c}?{j,t}s$/ treated `{m,c}` as literal text and never matched.
	if (/\.[mc]?[jt]s$/.test(pathname)) throw new Error(`Unsupported file extension: ${pathname}`);
	return new URL(defaultFileName, toFileDirURL(outputUrl));
}
/**
 * Default config filename for the requested format ("yaml" when omitted).
 * @throws for unsupported formats.
 */
function determineDefaultFileName(options) {
	const fmt = options.format || "yaml";
	const names = new Map([
		["json", "cspell.json"],
		["jsonc", "cspell.jsonc"],
		["yaml", "cspell.config.yaml"],
		["yml", "cspell.config.yml"]
	]);
	const name$1 = names.get(fmt);
	if (name$1) return name$1;
	throw new Error(`Unsupported format: ${options.format}`);
}
/**
 * Starter content for a new config file of the given format.
 * @throws for unsupported formats.
 */
function getDefaultContent(options) {
	const fmt = options.format;
	if (fmt === void 0 || fmt === "yaml") return defaultConfigYaml;
	if (fmt === "json" || fmt === "jsonc") return defaultConfigJson;
	throw new Error(`Unsupported format: ${fmt}`);
}
/**
 * Load an existing config file, or create one from default content.
 * `package.json` is always read directly (it must already exist).
 */
async function createConfigFile(rw, url, options) {
	if (url.pathname.endsWith("package.json")) return rw.readConfig(url);
	// Fall back to default starter content when the file cannot be read.
	const content = await fs.readFile(url, "utf8").catch(() => getDefaultContent(options));
	return rw.parse({ url, content });
}
//#endregion
//#region src/featureFlags/featureFlags.ts
/** Access the process-wide cspell feature-flag registry. */
function getFeatureFlags() {
return getSystemFeatureFlags();
}
/**
 * Apply "name:value" (or bare "name") flag strings to the feature-flag
 * registry; unknown flags log a warning instead of failing.
 * @returns the (possibly updated) feature flags.
 */
function parseFeatureFlags(flags, featureFlags = getFeatureFlags()) {
	if (!flags) return featureFlags;
	for (const flag of flags) {
		const [name$1, value] = flag.split(":", 2);
		try {
			featureFlags.setFlag(name$1, value);
		} catch {
			console.warn(`Unknown flag: "${name$1}"`);
		}
	}
	return featureFlags;
}
//#endregion
//#region src/environment.ts
// Names of the environment variables recognized by cspell.
const environmentKeys = {
CSPELL_ENABLE_DICTIONARY_LOGGING: "CSPELL_ENABLE_DICTIONARY_LOGGING",
CSPELL_ENABLE_DICTIONARY_LOG_FILE: "CSPELL_ENABLE_DICTIONARY_LOG_FILE",
CSPELL_ENABLE_DICTIONARY_LOG_FIELDS: "CSPELL_ENABLE_DICTIONARY_LOG_FIELDS",
CSPELL_GLOB_ROOT: "CSPELL_GLOB_ROOT",
CSPELL_CONFIG_PATH: "CSPELL_CONFIG_PATH",
CSPELL_DEFAULT_CONFIG_PATH: "CSPELL_DEFAULT_CONFIG_PATH"
};
/** Set an environment variable on the current process. */
function setEnvironmentVariable(key, value) {
process.env[key] = value;
}
/** Read an environment variable from the current process (undefined when unset). */
function getEnvironmentVariable(key) {
return process.env[key];
}
/**
 * Interpret an environment-variable string as a boolean.
 * True (case/whitespace-insensitive) for: t, true, on, yes, 1.
 */
function truthy(value) {
	const normalized = value?.toLowerCase().trim();
	return normalized === "t" || normalized === "true" || normalized === "on" || normalized === "yes" || normalized === "1";
}
//#endregion
//#region src/dirname.ts
// Resolve the directory containing this module. Prefer `import.meta.url`
// (ESM); fall back to CommonJS `__dirname` when the bundle runs as CJS.
let _dirname;
try {
if (typeof import.meta.url !== "string") throw new Error("assert");
_dirname = fileURLToPath(new URL(".", import.meta.url));
} catch {
_dirname = __dirname;
}
// Directory of the installed cspell package.
const pkgDir = _dirname;
//#endregion
//#region src/pkgInfo.ts
// Package metadata (mirrors package.json) used for --version and engine checks.
const name = "cspell";
const version$1 = "9.2.0";
const engines = { node: ">=20" };
const npmPackage = {
name,
version: version$1,
engines
};
//#endregion
//#region src/util/async.ts
// Convenience aliases for the async pipe operators from @cspell/cspell-pipe.
const asyncMap = operators.opMapAsync;
const asyncFilter = operators.opFilterAsync;
const asyncAwait = operators.opAwaitAsync;
const asyncFlatten = operators.opFlattenAsync;
//#endregion
//#region src/util/constants.ts
// Shared constants for encodings and stdin/file URL handling.
const UTF8 = "utf8";
const STDIN = "stdin";
const STDINProtocol = "stdin:";
const STDINUrlPrefix = "stdin://";
const FileUrlPrefix = "file://";
const FileUrlAbsPrefix = "file:///";
//#endregion
//#region src/util/glob.ts
// Globs excluded from file searches when no excludes are given.
const defaultExcludeGlobs = ["node_modules/**"];
/**
 * Search the file system for files matching glob patterns.
 * @param pattern - glob patterns and NOT file paths. It can be a file path turned into a glob.
 * @param options - search options.
 * @returns matching paths relative to the search root, sorted with an "en" collator.
 */
async function globP(pattern, options) {
	const cwd = options?.root || options?.cwd || process.cwd();
	const rawIgnore = typeof options?.ignore === "string" ? [options.ignore] : options?.ignore;
	// Ignore globs that climb above the root cannot match anything useful.
	const ignore = rawIgnore?.filter((g) => !g.startsWith("../"));
	const patterns = typeof pattern === "string" ? [pattern] : pattern;
	const useOptions = clean({
		cwd,
		onlyFiles: options?.nodir,
		dot: options?.dot,
		ignore,
		absolute: true,
		followSymbolicLinks: false,
		expandDirectories: false
	});
	const compare$1 = new Intl.Collator("en").compare;
	const absolutePaths = await glob$1(patterns, useOptions);
	absolutePaths.sort(compare$1);
	return absolutePaths.map((absFilename) => path$1.relative(cwd, absFilename));
}
/**
 * Build the exclusion glob list from command-line arguments, splitting each
 * entry on unescaped whitespace; falls back to the defaults when none given.
 */
function calcGlobs(commandLineExclude) {
	const globSet = new Set((commandLineExclude || []).flatMap((g) => g.split(/(?<!\\)\s+/g)).map((g) => g.replaceAll("\\ ", " ")));
	if (globSet.size) {
		return {
			globs: [...globSet],
			source: "arguments"
		};
	}
	return {
		globs: defaultExcludeGlobs,
		source: "default"
	};
}
/**
 * Flatten glob-matcher infos into `{ glob, source }` pairs.
 * Uses flatMap instead of the previous spread-inside-reduce, which rebuilt
 * the accumulator array on every step (accidental O(n^2)).
 */
function extractPatterns(globs) {
	return globs.flatMap((g) => g.matcher.patternsNormalizedToRoot.map((glob$2) => ({
		glob: glob$2,
		source: g.source
	})));
}
/**
 * Create the exclusion GlobMatcher (dot files included) from command-line
 * excludes or the defaults.
 */
function calcExcludeGlobInfo(root, commandLineExclude) {
	const excludes = typeof commandLineExclude === "string" ? [commandLineExclude] : commandLineExclude;
	const choice = calcGlobs(excludes);
	const matcher = new GlobMatcher(choice.globs, {
		root,
		dot: true
	});
	return [{
		matcher,
		source: choice.source
	}];
}
/**
 * Build GlobMatcher from command line or config file globs.
 * @param globs - Glob patterns or file paths.
 * @param root - directory to use as the root.
 * @param isExclude - build an exclusion matcher instead of an inclusion one.
 */
function buildGlobMatcher(globs, root, isExclude) {
	const withRoots = globs.map((g) => {
		// Bare strings come from the command line; objects carry their own source.
		const source = typeof g === "string" ? "command line" : void 0;
		return { source, ...fileOrGlobToGlob(g, root) };
	});
	const mode = isExclude ? "exclude" : "include";
	return new GlobMatcher(withRoots, { root, mode });
}
/** The matcher's patterns, normalized to its root, as plain glob strings. */
function extractGlobsFromMatcher(globMatcher) {
	const patterns = globMatcher.patternsNormalizedToRoot;
	return patterns.map(({ glob: g }) => g);
}
/**
 * Normalize globs/paths to the root. URL-like string entries pass through
 * untouched (first in the result); everything else runs through a GlobMatcher.
 */
function normalizeGlobsToRoot(globs, root, isExclude) {
	const isUrlEntry = (g) => typeof g === "string" && isPossibleUrlRegExp.test(g);
	const urls = [];
	const onlyGlobs = [];
	for (const g of globs) (isUrlEntry(g) ? urls : onlyGlobs).push(g);
	return [...urls, ...extractGlobsFromMatcher(buildGlobMatcher(onlyGlobs, root, isExclude))];
}
// Heuristic: does the string contain glob metacharacters?
const isPossibleGlobRegExp = /[()*?[{}]/;
// Heuristic: does the string look like a URL (3+ char scheme followed by "://")?
const isPossibleUrlRegExp = /^[\d_a-z-]{3,}:\/\//;
/**
 * If a 'glob' is a path to a directory, then append `**` so that
 * directory searches work.
 * @param glob - a glob, file, or directory
 * @param root - root to use.
 * @returns the glob with `**` appended when it names a directory; otherwise unchanged.
 */
async function adjustPossibleDirectory(glob$2, root) {
// Normalize to { glob, root } form, preferring the glob's own root.
const g = typeof glob$2 === "string" ? {
glob: glob$2,
root
} : {
glob: glob$2.glob,
root: glob$2.root ?? root
};
// Real glob patterns and URLs are left untouched.
if (isPossibleGlobRegExp.test(g.glob)) return glob$2;
if (isPossibleUrlRegExp.test(g.glob)) return glob$2;
const dirPath = path$1.resolve(g.root, g.glob);
try {
const stat$1 = await promises.stat(dirPath);
if (stat$1.isDirectory()) {
const useGlob = posix.join(posixPath(g.glob), "**");
// Preserve the input shape: string in, string out.
return typeof glob$2 === "string" ? useGlob : {
...glob$2,
glob: useGlob
};
}
} catch {
// Path does not exist (or is unreadable): treat it as a plain glob.
return glob$2;
}
return glob$2;
}
/** Convert backslash separators to forward slashes on Windows; unchanged elsewhere. */
function posixPath(p) {
	if (path$1.sep !== "\\") return p;
	return p.replaceAll("\\", "/");
}
/**
 * Expand directory entries into `dir/**` globs, then normalize everything
 * to the root in inclusion mode.
 */
async function normalizeFileOrGlobsToRoot(globs, root) {
	const adjusted = await Promise.all(globs.map((g) => adjustPossibleDirectory(g, root)));
	return normalizeGlobsToRoot(adjusted, root, false);
}
/** Wrapper around the glob library that applies the picomatch workaround to each pattern. */
function glob$1(patterns, options) {
  const fixed = typeof patterns === "string"
    ? workaroundPicomatchBug(patterns)
    : patterns.map((pattern) => workaroundPicomatchBug(pattern));
  return glob(fixed, options);
}
//#endregion
//#region src/util/stdin.ts
/** Create a line-by-line async iterable over stdin using readline. */
function readStdin() {
  const lines = readline.createInterface(process.stdin);
  return lines;
}
//#endregion
//#region src/util/stdinUrl.ts
/** Determine whether a url (string or URL instance) uses the stdin protocol. */
function isStdinUrl(url) {
  return url instanceof URL
    ? url.protocol === STDINProtocol
    : url.startsWith(STDINProtocol);
}
/**
 * Normalize and resolve a stdin url.
 * @param url - stdin url to resolve.
 * @param cwd - file path to resolve relative paths against.
 * @returns a URL using the stdin protocol with the path resolved against cwd.
 */
function resolveStdinUrl(url, cwd) {
assert(url.startsWith(STDINProtocol), `Expected url to start with ${STDINProtocol}`);
// Strip the protocol, drop an optional leading `//`, and un-escape a
// Windows drive letter (`/c:` -> `c:`).
const path$2 = decodeURIComponent(url).slice(STDINProtocol.length).replace(/^\/\//, "").replace(/^\/([a-z]:)/i, "$1");
// Resolve the remaining path against cwd as a file url.
const fileUrl = toFileURL(path$2, cwd);
// Swap the `file:` scheme back to the stdin protocol; append `/` when the path is empty.
return new URL(fileUrl.toString().replace(/^file:/, STDINProtocol) + (path$2 ? "" : "/"));
}
//#endregion
//#region src/util/fileHelper.ts
/** Convert file info (filename + text) into a cspell Document; stdin gets a raw document. */
function fileInfoToDocument(fileInfo, languageId, locale) {
  const { filename, text } = fileInfo;
  // Empty strings are treated the same as undefined.
  const langId = languageId || void 0;
  const loc = locale || void 0;
  const uri = filenameToUrl(filename);
  if (uri.href.startsWith(STDINProtocol)) {
    return clean({
      uri: uri.href,
      text,
      languageId: langId,
      locale: loc
    });
  }
  return fileToDocument(uri.href, text, langId, loc);
}
/** Convert a filename, URL, stdin marker, or stdin url into a URL instance. */
function filenameToUrl(filename, cwd = ".") {
  if (filename instanceof URL) return filename;
  const baseUrl = toFileDirURL(cwd);
  if (filename === STDIN) return new URL("stdin:///");
  if (isStdinUrl(filename)) return new URL(resolveStdinUrl(filename, cwd));
  return toFileURL(filename, baseUrl);
}
/** Convert a filename into a URI instance via cspell-io's toURL. */
function filenameToUri(filename, cwd) {
  const url = filenameToUrl(filename, cwd);
  return toURL(url);
}
/** Check whether a file should be treated as binary; stdin is never binary. */
function isBinaryFile$1(filename, cwd) {
  const uri = filenameToUri(filename, cwd);
  return uri.protocol.startsWith("stdin") ? false : isBinaryFile(uri);
}
/**
 * Resolve a filename (path, `file:` url, stdin marker, or stdin url) to a URL.
 * @param filename - the filename / url to resolve.
 * @param cwd - optional working directory; defaults to process.cwd().
 */
function resolveFilenameToUrl(filename, cwd) {
  if (filename instanceof URL) return filename;
  if (filename === STDIN) return new URL(STDINUrlPrefix);
  // Absolute file urls can be parsed directly.
  if (filename.startsWith(FileUrlAbsPrefix)) return new URL(filename);
  const baseUrl = toFileDirURL(cwd || process.cwd());
  // Relative file urls are resolved against the cwd.
  if (filename.startsWith(FileUrlPrefix)) return new URL(filename.slice(FileUrlPrefix.length), baseUrl);
  if (isStdinUrl(filename)) return resolveStdinUrl(filename, baseUrl);
  return toFileURL(filename, baseUrl);
}
/** Resolve a filename to a file path (or href for non-file urls). */
function resolveFilename(filename, cwd) {
  const url = resolveFilenameToUrl(filename, cwd);
  return toFilePathOrHref(url);
}
/**
 * Read the contents of a file (or stdin).
 * @param filename - file to read; names resolving to the stdin protocol read from stdin.
 * @param encoding - text encoding (defaults to UTF8).
 * @param handleNotFound - when true, resolve with empty text and an `errorCode`
 *   for ENOENT / EISDIR errors instead of rejecting.
 * @returns Promise of `{ text, filename }` (plus `errorCode` when handled).
 */
function readFileInfo(filename, encoding = UTF8, handleNotFound = false) {
  filename = resolveFilename(filename);
  const pText = filename.startsWith(STDINProtocol)
    ? streamConsumers.text(process.stdin)
    : readFileText(filename, encoding);
  return pText.then(
    (text) => ({ text, filename }),
    (e) => {
      const error = toError(e);
      // Optionally treat "not found" / "is a directory" as an empty file.
      if (handleNotFound && (error.code === "ENOENT" || error.code === "EISDIR")) {
        return { text: "", filename, errorCode: error.code };
      }
      // Bug fix: the message used "$(unknown)" (not template-literal syntax),
      // so the literal text `$(unknown)` was printed instead of the filename.
      return Promise.reject(new IOError(`Error reading file: "${filename}"`, error));
    }
  );
}
/** Read a file and return just its text content. */
async function readFile(filename, encoding = UTF8) {
  const info = await readFileInfo(filename, encoding);
  return info.text;
}
/**
 * Looks for matching glob patterns or stdin
 * @param globPatterns patterns or stdin
 * @param options - glob options (cwd, etc.)
 * @returns resolved file names, stdin entries first.
 */
async function findFiles(globPatterns, options) {
  const stdin = [];
  const globs = [];
  // Separate stdin / file-url entries from real glob patterns.
  for (const pattern of globPatterns) {
    if (isStdin(pattern) || pattern.startsWith(FileUrlPrefix)) stdin.push(pattern);
    else globs.push(pattern);
  }
  const globResults = globs.length ? await globP(globs, options) : [];
  const cwd = options.cwd || process.cwd();
  return [...stdin, ...globResults].map((filename) => resolveFilename(filename, cwd));
}
// Async pipe operator that resolves each incoming filename to a path/href.
const resolveFilenames = asyncMap(resolveFilename);
/**
 * Read a set of "list files", each containing file paths (one per line), and
 * merge them with lines read from stdin when `"stdin"` appears in the list.
 * @param listFiles - array of file paths to read that will contain a list of files. Paths contained in each
 * file will be resolved relative to the containing file.
 * @returns - an async iterable of files to be processed.
 */
function readFileListFiles(listFiles) {
  // "stdin" entries are pulled out and read from stdin instead of from disk.
  const files = listFiles.filter((file) => file !== "stdin");
  const useStdin = files.length !== listFiles.length;
  const fromFiles = asyncPipe(
    files,
    asyncMap((file) => readFileListFile(file)),
    asyncAwait(),
    asyncFlatten()
  );
  const fromStdin = useStdin ? readStdin() : [];
  return asyncPipe(mergeAsyncIterables(fromFiles, fromStdin), resolveFilenames);
}
/**
 * Read a `listFile` and return the containing file paths resolved relative to the `listFile`.
 * @param listFile - file containing a list of files, one per line.
 * @returns - a list of absolute file paths to be processed.
 */
async function readFileListFile(listFile) {
  try {
    const baseDir = path$1.resolve(path$1.dirname(listFile));
    const content = await readFile(listFile);
    return content
      .split("\n")
      .map((line) => line.trim())
      .filter((line) => !!line)
      .map((line) => path$1.resolve(baseDir, line));
  } catch (err) {
    throw toApplicationError(err, `Error reading file list from: "${listFile}"`);
  }
}
/** True when filename refers to stdin (the literal marker or a stdin url). */
function isStdin(filename) {
  if (filename === STDIN) return true;
  return isStdinUrl(filename);
}
/** True when filename is stdin or an existing regular file. */
async function isFile(filename) {
  if (isStdin(filename)) return true;
  try {
    const stats = await promises.stat(filename);
    return stats.isFile();
  } catch {
    // stat failed: treat as "not a file".
    return false;
  }
}
/** True when filename is an existing directory. */
async function isDir(filename) {
  try {
    const stats = await promises.stat(filename);
    return stats.isDirectory();
  } catch {
    // stat failed: treat as "not a directory".
    return false;
  }
}
/** Resolves to true when filename is NOT an existing directory. */
async function isNotDir(filename) {
  const dir = await isDir(filename);
  return !dir;
}
/** Return filename relative to cwd; paths outside cwd come back as a full path/href. */
function relativeToCwd(filename, cwd = process.cwd()) {
  const cwdUrl = toFileDirURL(cwd);
  const fileUrl = toFileURL(filename, cwdUrl);
  const rel = urlRelative(cwdUrl, fileUrl);
  return rel.startsWith("..") ? toFilePathOrHref(fileUrl) : rel;
}
//#endregion
//#region src/util/cache/file-entry-cache/flatCache.ts
/** A simple Map-backed cache persisted to disk as a "flatted" JSON file. */
var FlatCache = class {
  #cache;
  constructor(cacheFilename) {
    this.cacheFilename = cacheFilename;
    this.#cache = new Map();
  }
  /** Iterate the cache keys. */
  keys() {
    return this.#cache.keys();
  }
  /** Store a value; returns `this` for chaining. */
  set(key, value) {
    this.#cache.set(key, value);
    return this;
  }
  /** Delete a single entry. */
  removeKey(key) {
    this.#cache.delete(key);
  }
  /** Fetch a single entry. */
  get(key) {
    return this.#cache.get(key);
  }
  /**
   * Load the cache from disk, replacing the in-memory contents.
   * @param ifFound - when true (default), a missing or unreadable cache file is
   *   silently ignored; when false, the read/parse error is thrown.
   */
  async load(ifFound = true) {
    this.#cache.clear();
    try {
      const text = await fs.readFile(this.cacheFilename, "utf8");
      this.#cache = new Map(Object.entries(parse(text)));
    } catch (error) {
      if (!ifFound) throw error;
    }
    return this;
  }
  /** Persist the cache to disk, creating the containing directory if needed. */
  async save() {
    const dir = new URL(".", this.cacheFilename);
    await fs.mkdir(dir, { recursive: true });
    const text = stringify(Object.fromEntries(this.#cache.entries()));
    await fs.writeFile(this.cacheFilename, text, "utf8");
  }
  /**
   * Clear the cache and remove the cache file from disk.
   */
  async destroy() {
    this.#cache.clear();
    try {
      await fs.unlink(this.cacheFilename);
    } catch {}
  }
};
/**
 * Load a cache file into a new FlatCache instance.
 * @param cachefile - The location of the cache file.
 * @returns a promise resolving to the loaded FlatCache.
 */
function loadCacheFile(cachefile) {
  return new FlatCache(cachefile).load();
}
//#endregion
//#region src/util/cache/file-entry-cache/file-entry-cache.ts
/** Create a file-entry-cache from a cache file, dropping entries for missing files. */
async function createFromFile$1(cacheFileUrl, useChecksum, currentWorkingDir) {
  const flatCache = await loadCacheFile(cacheFileUrl);
  const entryCache = new ImplFileEntryCache(flatCache, useChecksum ?? false, currentWorkingDir);
  await entryCache.removeNotFoundFiles();
  return entryCache;
}
/**
 * File entry cache: determines whether files have changed since the last run,
 * using either mtime+size or an md5 content checksum.
 */
var ImplFileEntryCache = class {
  cache;
  useChecksum;
  // In-memory entries observed during this run; written back by reconcile().
  #normalizedEntries = /* @__PURE__ */ new Map();
  /**
   * To enable relative paths as the key with current working directory
   */
  currentWorkingDir;
  /**
   * @param cache - backing FlatCache used to persist entries.
   * @param useChecksum - when true, detect changes by content hash instead of mtime/size.
   * @param currentWorkingDir - optional directory URL; keys are stored relative to it.
   */
  constructor(cache, useChecksum, currentWorkingDir) {
    this.cache = cache;
    this.useChecksum = useChecksum || false;
    this.currentWorkingDir = currentWorkingDir ? fileURLToPath(currentWorkingDir) : void 0;
  }
  /** Drop cache entries whose files no longer exist (ENOENT). */
  async removeNotFoundFiles() {
    for (const fPath of this.cache.keys()) {
      try {
        const filePath = this.resolveKeyToFile(fPath);
        await fs.stat(filePath);
      } catch (error) {
        if (isNodeError(error) && error.code === "ENOENT") this.cache.removeKey(fPath);
      }
    }
  }
  /**
   * Given a buffer, calculate md5 hash of its content.
   * @param buffer buffer to calculate hash on
   * @return content hash digest
   */
  #getHash(buffer) {
    return crypto.createHash("md5").update(buffer).digest("hex");
  }
  /**
   * Describe a file: its cache key, whether it changed, and its metadata.
   * Returns `{ key, notFound, err }` when the file cannot be stat'ed.
   */
  async getFileDescriptor(file) {
    let fstat;
    try {
      fstat = await fs.stat(file);
    } catch (error) {
      // Unreadable / missing file: forget any cached entry and report notFound.
      this.#removeEntry(file);
      return {
        key: file,
        notFound: true,
        err: toError$1(error)
      };
    }
    if (this.useChecksum) return this.#getFileDescriptorUsingChecksum(file);
    return this.#getFileDescriptorUsingMtimeAndSize(file, fstat);
  }
  // Change detection via modification time + size.
  #getFileDescriptorUsingMtimeAndSize(file, fstat) {
    const key = this.#getFileKey(file);
    let meta = this.cache.get(key);
    const cacheExists = !!meta;
    const cSize = fstat.size;
    const cTime = fstat.mtime.getTime();
    let isDifferentDate;
    let isDifferentSize;
    if (meta) {
      isDifferentDate = cTime !== meta.mtime;
      isDifferentSize = cSize !== meta.size;
    } else {
      meta = {
        size: cSize,
        mtime: cTime
      };
    }
    const nEntry = {
      key,
      changed: !cacheExists || isDifferentDate || isDifferentSize,
      meta
    };
    this.#normalizedEntries.set(key, nEntry);
    return nEntry;
  }
  // Change detection via md5 hash of the file content.
  async #getFileDescriptorUsingChecksum(file) {
    const key = this.#getFileKey(file);
    let meta = this.cache.get(key);
    const cacheExists = !!meta;
    let contentBuffer;
    try {
      contentBuffer = await fs.readFile(file);
    } catch {
      // Best effort: hash the empty string when the file cannot be read.
      contentBuffer = "";
    }
    let isDifferent = true;
    const hash = this.#getHash(contentBuffer);
    if (meta) isDifferent = hash !== meta.hash;
    else meta = { hash };
    const nEntry = {
      key,
      changed: !cacheExists || isDifferent,
      meta
    };
    this.#normalizedEntries.set(key, nEntry);
    return nEntry;
  }
  /**
   * Remove an entry from the file-entry-cache. Useful to force the file to still be considered
   * modified the next time the process is run
   */
  #removeEntry(file) {
    const key = this.#getFileKey(file);
    this.#normalizedEntries.delete(key);
    this.cache.removeKey(key);
  }
  /**
   * Deletes the cache file from the disk and clears the memory cache
   */
  async destroy() {
    this.#normalizedEntries.clear();
    await this.cache.destroy();
  }
  async #getMetaForFileUsingCheckSum(cacheEntry) {
    const filePath = this.resolveKeyToFile(cacheEntry.key);
    const contentBuffer = await fs.readFile(filePath);
    const hash = this.#getHash(contentBuffer);
    const meta = {
      ...cacheEntry.meta,
      hash
    };
    // Checksum mode does not track mtime/size.
    delete meta.size;
    delete meta.mtime;
    return meta;
  }
  async #getMetaForFileUsingMtimeAndSize(cacheEntry) {
    const filePath = this.resolveKeyToFile(cacheEntry.key);
    const stats = await fs.stat(filePath);
    const meta = {
      ...cacheEntry.meta,
      size: stats.size,
      mtime: stats.mtime.getTime()
    };
    // Mtime/size mode does not track the content hash.
    delete meta.hash;
    return meta;
  }
  /**
   * Sync the files and persist them to the cache
   */
  async reconcile() {
    await this.removeNotFoundFiles();
    for (const [entryKey, cacheEntry] of this.#normalizedEntries.entries()) {
      try {
        const meta = this.useChecksum
          ? await this.#getMetaForFileUsingCheckSum(cacheEntry)
          : await this.#getMetaForFileUsingMtimeAndSize(cacheEntry);
        this.cache.set(entryKey, meta);
      } catch (error) {
        // Files removed mid-run are simply skipped; any other error is fatal.
        if (!isNodeError(error) || error.code !== "ENOENT") throw error;
      }
    }
    // Bug fix: `save()` was called without `await`, so reconcile() resolved
    // before the cache file was written and write failures were unhandled.
    await this.cache.save();
  }
  /** Map a cache key back to an absolute file path. */
  resolveKeyToFile(entryKey) {
    if (this.currentWorkingDir) return path.resolve(this.currentWorkingDir, entryKey);
    return entryKey;
  }
  /** Map a file path to its cache key (relative + forward-slashed when cwd is set). */
  #getFileKey(file) {
    if (this.currentWorkingDir && path.isAbsolute(file)) return normalizePath$1(path.relative(this.currentWorkingDir, file));
    return normalizePath$1(file);
  }
};
/** Type guard: an error-like object carrying a `code` property (Node.js system error). */
function isNodeError(error) {
  if (typeof error !== "object" || error === null) return false;
  return "code" in error;
}
/** Coerce an unknown thrown value into a proper Error instance. */
function toError$1(error) {
  if (error instanceof Error) return error;
  return typeof error === "string"
    ? new Error(error)
    : new Error("Unknown error", { cause: error });
}
/** Normalize a file path to use forward slashes (no-op on POSIX systems). */
function normalizePath$1(filePath) {
  return path.sep === "/" ? filePath : filePath.split(path.sep).join("/");
}
//#endregion
//#region src/util/cache/fileEntryCache.ts
/**
 * Create a file-entry-cache.
 * @param cacheFileUrl - url of the cache file.
 * @param useCheckSum - compare content checksums instead of mtime+size.
 * @param useRelative - store keys relative to the cache file's directory.
 */
function createFromFile(cacheFileUrl, useCheckSum, useRelative) {
  const cwd = useRelative ? new URL("./", cacheFileUrl) : void 0;
  return createFromFile$1(cacheFileUrl, useCheckSum, cwd);
}
//#endregion
//#region src/util/cache/ObjectCollection.ts
// Locale-aware key comparator used to give object keys a stable order.
const compare = Intl.Collator().compare;
/**
 * Interns shallow objects: objects with the same defined key/value pairs
 * (ignoring key order and undefined-valued keys) map to one shared instance.
 */
var ShallowObjectCollection = class {
  tree = {};
  /** Return the canonical instance for `v`; primitives and null pass through. */
  get(v) {
    if (typeof v !== "object" || v === null) return v;
    // Sort defined entries by key so key order does not affect identity.
    const entries = Object.entries(v)
      .filter(([, value]) => value !== void 0)
      .sort((a, b) => compare(a[0], b[0]));
    // Walk/extend the trie: key -> value -> next node.
    let node = this.tree;
    for (const [key, value] of entries) {
      if (!node.c) node.c = new Map();
      let byValue = node.c.get(key);
      if (!byValue) {
        byValue = new Map();
        node.c.set(key, byValue);
      }
      let next = byValue.get(value);
      if (!next) {
        next = {};
        byValue.set(value, next);
      }
      node = next;
    }
    // The first object to reach this node becomes the canonical instance.
    if (node.v) return node.v;
    node.v = v;
    return v;
  }
};
//#endregion
//#region src/util/cache/DiskCache.ts
// Keys of the per-file cached data record: v = version, r = result, d = dependencies.
const cacheDataKeys = {
v: "v",
r: "r",
d: "d"
};
/**
 * Meta Data Version is used to detect if the structure of the meta data has changed.
 * This is used in combination with the Suffix and the version of CSpell.
 */
const META_DATA_BASE_VERSION = "1";
// Produces e.g. "-1-v|r|d": changes whenever the base version or the data keys change.
const META_DATA_VERSION_SUFFIX = "-" + META_DATA_BASE_VERSION + "-" + Object.keys(cacheDataKeys).join("|");
/**
* Caches cspell results on disk
*/
var DiskCache = class {
cacheDir;
dependencyCache = /* @__PURE__ */ new Map();
dependencyCacheTree = {};
objectCollection = new ShallowObjectCollection();
ocCacheFileResult = new ShallowObjectCollection();
version;
constructor(cacheFileLocation, useCheckSum, cspellVersion, useUniversalCache, fileEntryCache) {
this.cacheFileLocation = cacheFileLocation;
this.useCheckSum = useCheckSum;
this.cspellVersion = cspellVersion;
this.useUniversalCache = useUniversalCache;
this.fileEntryCache = fileEntryCache;
this.cacheDir = fileURLToPath(new URL("./", cacheFileLocation));
this.version = calcVersion(cspellVersion);
}
async getCachedLintResults(filename) {
filename = normalizePath(filename);
const fileDescriptor = await this.fileEntryCache.getFileDescriptor(filename);
const meta = fileDescriptor.meta;
const data = meta?.data;
const result = data?.r;
const versionMatches = this.version === data?.v;
if (fileDescriptor.notFound || fileDescriptor.changed || !meta || !result || !versionMatches || !await this.checkDependencies(data.d)) return void 0;
const dd = { ...data };
if (dd.d) dd.d = setTreeEntry(this.dependencyCacheTree, dd.d);
dd.r = dd.r && this.normalizeResult(dd.r);
meta.data = this.objectCollection.get(dd);
const hasErrors = !!result && (result.errors > 0 || result.configErrors > 0 || result.issues.length > 0);
const cached = true;
const shouldReadFile = hasErrors;
return {
...result,
elapsedTimeMs: void 0,
fileInfo: shouldReadFile ? await readFileInfo(filename) : { filename },
cached
};
}
async setCachedLintResults({ fileInfo, elapsedTimeMs: _, cached: __,...result }, dependsUponFiles) {
const fileDescriptor = await this.fileEntryCache.getFileDescriptor(fileInfo.filename);
const meta = fileDescriptor.meta;
if (fileDescriptor.notFound || !meta) return;
const data = this.objectCollection.get({
v: this.version,
r: this.normalizeResult(result),
d: await this.calcDependencyHashes(dependsUponFiles)
});
meta.data = data;
}
async reconcile() {
await this.fileEntryCache.reconcile();
}
async reset() {
await this.fileEntryCache.destroy();
this.dependencyCache.clear();
this.dependencyCacheTree = {};
this.objectCollection = new ShallowObjectCollection();
this.ocCacheFileResult = new ShallowObjectCollection();
}
normalizeResult(result) {
const { issues, processed, errors, configErrors, reportIssueOptions,...rest } = result;
if (!Object.keys(rest).length) return this.ocCacheFileResult.get(result);
return this.ocCacheFileResult.get({
issues,
processed,
errors,
configErrors,
reportIssueOptions
});
}
async calcDependencyHashes(dependsUponFiles) {
dependsUponFiles.sort();
const c = getTreeEntry(this.dependencyCacheTree, dependsUponFiles);
if (c?.d) return c.d;
const dependencies = await Promise.all(dependsUponFiles.map((f) => this.getDependency(f)));
return setTreeEntry(this.dependencyCacheTree, dependencies);
}
async checkDependency(dep) {
const depFile = this.resolveFile(dep.f);
const cDep = this.dependencyCache.get(depFile);
if (cDep && compDep(dep, cDep)) return true;
if (