// vite: Native-ESM powered web dev build tool
import { createRequire as ___createRequire } from 'module'; const require = ___createRequire(import.meta.url);
import { __commonJS, __toDynamicImportESM, __toESM } from "./dep-Drtntmtt.js";
import { CLIENT_DIR, CLIENT_ENTRY, CLIENT_PUBLIC_PATH, CSS_LANGS_RE, DEFAULT_ASSETS_INLINE_LIMIT, DEFAULT_ASSETS_RE, DEFAULT_CLIENT_CONDITIONS, DEFAULT_CLIENT_MAIN_FIELDS, DEFAULT_CONFIG_FILES, DEFAULT_DEV_PORT, DEFAULT_PREVIEW_PORT, DEFAULT_SERVER_CONDITIONS, DEFAULT_SERVER_MAIN_FIELDS, DEP_VERSION_RE, DEV_PROD_CONDITION, ENV_ENTRY, ENV_PUBLIC_PATH, ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR, ERR_OPTIMIZE_DEPS_PROCESSING_ERROR, ESBUILD_BASELINE_WIDELY_AVAILABLE_TARGET, FS_PREFIX, JS_TYPES_RE, KNOWN_ASSET_TYPES, METADATA_FILENAME, OPTIMIZABLE_ENTRY_RE, ROLLUP_HOOKS, SPECIAL_QUERY_RE, VERSION, VITE_PACKAGE_DIR, defaultAllowedOrigins, loopbackHosts, wildcardHosts } from "./dep-Ctugieod.js";
import { builtinModules, createRequire } from "node:module";
import { parseAst, parseAstAsync } from "rollup/parseAst";
import * as fs$1 from "node:fs";
import fs, { existsSync, promises, readFileSync } from "node:fs";
import path, { basename, dirname, extname, isAbsolute, join, normalize, posix, relative, resolve } from "node:path";
import fsp, { constants } from "node:fs/promises";
import { URL as URL$1, fileURLToPath, pathToFileURL } from "node:url";
import util, { format, inspect, promisify, stripVTControlCharacters } from "node:util";
import { performance as performance$1 } from "node:perf_hooks";
import crypto from "node:crypto";
import picomatch from "picomatch";
import esbuild, { build, formatMessages, transform } from "esbuild";
import os from "node:os";
import net from "node:net";
import childProcess, { exec, execFile, execSync } from "node:child_process";
import { promises as promises$1 } from "node:dns";
import path$1, { basename as basename$1, dirname as dirname$1, extname as extname$1, isAbsolute as isAbsolute$1, join as join$1, posix as posix$1, relative as relative$1, resolve as resolve$1, sep, win32 } from "path";
import { existsSync as existsSync$1, readFileSync as readFileSync$1, readdirSync, statSync } from "fs";
import { fdir } from "fdir";
import { gzip } from "node:zlib";
import readline from "node:readline";
import { createRequire as createRequire$1 } from "module";
import { MessageChannel, Worker } from "node:worker_threads";
import { Buffer as Buffer$1 } from "node:buffer";
import { escapePath, glob, globSync, isDynamicPattern } from "tinyglobby";
import assert from "node:assert";
import process$1 from "node:process";
import v8 from "node:v8";
import { EventEmitter } from "node:events";
import { STATUS_CODES, createServer, get } from "node:http";
import { createServer as createServer$1, get as get$1 } from "node:https";
import { ESModulesEvaluator, ModuleRunner } from "vite/module-runner";
import zlib from "zlib";
import * as qs from "node:querystring";
//#region ../../node_modules/.pnpm/picocolors@1.1.1/node_modules/picocolors/picocolors.js
var require_picocolors = __commonJS({ "../../node_modules/.pnpm/picocolors@1.1.1/node_modules/picocolors/picocolors.js"(exports, module) {
let p = process || {}, argv = p.argv || [], env$1 = p.env || {};
let isColorSupported = !(!!env$1.NO_COLOR || argv.includes("--no-color")) && (!!env$1.FORCE_COLOR || argv.includes("--color") || p.platform === "win32" || (p.stdout || {}).isTTY && env$1.TERM !== "dumb" || !!env$1.CI);
let formatter = (open$2, close$1, replace = open$2) => (input) => {
let string = "" + input, index = string.indexOf(close$1, open$2.length);
return ~index ? open$2 + replaceClose(string, close$1, replace, index) + close$1 : open$2 + string + close$1;
};
let replaceClose = (string, close$1, replace, index) => {
let result = "", cursor = 0;
do {
result += string.substring(cursor, index) + replace;
cursor = index + close$1.length;
index = string.indexOf(close$1, cursor);
} while (~index);
return result + string.substring(cursor);
};
let createColors = (enabled$1 = isColorSupported) => {
let f$1 = enabled$1 ? formatter : () => String;
return {
isColorSupported: enabled$1,
reset: f$1("\x1B[0m", "\x1B[0m"),
bold: f$1("\x1B[1m", "\x1B[22m", "\x1B[22m\x1B[1m"),
dim: f$1("\x1B[2m", "\x1B[22m", "\x1B[22m\x1B[2m"),
italic: f$1("\x1B[3m", "\x1B[23m"),
underline: f$1("\x1B[4m", "\x1B[24m"),
inverse: f$1("\x1B[7m", "\x1B[27m"),
hidden: f$1("\x1B[8m", "\x1B[28m"),
strikethrough: f$1("\x1B[9m", "\x1B[29m"),
black: f$1("\x1B[30m", "\x1B[39m"),
red: f$1("\x1B[31m", "\x1B[39m"),
green: f$1("\x1B[32m", "\x1B[39m"),
yellow: f$1("\x1B[33m", "\x1B[39m"),
blue: f$1("\x1B[34m", "\x1B[39m"),
magenta: f$1("\x1B[35m", "\x1B[39m"),
cyan: f$1("\x1B[36m", "\x1B[39m"),
white: f$1("\x1B[37m", "\x1B[39m"),
gray: f$1("\x1B[90m", "\x1B[39m"),
bgBlack: f$1("\x1B[40m", "\x1B[49m"),
bgRed: f$1("\x1B[41m", "\x1B[49m"),
bgGreen: f$1("\x1B[42m", "\x1B[49m"),
bgYellow: f$1("\x1B[43m", "\x1B[49m"),
bgBlue: f$1("\x1B[44m", "\x1B[49m"),
bgMagenta: f$1("\x1B[45m", "\x1B[49m"),
bgCyan: f$1("\x1B[46m", "\x1B[49m"),
bgWhite: f$1("\x1B[47m", "\x1B[49m"),
blackBright: f$1("\x1B[90m", "\x1B[39m"),
redBright: f$1("\x1B[91m", "\x1B[39m"),
greenBright: f$1("\x1B[92m", "\x1B[39m"),
yellowBright: f$1("\x1B[93m", "\x1B[39m"),
blueBright: f$1("\x1B[94m", "\x1B[39m"),
magentaBright: f$1("\x1B[95m", "\x1B[39m"),
cyanBright: f$1("\x1B[96m", "\x1B[39m"),
whiteBright: f$1("\x1B[97m", "\x1B[39m"),
bgBlackBright: f$1("\x1B[100m", "\x1B[49m"),
bgRedBright: f$1("\x1B[101m", "\x1B[49m"),
bgGreenBright: f$1("\x1B[102m", "\x1B[49m"),
bgYellowBright: f$1("\x1B[103m", "\x1B[49m"),
bgBlueBright: f$1("\x1B[104m", "\x1B[49m"),
bgMagentaBright: f$1("\x1B[105m", "\x1B[49m"),
bgCyanBright: f$1("\x1B[106m", "\x1B[49m"),
bgWhiteBright: f$1("\x1B[107m", "\x1B[49m")
};
};
module.exports = createColors();
module.exports.createColors = createColors;
} });
//#endregion
//#region src/shared/constants.ts
/**
* Prefix for resolved Ids that are not valid browser import specifiers
*/
const VALID_ID_PREFIX = `/@id/`;
/**
* Plugins that use 'virtual modules' (e.g. for helper functions) prefix the
* module ID with `\0`, a convention from the Rollup ecosystem.
* This prevents other plugins from trying to process the id (like node resolution),
* and core features like sourcemaps can use this info to differentiate between
* virtual modules and regular files.
* `\0` is not a permitted char in import URLs, so we have to replace it during
* import analysis. The id is decoded back before entering the plugin pipeline.
* These encoded virtual ids are also prefixed by the VALID_ID_PREFIX, so virtual
* modules in the browser end up encoded as `/@id/__x00__{id}`
*/
const NULL_BYTE_PLACEHOLDER = `__x00__`;
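// The string below is assembled from two halves, presumably so the literal
// sourceMappingURL token never appears verbatim in this bundle (tools scanning
// for sourcemap annotations could otherwise match it).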
let SOURCEMAPPING_URL = "sourceMa";
SOURCEMAPPING_URL += "ppingURL";
const MODULE_RUNNER_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-generated";
const ERR_OUTDATED_OPTIMIZED_DEP = "ERR_OUTDATED_OPTIMIZED_DEP";
//#endregion
//#region src/shared/utils.ts
const isWindows = typeof process !== "undefined" && process.platform === "win32";
/**
* Prepend `/@id/` and replace null byte so the id is URL-safe.
* This is prepended to resolved ids that are not valid browser
* import specifiers by the importAnalysis plugin.
*/
function wrapId(id) {
return id.startsWith(VALID_ID_PREFIX) ? id : VALID_ID_PREFIX + id.replace("\0", NULL_BYTE_PLACEHOLDER);
}
/**
* Undo {@link wrapId}'s `/@id/` and null byte replacements.
*/
function unwrapId(id) {
return id.startsWith(VALID_ID_PREFIX) ? id.slice(VALID_ID_PREFIX.length).replace(NULL_BYTE_PLACEHOLDER, "\0") : id;
}
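// Illustrative round trip over the two helpers above (assumed virtual id, not executed here):
//   wrapId("\0virtual:foo")             // => "/@id/__x00__virtual:foo"
//   unwrapId("/@id/__x00__virtual:foo") // => "\0virtual:foo"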
const windowsSlashRE = /\\/g;
function slash(p$1) {
return p$1.replace(windowsSlashRE, "/");
}
const postfixRE = /[?#].*$/;
function cleanUrl(url$6) {
return url$6.replace(postfixRE, "");
}
function splitFileAndPostfix(path$13) {
const file = cleanUrl(path$13);
return {
file,
postfix: path$13.slice(file.length)
};
}
function withTrailingSlash(path$13) {
if (path$13[path$13.length - 1] !== "/") return `${path$13}/`;
return path$13;
}
const AsyncFunction$1 = async function() {}.constructor;
function promiseWithResolvers() {
let resolve$4;
let reject;
const promise = new Promise((_resolve, _reject) => {
resolve$4 = _resolve;
reject = _reject;
});
return {
promise,
resolve: resolve$4,
reject
};
}
//#endregion
//#region ../../node_modules/.pnpm/@jridgewell+sourcemap-codec@1.5.0/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
const comma = ",".charCodeAt(0);
const semicolon = ";".charCodeAt(0);
const chars$1 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
const intToChar = new Uint8Array(64);
const charToInt = new Uint8Array(128);
for (let i$1 = 0; i$1 < 64; i$1++) {
const c = chars$1.charCodeAt(i$1);
intToChar[i$1] = c;
charToInt[c] = i$1;
}
function decodeInteger(reader, relative$3) {
let value$1 = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value$1 |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value$1 & 1;
value$1 >>>= 1;
if (shouldNegate) value$1 = -2147483648 | -value$1;
return relative$3 + value$1;
}
function encodeInteger(builder, num, relative$3) {
let delta = num - relative$3;
delta = delta < 0 ? -delta << 1 | 1 : delta << 1;
do {
let clamped = delta & 31;
delta >>>= 5;
if (delta > 0) clamped |= 32;
builder.write(intToChar[clamped]);
} while (delta > 0);
return num;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max) return false;
return reader.peek() !== comma;
}
const bufLength = 1024 * 16;
const td = typeof TextDecoder !== "undefined" ? /* @__PURE__ */ new TextDecoder() : typeof Buffer !== "undefined" ? { decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
} } : { decode(buf) {
let out = "";
for (let i$1 = 0; i$1 < buf.length; i$1++) out += String.fromCharCode(buf[i$1]);
return out;
} };
var StringWriter = class {
constructor() {
this.pos = 0;
this.out = "";
this.buffer = new Uint8Array(bufLength);
}
write(v) {
const { buffer } = this;
buffer[this.pos++] = v;
if (this.pos === bufLength) {
this.out += td.decode(buffer);
this.pos = 0;
}
}
flush() {
const { buffer, out, pos } = this;
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
}
};
var StringReader = class {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
};
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(";");
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol) sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [
genColumn,
sourcesIndex,
sourceLine,
sourceColumn,
namesIndex
];
} else seg = [
genColumn,
sourcesIndex,
sourceLine,
sourceColumn
];
} else seg = [genColumn];
line.push(seg);
reader.pos++;
}
if (!sorted) sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator$1);
}
function sortComparator$1(a, b) {
return a[0] - b[0];
}
function encode$1(decoded) {
const writer = new StringWriter();
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
for (let i$1 = 0; i$1 < decoded.length; i$1++) {
const line = decoded[i$1];
if (i$1 > 0) writer.write(semicolon);
if (line.length === 0) continue;
let genColumn = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
if (j > 0) writer.write(comma);
genColumn = encodeInteger(writer, segment[0], genColumn);
if (segment.length === 1) continue;
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
sourceLine = encodeInteger(writer, segment[2], sourceLine);
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
if (segment.length === 4) continue;
namesIndex = encodeInteger(writer, segment[4], namesIndex);
}
}
return writer.flush();
}
//#endregion
//#region ../../node_modules/.pnpm/@jridgewell+resolve-uri@3.1.2/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
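// Illustrative capture groups for assumed inputs (not executed here):
//   urlRegex.exec("https://user@example.com:8080/a/b?q=1#frag") yields groups 1-7:
//   "https:", "user@", "example.com", ":8080", "/a/b", "?q=1", "#frag".
//   A file URL such as "file:///usr/local/app.js" goes through fileRegex instead,
//   giving an empty host and the path "/usr/local/app.js".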
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith("//");
}
function isAbsolutePath(input) {
return input.startsWith("/");
}
function isFileUrl(input) {
return input.startsWith("file:");
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || "", match[3], match[4] || "", match[5] || "/", match[6] || "", match[7] || "");
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path$13 = match[2];
return makeUrl("file:", "", match[1] || "", "", isAbsolutePath(path$13) ? path$13 : "/" + path$13, match[3] || "", match[4] || "");
}
function makeUrl(scheme, user, host, port, path$13, query, hash$1) {
return {
scheme,
user,
host,
port,
path: path$13,
query,
hash: hash$1,
type: 7
};
}
function parseUrl$3(input) {
if (isSchemeRelativeUrl(input)) {
const url$7 = parseAbsoluteUrl("http:" + input);
url$7.scheme = "";
url$7.type = 6;
return url$7;
}
if (isAbsolutePath(input)) {
const url$7 = parseAbsoluteUrl("http://foo.com" + input);
url$7.scheme = "";
url$7.host = "";
url$7.type = 5;
return url$7;
}
if (isFileUrl(input)) return parseFileUrl(input);
if (isAbsoluteUrl(input)) return parseAbsoluteUrl(input);
const url$6 = parseAbsoluteUrl("http://foo.com/" + input);
url$6.scheme = "";
url$6.host = "";
url$6.type = input ? input.startsWith("?") ? 3 : input.startsWith("#") ? 2 : 4 : 1;
return url$6;
}
function stripPathFilename(path$13) {
if (path$13.endsWith("/..")) return path$13;
const index = path$13.lastIndexOf("/");
return path$13.slice(0, index + 1);
}
function mergePaths(url$6, base) {
normalizePath$4(base, base.type);
if (url$6.path === "/") url$6.path = base.path;
else url$6.path = stripPathFilename(base.path) + url$6.path;
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath$4(url$6, type) {
const rel = type <= 4;
const pieces = url$6.path.split("/");
let pointer = 1;
let positive = 0;
let addTrailingSlash = false;
for (let i$1 = 1; i$1 < pieces.length; i$1++) {
const piece = pieces[i$1];
if (!piece) {
addTrailingSlash = true;
continue;
}
addTrailingSlash = false;
if (piece === ".") continue;
if (piece === "..") {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
} else if (rel) pieces[pointer++] = piece;
continue;
}
pieces[pointer++] = piece;
positive++;
}
let path$13 = "";
for (let i$1 = 1; i$1 < pointer; i$1++) path$13 += "/" + pieces[i$1];
if (!path$13 || addTrailingSlash && !path$13.endsWith("/..")) path$13 += "/";
url$6.path = path$13;
}
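// Illustrative normalization for an assumed absolute path (not executed here):
//   a url whose path is "/foo/./bar//baz/../qux" ends up with path "/foo/bar/qux".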
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve$3(input, base) {
if (!input && !base) return "";
const url$6 = parseUrl$3(input);
let inputType = url$6.type;
if (base && inputType !== 7) {
const baseUrl = parseUrl$3(base);
const baseType = baseUrl.type;
switch (inputType) {
case 1: url$6.hash = baseUrl.hash;
case 2: url$6.query = baseUrl.query;
case 3:
case 4: mergePaths(url$6, baseUrl);
case 5:
url$6.user = baseUrl.user;
url$6.host = baseUrl.host;
url$6.port = baseUrl.port;
case 6: url$6.scheme = baseUrl.scheme;
}
if (baseType > inputType) inputType = baseType;
}
normalizePath$4(url$6, inputType);
const queryHash = url$6.query + url$6.hash;
switch (inputType) {
case 2:
case 3: return queryHash;
case 4: {
const path$13 = url$6.path.slice(1);
if (!path$13) return queryHash || ".";
if (isRelative(base || input) && !isRelative(path$13)) return "./" + path$13 + queryHash;
return path$13 + queryHash;
}
case 5: return url$6.path + queryHash;
default: return url$6.scheme + "//" + url$6.user + url$6.host + url$6.port + url$6.path + queryHash;
}
}
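// Illustrative resolutions for assumed inputs (not executed here):
//   resolve$3("bar/baz.js", "https://example.com/src/main.js") // => "https://example.com/src/bar/baz.js"
//   resolve$3("../lib/util.js", "/src/app/main.js")            // => "/src/lib/util.js"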
//#endregion
//#region ../../node_modules/.pnpm/@jridgewell+trace-mapping@0.3.29/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
function stripFilename(path$13) {
if (!path$13) return "";
const index = path$13.lastIndexOf("/");
return path$13.slice(0, index + 1);
}
function resolver(mapUrl, sourceRoot) {
const from = stripFilename(mapUrl);
const prefix$1 = sourceRoot ? sourceRoot + "/" : "";
return (source) => resolve$3(prefix$1 + (source || ""), from);
}
var COLUMN$1 = 0;
var SOURCES_INDEX$1 = 1;
var SOURCE_LINE$1 = 2;
var SOURCE_COLUMN$1 = 3;
var NAMES_INDEX$1 = 4;
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length) return mappings;
if (!owned) mappings = mappings.slice();
for (let i$1 = unsortedIndex; i$1 < mappings.length; i$1 = nextUnsortedSegmentLine(mappings, i$1 + 1)) mappings[i$1] = sortSegments(mappings[i$1], owned);
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i$1 = start; i$1 < mappings.length; i$1++) if (!isSorted(mappings[i$1])) return i$1;
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) return false;
return true;
}
function sortSegments(line, owned) {
if (!owned) line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN$1] - b[COLUMN$1];
}
var found = false;
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + (high - low >> 1);
const cmp = haystack[mid][COLUMN$1] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) low = mid + 1;
else high = mid - 1;
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i$1 = index + 1; i$1 < haystack.length; index = i$1++) if (haystack[i$1][COLUMN$1] !== needle) break;
return index;
}
function lowerBound(haystack, needle, index) {
for (let i$1 = index - 1; i$1 >= 0; index = i$1--) if (haystack[i$1][COLUMN$1] !== needle) break;
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1
};
}
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
return lastIndex;
}
if (needle >= lastNeedle) low = lastIndex === -1 ? 0 : lastIndex;
else high = lastIndex;
}
state.lastKey = key;
state.lastNeedle = needle;
return state.lastIndex = binarySearch(haystack, needle, low, high);
}
function parse$16(map$1) {
return typeof map$1 === "string" ? JSON.parse(map$1) : map$1;
}
var LINE_GTR_ZERO = "`line` must be greater than 0 (lines start at line 1)";
var COL_GTR_EQ_ZERO = "`column` must be greater than or equal to 0 (columns start at column 0)";
var LEAST_UPPER_BOUND = -1;
var GREATEST_LOWER_BOUND = 1;
var TraceMap = class {
constructor(map$1, mapUrl) {
const isString$1 = typeof map$1 === "string";
if (!isString$1 && map$1._decodedMemo) return map$1;
const parsed = parse$16(map$1);
const { version: version$2, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version$2;
this.file = file;
this.names = names || [];
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || void 0;
const resolve$4 = resolver(mapUrl, sourceRoot);
this.resolvedSources = sources.map(resolve$4);
const { mappings } = parsed;
if (typeof mappings === "string") {
this._encoded = mappings;
this._decoded = void 0;
} else if (Array.isArray(mappings)) {
this._encoded = void 0;
this._decoded = maybeSort(mappings, isString$1);
} else if (parsed.sections) throw new Error(`TraceMap passed sectioned source map, please use FlattenMap export instead`);
else throw new Error(`invalid source map: ${JSON.stringify(parsed)}`);
this._decodedMemo = memoizedState();
this._bySources = void 0;
this._bySourceMemos = void 0;
}
};
function cast$2(map$1) {
return map$1;
}
function encodedMappings(map$1) {
var _a, _b;
return (_b = (_a = cast$2(map$1))._encoded) != null ? _b : _a._encoded = encode$1(cast$2(map$1)._decoded);
}
function decodedMappings(map$1) {
var _a;
return (_a = cast$2(map$1))._decoded || (_a._decoded = decode(cast$2(map$1)._encoded));
}
function traceSegment(map$1, line, column) {
const decoded = decodedMappings(map$1);
if (line >= decoded.length) return null;
const segments = decoded[line];
const index = traceSegmentInternal(segments, cast$2(map$1)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
return index === -1 ? null : segments[index];
}
function originalPositionFor(map$1, needle) {
let { line, column, bias } = needle;
line--;
if (line < 0) throw new Error(LINE_GTR_ZERO);
if (column < 0) throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map$1);
if (line >= decoded.length) return OMapping(null, null, null, null);
const segments = decoded[line];
const index = traceSegmentInternal(segments, cast$2(map$1)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
if (index === -1) return OMapping(null, null, null, null);
const segment = segments[index];
if (segment.length === 1) return OMapping(null, null, null, null);
const { names, resolvedSources } = map$1;
return OMapping(resolvedSources[segment[SOURCES_INDEX$1]], segment[SOURCE_LINE$1] + 1, segment[SOURCE_COLUMN$1], segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null);
}
function decodedMap(map$1) {
return clone(map$1, decodedMappings(map$1));
}
function encodedMap(map$1) {
return clone(map$1, encodedMappings(map$1));
}
function clone(map$1, mappings) {
return {
version: map$1.version,
file: map$1.file,
names: map$1.names,
sourceRoot: map$1.sourceRoot,
sources: map$1.sources,
sourcesContent: map$1.sourcesContent,
mappings,
ignoreList: map$1.ignoreList || map$1.x_google_ignoreList
};
}
function OMapping(source, line, column, name) {
return {
source,
line,
column,
name
};
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
else if (bias === LEAST_UPPER_BOUND) index++;
if (index === -1 || index === segments.length) return -1;
return index;
}
//#endregion
//#region ../../node_modules/.pnpm/@jridgewell+set-array@1.2.1/node_modules/@jridgewell/set-array/dist/set-array.mjs
/**
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
* index of the `key` in the backing array.
*
* This is designed to allow synchronizing a second array with the contents of the backing array,
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `sources[i]`,
* and there are never duplicates.
*/
var SetArray = class {
constructor() {
this._indexes = { __proto__: null };
this.array = [];
}
};
/**
* TypeScript doesn't allow friend access to private fields, so this just casts the set into a type
* with public access modifiers.
*/
function cast$1(set) {
return set;
}
/**
* Gets the index associated with `key` in the backing array, if it is already present.
*/
function get$2(setarr, key) {
return cast$1(setarr)._indexes[key];
}
/**
* Puts `key` into the backing array, if it is not already present. Returns
* the index of the `key` in the backing array.
*/
function put(setarr, key) {
const index = get$2(setarr, key);
if (index !== void 0) return index;
const { array, _indexes: indexes } = cast$1(setarr);
const length = array.push(key);
return indexes[key] = length - 1;
}
/**
* Removes the key, if it exists in the set.
*/
function remove(setarr, key) {
const index = get$2(setarr, key);
if (index === void 0) return;
const { array, _indexes: indexes } = cast$1(setarr);
for (let i$1 = index + 1; i$1 < array.length; i$1++) {
const k = array[i$1];
array[i$1 - 1] = k;
indexes[k]--;
}
indexes[key] = void 0;
array.pop();
}
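// Illustrative behavior of the helpers above (not executed here): `put` returns a
// stable index per key, so a second put of the same key is a no-op.
//   const setarr = new SetArray();
//   put(setarr, "a");   // => 0
//   put(setarr, "b");   // => 1
//   put(setarr, "a");   // => 0 (already present)
//   get$2(setarr, "b"); // => 1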
//#endregion
//#region ../../node_modules/.pnpm/@jridgewell+gen-mapping@0.3.8/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const NO_NAME = -1;
/**
* Provides the state to generate a sourcemap.
*/
var GenMapping = class {
constructor({ file, sourceRoot } = {}) {
this._names = new SetArray();
this._sources = new SetArray();
this._sourcesContent = [];
this._mappings = [];
this.file = file;
this.sourceRoot = sourceRoot;
this._ignoreList = new SetArray();
}
};
/**
* TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map$1) {
return map$1;
}
/**
* Same as `addSegment`, but will only add the segment if it generates useful information in the
* resulting map. This only works correctly if segments are added **in order**, meaning you should
* not add a segment with a lower generated line/column than one that came before.
*/
const maybeAddSegment = (map$1, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
return addSegmentInternal(true, map$1, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
};
/**
* Adds/removes the content of the source file to the source map.
*/
function setSourceContent(map$1, source, content) {
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map$1);
const index = put(sources, source);
sourcesContent[index] = content;
}
function setIgnore(map$1, source, ignore = true) {
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map$1);
const index = put(sources, source);
if (index === sourcesContent.length) sourcesContent[index] = null;
if (ignore) put(ignoreList, index);
else remove(ignoreList, index);
}
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toDecodedMap(map$1) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList } = cast(map$1);
removeEmptyFinalLines(mappings);
return {
version: 3,
file: map$1.file || void 0,
names: names.array,
sourceRoot: map$1.sourceRoot || void 0,
sources: sources.array,
sourcesContent,
mappings,
ignoreList: ignoreList.array
};
}
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
function toEncodedMap(map$1) {
const decoded = toDecodedMap(map$1);
return Object.assign(Object.assign({}, decoded), { mappings: encode$1(decoded.mappings) });
}
function addSegmentInternal(skipable, map$1, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names } = cast(map$1);
const line = getLine(mappings, genLine);
const index = getColumnIndex(line, genColumn);
if (!source) {
if (skipable && skipSourceless(line, index)) return;
return insert(line, index, [genColumn]);
}
const sourcesIndex = put(sources, source);
const namesIndex = name ? put(names, name) : NO_NAME;
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) return;
return insert(line, index, name ? [
genColumn,
sourcesIndex,
sourceLine,
sourceColumn,
namesIndex
] : [
genColumn,
sourcesIndex,
sourceLine,
sourceColumn
]);
}
function getLine(mappings, index) {
for (let i$1 = mappings.length; i$1 <= index; i$1++) mappings[i$1] = [];
return mappings[index];
}
function getColumnIndex(line, genColumn) {
let index = line.length;
for (let i$1 = index - 1; i$1 >= 0; index = i$1--) {
const current = line[i$1];
if (genColumn >= current[COLUMN]) break;
}
return index;
}
function insert(array, index, value$1) {
for (let i$1 = array.length; i$1 > index; i$1--) array[i$1] = array[i$1 - 1];
array[index] = value$1;
}
function removeEmptyFinalLines(mappings) {
const { length } = mappings;
let len = length;
for (let i$1 = len - 1; i$1 >= 0; len = i$1, i$1--) if (mappings[i$1].length > 0) break;
if (len < length) mappings.length = len;
}
function skipSourceless(line, index) {
if (index === 0) return true;
const prev = line[index - 1];
return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
if (index === 0) return false;
const prev = line[index - 1];
if (prev.length === 1) return false;
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}
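// Minimal usage sketch of the generator above (assumed file names, not executed here):
//   const gen = new GenMapping({ file: "out.js" });
//   maybeAddSegment(gen, 0, 0, "input.js", 0, 0); // generated 0:0 maps to input.js 0:0
//   setSourceContent(gen, "input.js", "let x = 1;");
//   toEncodedMap(gen).mappings; // => "AAAA"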
//#endregion
//#region ../../node_modules/.pnpm/@ampproject+remapping@2.3.0/node_modules/@ampproject/remapping/dist/remapping.mjs
const SOURCELESS_MAPPING = /* @__PURE__ */ SegmentObject("", -1, -1, "", null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
return {
source,
line,
column,
name,
content,
ignore
};
}
function Source(map$1, sources, source, content, ignore) {
return {
map: map$1,
sources,
source,
content,
ignore
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map$1, sources) {
return Source(map$1, sources, "", null, false);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content, ignore) {
return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map: map$1 } = tree;
const rootNames = map$1.names;
const rootMappings = decodedMappings(map$1);
for (let i$1 = 0; i$1 < rootMappings.length; i$1++) {
const segments = rootMappings[i$1];
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
if (segment.length !== 1) {
const source$1 = rootSources[segment[1]];
traced = originalPositionFor$1(source$1, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : "");
if (traced == null) continue;
}
const { column, line, name, content, source, ignore } = traced;
maybeAddSegment(gen, i$1, genCol, source, line, column, name);
if (source && content != null) setSourceContent(gen, source, content);
if (ignore) setIgnore(gen, source, true);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor$1(source, line, column, name) {
if (!source.map) return SegmentObject(source.source, line, column, name, source.content, source.ignore);
const segment = traceSegment(source.map, line, column);
if (segment == null) return null;
if (segment.length === 1) return SOURCELESS_MAPPING;
return originalPositionFor$1(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value$1) {
if (Array.isArray(value$1)) return value$1;
return [value$1];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader$1) {
const maps = asArray(input).map((m$2) => new TraceMap(m$2, ""));
const map$1 = maps.pop();
for (let i$1 = 0; i$1 < maps.length; i$1++) if (maps[i$1].sources.length > 1) throw new Error(`Transformation map ${i$1} must have exactly one source file.\nDid you specify these with the most recent transformation maps first?`);
let tree = build$2(map$1, loader$1, "", 0);
for (let i$1 = maps.length - 1; i$1 >= 0; i$1--) tree = MapSource(maps[i$1], [tree]);
return tree;
}
function build$2(map$1, loader$1, importer, importerDepth) {
const { resolvedSources, sourcesContent, ignoreList } = map$1;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i$1) => {
const ctx = {
importer,
depth,
source: sourceFile || "",
content: void 0,
ignore: void 0
};
const sourceMap = loader$1(ctx.source, ctx);
const { source, content, ignore } = ctx;
if (sourceMap) return build$2(new TraceMap(sourceMap, source), loader$1, source, depth);
const sourceContent = content !== void 0 ? content : sourcesContent ? sourcesContent[i$1] : null;
const ignored = ignore !== void 0 ? ignore : ignoreList ? ignoreList.includes(i$1) : false;
return OriginalSource(source, sourceContent, ignored);
});
return MapSource(map$1, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
var SourceMap$1 = class {
constructor(map$1, options$1) {
const out = options$1.decodedMappings ? toDecodedMap(map$1) : toEncodedMap(map$1);
this.version = out.version;
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.ignoreList = out.ignoreList;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options$1.excludeContent) this.sourcesContent = out.sourcesContent;
}
toString() {
return JSON.stringify(this);
}
};
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader$1, options$1) {
const opts = typeof options$1 === "object" ? options$1 : {
excludeContent: !!options$1,
decodedMappings: false
};
const tree = buildSourceMapTree(input, loader$1);
return new SourceMap$1(traceMappings(tree), opts);
}
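// Usage sketch for the remapping entry point above (assumed maps and file names, not
// executed here). Returning null from the loader marks a source as original and ends
// the trace for that file; minifiedMap is assumed to list "transpiled.js" in its sources.
//   const remapped = remapping(minifiedMap, (file) =>
//     file === "transpiled.js" ? transpiledMap : null
//   );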
//#endregion
//#region ../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js
var require_ms$1 = __commonJS({ "../../node_modules/.pnpm/ms@2.1.3/node_modules/ms/index.js"(exports, module) {
/**
* Helpers.
*/
var s$1 = 1e3;
var m$1 = s$1 * 60;
var h$1 = m$1 * 60;
var d$1 = h$1 * 24;
var w = d$1 * 7;
var y$1 = d$1 * 365.25;
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} [options]
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
module.exports = function(val, options$1) {
options$1 = options$1 || {};
var type = typeof val;
if (type === "string" && val.length > 0) return parse$15(val);
else if (type === "number" && isFinite(val)) return options$1.long ? fmtLong$1(val) : fmtShort$1(val);
throw new Error("val is not a non-empty string or a valid number. val=" + JSON.stringify(val));
};
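// Illustrative calls of the exported function above (not executed here):
//   ms("2 days")              // => 172800000
//   ms("1h")                  // => 3600000
//   ms(60000)                 // => "1m"
//   ms(60000, { long: true }) // => "1 minute"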
/**
* Parse the given `str` and return milliseconds.
*
* @param {String} str
* @return {Number}
* @api private
*/
function parse$15(str) {
str = String(str);
if (str.length > 100) return;
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(str);
if (!match) return;
var n$2 = parseFloat(match[1]);
var type = (match[2] || "ms").toLowerCase();
switch (type) {
case "years":
case "year":
case "yrs":
case "yr":
case "y": return n$2 * y$1;
case "weeks":
case "week":
case "w": return n$2 * w;
case "days":
case "day":
case "d": return n$2 * d$1;
case "hours":
case "hour":
case "hrs":
case "hr":
case "h": return n$2 * h$1;
case "minutes":
case "minute":
case "mins":
case "min":
case "m": return n$2 * m$1;
case "seconds":
case "second":
case "secs":
case "sec":
case "s": return n$2 * s$1;
case "milliseconds":
case "millisecond":
case "msecs":
case "msec":
case "ms": return n$2;
default: return void 0;
}
}
/**
* Short format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtShort$1(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d$1) return Math.round(ms / d$1) + "d";
if (msAbs >= h$1) return Math.round(ms / h$1) + "h";
if (msAbs >= m$1) return Math.round(ms / m$1) + "m";
if (msAbs >= s$1) return Math.round(ms / s$1) + "s";
return ms + "ms";
}
/**
* Long format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
function fmtLong$1(ms) {
var msAbs = Math.abs(ms);
if (msAbs >= d$1) return plural$1(ms, msAbs, d$1, "day");
if (msAbs >= h$1) return plural$1(ms, msAbs, h$1, "hour");
if (msAbs >= m$1) return plural$1(ms, msAbs, m$1, "minute");
if (msAbs >= s$1) return plural$1(ms, msAbs, s$1, "second");
return ms + " ms";
}
/**
* Pluralization helper.
*/
function plural$1(ms, msAbs, n$2, name) {
var isPlural = msAbs >= n$2 * 1.5;
return Math.round(ms / n$2) + " " + name + (isPlural ? "s" : "");
}
} });
//#endregion
//#region ../../node_modules/.pnpm/debug@4.4.1/node_modules/debug/src/common.js
var require_common$1 = __commonJS({ "../../node_modules/.pnpm/debug@4.4.1/node_modules/debug/src/common.js"(exports, module) {
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*/
function setup(env$2) {
createDebug$1.debug = createDebug$1;
createDebug$1.default = createDebug$1;
createDebug$1.coerce = coerce$1;
createDebug$1.disable = disable$1;
createDebug$1.enable = enable$1;
createDebug$1.enabled = enabled$1;
createDebug$1.humanize = require_ms$1();
createDebug$1.destroy = destroy$1;
Object.keys(env$2).forEach((key) => {
createDebug$1[key] = env$2[key];
});
/**
* The currently active debug mode names, and names to skip.
*/
createDebug$1.names = [];
createDebug$1.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
*/
createDebug$1.formatters = {};
/**
* Selects a color for a debug namespace
* @param {String} namespace The namespace string for the debug instance to be colored
* @return {Number|String} An ANSI color code for the given namespace
* @api private
*/
function selectColor$1(namespace) {
let hash$1 = 0;
for (let i$1 = 0; i$1 < namespace.length; i$1++) {
hash$1 = (hash$1 << 5) - hash$1 + namespace.charCodeAt(i$1);
hash$1 |= 0;
}
return createDebug$1.colors[Math.abs(hash$1) % createDebug$1.colors.length];
}
createDebug$1.selectColor = selectColor$1;
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug$1(namespace) {
let prevTime$1;
let enableOverride = null;
let namespacesCache;
let enabledCache;
function debug$19(...args) {
if (!debug$19.enabled) return;
const self$1 = debug$19;
const curr = Number(/* @__PURE__ */ new Date());
const ms = curr - (prevTime$1 || curr);
self$1.diff = ms;
self$1.prev = prevTime$1;
self$1.curr = curr;
prevTime$1 = curr;
args[0] = createDebug$1.coerce(args[0]);
if (typeof args[0] !== "string") args.unshift("%O");
let index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format$3) => {
if (match === "%%") return "%";
index++;
const formatter$1 = createDebug$1.formatters[format$3];
if (typeof formatter$1 === "function") {
const val = args[index];
match = formatter$1.call(self$1, val);
args.splice(index, 1);
index--;
}
return match;
});
createDebug$1.formatArgs.call(self$1, args);
const logFn = self$1.log || createDebug$1.log;
logFn.apply(self$1, args);
}
debug$19.namespace = namespace;
debug$19.useColors = createDebug$1.useColors();
debug$19.color = createDebug$1.selectColor(namespace);
debug$19.extend = extend;
debug$19.destroy = createDebug$1.destroy;
Object.defineProperty(debug$19, "enabled", {
enumerable: true,
configurable: false,
get: () => {
if (enableOverride !== null) return enableOverride;
if (namespacesCache !== createDebug$1.namespaces) {
namespacesCache = createDebug$1.namespaces;
enabledCache = createDebug$1.enabled(namespace);
}
return enabledCache;
},
set: (v) => {
enableOverride = v;
}
});
if (typeof createDebug$1.init === "function") createDebug$1.init(debug$19);
return debug$19;
}
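// Illustrative use of the factory above (assumed namespace, not executed here):
//   const log = createDebug$1("vite:deps");
//   log("scanned %d files", 42); // emits output only while the "vite:deps" namespace is enabled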
function extend(namespace, delimiter) {
const newDebug = createDebug$1(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace);
newDebug.log = this.log;
return newDebug;
}
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable$1(namespaces) {
createDebug$1.save(namespaces);
createDebug$1.namespaces = namespaces;
createDebug$1.names = [];
createDebug$1.skips = [];
const split = (typeof namespaces === "string" ? namespaces : "").trim().replace(/\s+/g, ",").split(",").filter(Boolean);
for (const ns of split) if (ns[0] === "-") createDebug$1.skips.push(ns.slice(1));
else createDebug$1.names.push(ns);
}
/**
* Checks if the given string matches a namespace template, honoring
* asterisks as wildcards.
*
* @param {String} search
* @param {String} template
* @return {Boolean}
*/
function matchesTemplate(search, template) {
let searchIndex = 0;
let templateIndex = 0;
let starIndex = -1;
let matchIndex = 0;
while (searchIndex < search.length) if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === "*")) if (template[templateIndex] === "*") {
starIndex = templateIndex;
matchIndex = searchIndex;
templateIndex++;
} else {
searchIndex++;
templateIndex++;
}
else if (starIndex !== -1) {
templateIndex = starIndex + 1;
matchIndex++;
searchIndex = matchIndex;
} else return false;
while (templateIndex < template.length && template[templateIndex] === "*") templateIndex++;
return templateIndex === template.length;
}
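// Illustrative wildcard matches (not executed here):
//   matchesTemplate("vite:deps", "vite:*")  // => true
//   matchesTemplate("vite:deps", "express") // => false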
/**
* Disable debug output.
*
* @return {String} namespaces
* @api public
*/
function disable$1() {
const namespaces = [...createDebug$1.names, ...createDebug$1.skips.map((namespace) => "-" + namespace)].join(",");
createDebug$1.enable("");
return namespaces;
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled$1(name) {
for (const skip of createDebug$1.skips) if (matchesTemplate(name, skip)) return false;
for (const ns of createDebug$1.names) if (matchesTemplate(name, ns)) return true;
return false;
}
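// Illustrative interplay with enable() above (assumed namespaces, not executed here):
//   createDebug$1.enable("vite:*,-vite:hmr");
//   enabled$1("vite:deps"); // => true
//   enabled$1("vite:hmr");  // => false (skipped by the "-vite:hmr" entry)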
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce$1(val) {
if (val instanceof Error) return val.stack || val.message;
return val;
}
/**
* XXX DO NOT USE. This is a temporary stub function.
* XXX It WILL be removed in the next major release.
*/
function destroy$1() {
console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
}
createDebug$1.enable(createDebug$1.load());
return createDebug$1;
}
module.exports = setup;
} });
//#endregion
//#region ../../node_modules/.pnpm/debug@4.4.1/node_modules/debug/src/node.js
var require_node$1 = __commonJS({ "../../node_modules/.pnpm/debug@4.4.1/node_modules/debug/src/node.js"(exports, module) {
/**
* Module dependencies.
*/
const tty$1 = require("tty");
const util$3 = require("util");
/**
* This is the Node.js implementation of `debug()`.
*/
exports.init = init$2;
exports.log = log$1;
exports.formatArgs = formatArgs$1;
exports.save = save$1;
exports.load = load$2;
exports.useColors = useColors$1;
exports.destroy = util$3.deprecate(() => {}, "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.");
/**
* Colors.
*/
exports.colors = [
6,
2,
3,
4,
5,
1
];
try {
const supportsColor = require("supports-color");
if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) exports.colors = [
20,
21,
26,
27,
32,
33,
38,
39,
40,
41,
42,
43,
44,
45,
56,
57,
62,
63,
68,
69,
74,
75,
76,
77,
78,
79,
80,
81,
92,
93,
98,
99,
112,
113,
128,
129,
134,
135,
148,
149,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
178,
179,
184,
185,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
214,
215,
220,
221
];
} catch (error$1) {}
/**
* Build up the default `inspectOpts` object from the environment variables.
*
* $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
exports.inspectOpts = Object.keys(process.env).filter((key) => {
return /^debug_/i.test(key);
}).reduce((obj, key) => {
const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => {
return k.toUpperCase();
});
let val = process.env[key];
if (/^(yes|on|true|enabled)$/i.test(val)) val = true;
else if (/^(no|off|false|disabled)$/i.test(val)) val = false;
else if (val === "null") val = null;
else val = Number(val);
obj[prop] = val;
return obj;
}, {});
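// For the example invocation in the comment above, this reduce yields (not executed here):
//   { colors: false, depth: 10, showHidden: true }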
/**
* Is stdout a TTY? Colored output is enabled when `true`.
*/
function useColors$1() {
return "colors" in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty$1.isatty(process.stderr.fd);
}
/**
* Adds ANSI color escape codes if enabled.
*
* @api public
*/
function formatArgs$1(args) {
const { namespace: name, useColors: useColors$2 } = this;
if (useColors$2) {
const c = this.color;
const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c);
const prefix$1 = ` ${colorCode};1m${name} \u001B[0m`;
args[0] = prefix$1 + args[0].split("\n").join("\