var global = globalThis; import {Buffer} from "node:buffer"; import {setTimeout,clearTimeout,setImmediate,clearImmediate,setInterval,clearInterval} from "node:timers"; import {createRequire as _vlt_createRequire} from "node:module"; var require = _vlt_createRequire(import.meta.filename); import { Spec2 as Spec, graphRun, graphRunSync, isSpec, joinDepIDTuple } from "./chunk-U5J4TCIV.js"; import { LRUCache, PathScurry, globSync, minimatch, promiseSpawn, whichSync } from "./chunk-B4MAUXR2.js"; import { Range, Version, asManifest, isRange, longDependencyTypes, normalizeManifest, parse, satisfies } from "./chunk-JBBINXAZ.js"; import { load, parse as parse2, stringify, walkUp } from "./chunk-QOAKZNUG.js"; import { error } from "./chunk-KVH5ECIG.js"; import { __commonJS, __toESM } from "./chunk-AECDW3EJ.js"; // ../../node_modules/.pnpm/err-code@2.0.3/node_modules/err-code/index.js var require_err_code = __commonJS({ "../../node_modules/.pnpm/err-code@2.0.3/node_modules/err-code/index.js"(exports, module) { "use strict"; function assign(obj, props) { for (const key in props) { Object.defineProperty(obj, key, { value: props[key], enumerable: true, configurable: true }); } return obj; } function createError(err, code, props) { if (!err || typeof err === "string") { throw new TypeError("Please pass an Error to err-code"); } if (!props) { props = {}; } if (typeof code === "object") { props = code; code = void 0; } if (code != null) { props.code = code; } try { return assign(err, props); } catch (_) { props.message = err.message; props.stack = err.stack; const ErrClass = function() { }; ErrClass.prototype = Object.create(Object.getPrototypeOf(err)); return assign(new ErrClass(), props); } } module.exports = createError; } }); // ../../node_modules/.pnpm/retry@0.12.0/node_modules/retry/lib/retry_operation.js var require_retry_operation = __commonJS({ "../../node_modules/.pnpm/retry@0.12.0/node_modules/retry/lib/retry_operation.js"(exports, module) { function RetryOperation(timeouts, options) { if (typeof options === "boolean") { options = { forever: options }; } this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); this._timeouts = timeouts; this._options = options || {}; this._maxRetryTime = options && options.maxRetryTime || Infinity; this._fn = null; this._errors = []; this._attempts = 1; this._operationTimeout = null; this._operationTimeoutCb = null; this._timeout = null; this._operationStart = null; if (this._options.forever) { this._cachedTimeouts = this._timeouts.slice(0); } } module.exports = RetryOperation; RetryOperation.prototype.reset = function() { this._attempts = 1; this._timeouts = this._originalTimeouts; }; RetryOperation.prototype.stop = function() { if (this._timeout) { clearTimeout(this._timeout); } this._timeouts = []; this._cachedTimeouts = null; }; RetryOperation.prototype.retry = function(err) { if (this._timeout) { clearTimeout(this._timeout); } if (!err) { return false; } var currentTime = (/* @__PURE__ */ new Date()).getTime(); if (err && currentTime - this._operationStart >= this._maxRetryTime) { this._errors.unshift(new Error("RetryOperation timeout occurred")); return false; } this._errors.push(err); var timeout = this._timeouts.shift(); if (timeout === void 0) { if (this._cachedTimeouts) { this._errors.splice(this._errors.length - 1, this._errors.length); this._timeouts = this._cachedTimeouts.slice(0); timeout = this._timeouts.shift(); } else { return false; } } var self = this; var timer = setTimeout(function() { self._attempts++; if (self._operationTimeoutCb) { 
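// Added commentary: at this exact step the per-attempt "operation timeout"
// is re-armed inside the backoff timer, just before the next attempt fires.
// A hedged sketch of how this vendored `retry` API is driven (`doWork` and
// `done` are hypothetical names):
//
//   const op = retry.operation({ retries: 2 })
//   op.attempt(() => {
//     doWork((err, res) => {
//       if (op.retry(err)) return // backs off, then re-invokes the attempt
//       done(op.mainError(), res)
//     })
//   })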
self._timeout = setTimeout(function() { self._operationTimeoutCb(self._attempts); }, self._operationTimeout); if (self._options.unref) { self._timeout.unref(); } } self._fn(self._attempts); }, timeout); if (this._options.unref) { timer.unref(); } return true; }; RetryOperation.prototype.attempt = function(fn, timeoutOps) { this._fn = fn; if (timeoutOps) { if (timeoutOps.timeout) { this._operationTimeout = timeoutOps.timeout; } if (timeoutOps.cb) { this._operationTimeoutCb = timeoutOps.cb; } } var self = this; if (this._operationTimeoutCb) { this._timeout = setTimeout(function() { self._operationTimeoutCb(); }, self._operationTimeout); } this._operationStart = (/* @__PURE__ */ new Date()).getTime(); this._fn(this._attempts); }; RetryOperation.prototype.try = function(fn) { console.log("Using RetryOperation.try() is deprecated"); this.attempt(fn); }; RetryOperation.prototype.start = function(fn) { console.log("Using RetryOperation.start() is deprecated"); this.attempt(fn); }; RetryOperation.prototype.start = RetryOperation.prototype.try; RetryOperation.prototype.errors = function() { return this._errors; }; RetryOperation.prototype.attempts = function() { return this._attempts; }; RetryOperation.prototype.mainError = function() { if (this._errors.length === 0) { return null; } var counts = {}; var mainError = null; var mainErrorCount = 0; for (var i = 0; i < this._errors.length; i++) { var error2 = this._errors[i]; var message = error2.message; var count = (counts[message] || 0) + 1; counts[message] = count; if (count >= mainErrorCount) { mainError = error2; mainErrorCount = count; } } return mainError; }; } }); // ../../node_modules/.pnpm/retry@0.12.0/node_modules/retry/lib/retry.js var require_retry = __commonJS({ "../../node_modules/.pnpm/retry@0.12.0/node_modules/retry/lib/retry.js"(exports) { var RetryOperation = require_retry_operation(); exports.operation = function(options) { var timeouts = exports.timeouts(options); return new RetryOperation(timeouts, { forever: options && options.forever, unref: options && options.unref, maxRetryTime: options && options.maxRetryTime }); }; exports.timeouts = function(options) { if (options instanceof Array) { return [].concat(options); } var opts2 = { retries: 10, factor: 2, minTimeout: 1 * 1e3, maxTimeout: Infinity, randomize: false }; for (var key in options) { opts2[key] = options[key]; } if (opts2.minTimeout > opts2.maxTimeout) { throw new Error("minTimeout is greater than maxTimeout"); } var timeouts = []; for (var i = 0; i < opts2.retries; i++) { timeouts.push(this.createTimeout(i, opts2)); } if (options && options.forever && !timeouts.length) { timeouts.push(this.createTimeout(i, opts2)); } timeouts.sort(function(a, b) { return a - b; }); return timeouts; }; exports.createTimeout = function(attempt, opts2) { var random = opts2.randomize ? 
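// Added commentary: this ternary is the jitter term of the backoff formula
//   timeout = min(random * minTimeout * factor^attempt, maxTimeout)
// With the defaults above (retries: 10, factor: 2, minTimeout: 1000,
// randomize: false) the schedule is 1s, 2s, 4s, 8s, ... capped at
// maxTimeout; with randomize: true each delay is scaled by a value in [1, 2).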
Math.random() + 1 : 1; var timeout = Math.round(random * opts2.minTimeout * Math.pow(opts2.factor, attempt)); timeout = Math.min(timeout, opts2.maxTimeout); return timeout; }; exports.wrap = function(obj, options, methods) { if (options instanceof Array) { methods = options; options = null; } if (!methods) { methods = []; for (var key in obj) { if (typeof obj[key] === "function") { methods.push(key); } } } for (var i = 0; i < methods.length; i++) { var method = methods[i]; var original = obj[method]; obj[method] = function retryWrapper(original2) { var op = exports.operation(options); var args = Array.prototype.slice.call(arguments, 1); var callback = args.pop(); args.push(function(err) { if (op.retry(err)) { return; } if (err) { arguments[0] = op.mainError(); } callback.apply(this, arguments); }); op.attempt(function() { original2.apply(obj, args); }); }.bind(obj, original); obj[method].options = options; } }; } }); // ../../node_modules/.pnpm/retry@0.12.0/node_modules/retry/index.js var require_retry2 = __commonJS({ "../../node_modules/.pnpm/retry@0.12.0/node_modules/retry/index.js"(exports, module) { module.exports = require_retry(); } }); // ../../node_modules/.pnpm/promise-retry@2.0.1/node_modules/promise-retry/index.js var require_promise_retry = __commonJS({ "../../node_modules/.pnpm/promise-retry@2.0.1/node_modules/promise-retry/index.js"(exports, module) { "use strict"; var errcode = require_err_code(); var retry = require_retry2(); var hasOwn = Object.prototype.hasOwnProperty; function isRetryError(err) { return err && err.code === "EPROMISERETRY" && hasOwn.call(err, "retried"); } function promiseRetry2(fn, options) { var temp; var operation; if (typeof fn === "object" && typeof options === "function") { temp = options; options = fn; fn = temp; } operation = retry.operation(options); return new Promise(function(resolve5, reject) { operation.attempt(function(number) { Promise.resolve().then(function() { return fn(function(err) { if (isRetryError(err)) { err = err.retried; } throw errcode(new Error("Retrying"), "EPROMISERETRY", { retried: err }); }, number); }).then(resolve5, function(err) { if (isRetryError(err)) { err = err.retried; if (operation.retry(err || new Error())) { return; } } reject(err); }); }); }); } module.exports = promiseRetry2; } }); // ../../src/git/src/spawn.ts var import_promise_retry = __toESM(require_promise_retry(), 1); // ../../src/git/src/make-error.ts var connectionErrorRe = new RegExp( [ "remote error: Internal Server Error", "The remote end hung up unexpectedly", "Connection timed out", "Operation timed out", "Failed to connect to .* Timed out", "Connection reset by peer", "SSL_ERROR_SYSCALL", "The requested URL returned error: 503" ].join("|") ); var missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/; var makeError = (result) => connectionErrorRe.test(result.stderr) ? [ (n) => n < 3, error("A git connection error occurred", result) ] : missingPathspecRe.test(result.stderr) ? [null, error("The git reference could not be found", result)] : [null, error("An unknown git error occurred", result)]; // ../../src/git/src/opts.ts var gitEnv = { GIT_ASKPASS: "echo", GIT_SSH_COMMAND: "ssh -oStrictHostKeyChecking=accept-new" }; var opts = (opts2 = {}) => ({ acceptFail: true, ...opts2, env: opts2.env ?? 
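// Added commentary: when no env is supplied, the spread below layers
// process.env over the two git-specific defaults (GIT_ASKPASS,
// GIT_SSH_COMMAND), so the caller's environment always wins. A hedged
// sketch of the promise-retry pattern that spawn() builds on
// (`doFlakyFetch` is a hypothetical operation):
//
//   promiseRetry(async (retryFn, attemptNumber) => {
//     const res = await doFlakyFetch()
//     if (res.transient) retryFn(new Error('transient failure'))
//     return res
//   }, { retries: 3, factor: 2, minTimeout: 1e3 })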
{ ...gitEnv, ...process.env }, stdio: "pipe", stdioString: true, shell: false }); // ../../src/git/src/which.ts var gitPath = void 0; var which = (opts2 = {}) => { if (opts2.git) { return opts2.git; } let whichError = void 0; if (opts2.git !== false) { if (!gitPath) { try { gitPath = whichSync("git"); } catch (er) { whichError = er; } } } if (!gitPath || opts2.git === false) { return error( "No git binary found in $PATH", { code: "ENOGIT", cause: whichError }, which ); } return gitPath; }; // ../../src/git/src/spawn.ts var spawn = async (gitArgs, opts2 = {}) => { const gitPath2 = which(opts2); if (gitPath2 instanceof Error) { throw gitPath2; } const args = opts2.allowReplace || gitArgs[0] === "--no-replace-objects" ? gitArgs : ["--no-replace-objects", ...gitArgs]; const retryOpts = { retries: opts2["fetch-retries"] || 3, factor: opts2["fetch-retry-factor"] || 2, maxTimeout: opts2["fetch-retry-maxtimeout"] || 6e4, minTimeout: opts2["fetch-retry-mintimeout"] || 1e3 }; return (0, import_promise_retry.default)(async (retryFn, num) => { const result = await promiseSpawn(gitPath2, args, opts(opts2)); if (result.status || result.signal) { const [shouldRetry, gitError] = makeError(result); if (!shouldRetry?.(num)) { throw gitError; } retryFn(gitError); } return result; }, retryOpts); }; // ../../src/git/src/is.ts import { stat } from "node:fs/promises"; var is = ({ cwd = process.cwd() } = {}) => stat(cwd + "/.git").then( () => true, () => false ); // ../../src/git/src/is-clean.ts var isClean = async (opts2 = {}) => { const result = await spawn( ["status", "--porcelain=v1", "-uno"], opts2 ); if (result.status || result.signal) { throw error("git isClean check failed", result); } for (const line of result.stdout.split(/\r?\n+/)) { if (line.trim()) return false; } return true; }; // ../../src/git-scp-url/src/index.ts var knownProtocols = /* @__PURE__ */ new Set([ "http:", "https:", "git:", "git+ssh:", "git+https:", "ssh:" ]); var memo = /* @__PURE__ */ new Map(); var correctProtocol = (arg) => { const firstColon = arg.indexOf(":"); const proto = arg.slice(0, firstColon + 1); const doubleSlash = arg.indexOf("//"); if (knownProtocols.has(proto)) { if (doubleSlash === firstColon + 1) { return arg; } return proto + "//" + arg.slice(firstColon + 1); } const firstAt = arg.indexOf("@"); if (firstAt > -1) { if (firstAt > firstColon) { return `git+ssh://${arg}`; } else { return arg; } } return proto + "//" + arg.slice(firstColon + 1); }; var correctUrl = (url) => { const firstAt = url.indexOf("@"); const lastHash = url.lastIndexOf("#"); let firstColon = url.indexOf(":"); let lastColon = url.lastIndexOf( ":", lastHash > -1 ? 
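// Added commentary: the lastIndexOf above searches backwards from the
// committish '#', so a trailing `#branch` is never mistaken for an
// scp-style path separator. Net effect of correctProtocol + correctUrl
// (input is illustrative):
//   git@github.com:user/repo.git  ->  git+ssh://git@github.com/user/repo.git
// i.e. the ':' after the host becomes '/', then a protocol is prepended
// when none was present.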
lastHash : Infinity ); let corrected = url; if (lastColon > firstAt) { corrected = url.slice(0, lastColon) + "/" + url.slice(lastColon + 1); firstColon = corrected.indexOf(":"); lastColon = corrected.lastIndexOf(":"); } if (firstColon === -1 && !url.includes("//")) { corrected = `git+ssh://${corrected}`; } return corrected; }; var gitScpURL = (url) => { const memoized = memo.get(url); if (memoized) return memoized; try { const result = new URL(url); if (result.hostname) { memo.set(url, result); return result; } } catch { } try { const result = new URL(correctUrl(correctProtocol(url))); if (result.hostname) { memo.set(url, result); return result; } } catch { } return void 0; }; // ../../src/git/src/clone.ts import { mkdir, stat as stat2 } from "node:fs/promises"; import { basename, resolve as resolve2 } from "node:path"; import { fileURLToPath as fileURLToPath2 } from "node:url"; // ../../src/git/src/is-windows.ts var isWindows = (opts2) => (opts2.fakePlatform || process.platform) === "win32"; // ../../src/pick-manifest/src/index.ts var parsedNodeVersion = Version.parse(process.version); var isBefore = (version, before, verTimes) => { if (!verTimes || !version || !before) return true; const time = version && verTimes[version]; return !!time && Date.parse(time) <= before; }; var checkList = (value, list) => { if (typeof list === "string") { list = [list]; } if (!Array.isArray(list)) return true; if (list.length === 1 && list[0] === "any") { return true; } let negated = 0; let match = false; for (const entry of list) { const negate = entry.startsWith("!"); const test = negate ? entry.slice(1) : entry; if (negate) { negated++; if (value === test) { return false; } } else { match = match || value === test; } } return match || negated === list.length; }; var platformCheck = (mani, nodeVersion, wantOs, wantArch) => { const { engines, os, cpu } = mani; if (engines) { const { node } = engines; if (node && !satisfies(nodeVersion, node, true)) { return false; } } if (wantOs && !checkList(wantOs, os)) return false; if (wantArch && !checkList(wantArch, cpu)) return false; return true; }; var versionOk = (packument, version, nodeVersion, os, arch, before) => { const mani = packument.versions[version]; if (!mani) return false; const { time } = packument; return isBefore(version, before, time) && platformCheck(mani, nodeVersion, os, arch); }; function pickManifest(packument, wanted, opts2 = {}) { const { tag = "latest", before, "node-version": nodeVersion, os = process.platform, arch = process.arch } = opts2; const nv = !nodeVersion ? parsedNodeVersion : Version.parse(nodeVersion); const { name, time: verTimes, versions: versions2 = {}, "dist-tags": distTags2 = {} } = packument; const time = before && verTimes ? +new Date(before) : Infinity; let range = void 0; let spec = void 0; if (typeof wanted === "object") { if (isSpec(wanted)) { const f = wanted.final; range = f.range; spec = f; } else if (isRange(wanted)) { range = wanted; } } else { spec = Spec.parse(`${name}@${wanted}`).final; range = spec.range; } if (!range) { if (!spec?.distTag) { throw error( "Only dist-tag or semver range specs are supported", { spec } ); } const ver = distTags2[spec.distTag]; if (!ver) return void 0; const mani = versions2[ver]; if (mani && versionOk(packument, ver, nv, os, arch, time)) { return mani; } else { range = new Range(`<=${ver}`); } } if (range.isAny) range = new Range("*", true); const defaultVer = distTags2[tag]; const defTagVersion = defaultVer ? 
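// Added commentary: selection order from here on: the default dist-tag
// version wins when it satisfies the range and passes the engines/platform/
// `before` checks; otherwise every published version is scanned and ranked
// by (not deprecated && platform ok), then platform ok, then not deprecated,
// then not prerelease, then matching the default tag, then highest semver.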
Version.parse(defaultVer) : void 0; if (defaultVer && (range.isAny || defTagVersion?.satisfies(range)) && versionOk(packument, defaultVer, nv, os, arch, time)) { return versions2[defaultVer]; } const entries = Object.entries(versions2); if (!entries.length) { return void 0; } let found = void 0; let foundIsDefTag = false; for (const [ver, mani] of entries) { if (time && verTimes && !isBefore(ver, time, verTimes)) { continue; } const version = parse(ver); if (!version?.satisfies(range)) { continue; } const mc = { version, deprecated: !!mani.deprecated, platform: platformCheck(mani, nv, os, arch), prerelease: !!version.prerelease?.length, mani }; if (!found) { found = mc; if (defTagVersion?.equals(found.version)) { foundIsDefTag = true; } continue; } const mok = !mc.deprecated && mc.platform; const fok = !found.deprecated && found.platform; if (mok !== fok) { if (mok) { found = mc; foundIsDefTag = !!defTagVersion?.equals(mc.version); } } else if (mc.platform !== found.platform) { if (mc.platform) { found = mc; foundIsDefTag = !!defTagVersion?.equals(mc.version); } } else if (mc.deprecated !== found.deprecated) { if (!mc.deprecated) { found = mc; foundIsDefTag = !!defTagVersion?.equals(mc.version); } } else if (found.prerelease !== mc.prerelease) { if (!mc.prerelease) { found = mc; foundIsDefTag = !!defTagVersion?.equals(mc.version); } } else if (defTagVersion?.equals(mc.version)) { found = mc; foundIsDefTag = true; } else if (mc.version.greaterThan(found.version) && !foundIsDefTag) { found = mc; } } return found?.mani; } // ../../src/git/src/revs.ts import { fileURLToPath } from "node:url"; // ../../src/git/src/lines-to-revs.ts var linesToRevs = (lines) => finish( lines.reduce(linesToRevsReducer, { name: "", versions: {}, "dist-tags": {}, refs: {}, shas: {} }) ); var finish = (revs2) => distTags(versions(shaList(peelTags(revs2)))); var versions = (revs2) => { for (const [version, entry] of Object.entries(revs2.versions)) { entry.version = version; } return revs2; }; var shaList = (revs2) => { Object.entries(revs2.refs).forEach(([ref, doc]) => { const shas = revs2.shas[doc.sha]; if (!shas) { revs2.shas[doc.sha] = [ref]; } else { shas.push(ref); } }); return revs2; }; var peelTags = (revs2) => { Object.entries(revs2.refs).filter(([ref]) => ref.endsWith("^{}")).forEach(([ref, peeled]) => { const unpeeled = revs2.refs[ref.replace(/\^\{\}$/, "")]; if (unpeeled) { unpeeled.sha = peeled.sha; delete revs2.refs[ref]; } }); return revs2; }; var distTags = (revs2) => { const HEAD = revs2.refs.HEAD ?? 
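// Added commentary: linesToRevs consumes `git ls-remote` output shaped like
// (shas are illustrative):
//   6fa1...  HEAD
//   6fa1...  refs/heads/main
//   aa22...  refs/tags/v1.2.3
//   bb33...  refs/tags/v1.2.3^{}
// Annotated tags were already "peeled" (the ^{} sha replaces the tag sha);
// here dist-tags.latest is taken from a ref literally named `latest` when
// one exists, falling back to whichever semver tag HEAD points at.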
{ sha: void 0 }; for (const [v, ver] of Object.entries(revs2.versions)) { if (revs2.refs.latest && ver.sha === revs2.refs.latest.sha) { revs2["dist-tags"].latest = v; } else if (ver.sha === HEAD.sha) { revs2["dist-tags"].HEAD = v; if (!revs2.refs.latest) { revs2["dist-tags"].latest = v; } } } return revs2; }; var refType = (ref) => { if (ref.startsWith("refs/tags/")) { return "tag"; } if (ref.startsWith("refs/heads/")) { return "branch"; } if (ref.startsWith("refs/pull/")) { return "pull"; } if (ref === "HEAD") { return "head"; } return "other"; }; var lineToRevDoc = (line) => { let [sha, rawRef] = line.trim().split(/\s+/, 2); if (sha === void 0 || rawRef === void 0) return void 0; sha = sha.trim(); rawRef = rawRef.trim(); const type = refType(rawRef); switch (type) { case "tag": { const ref = rawRef.slice("refs/tags/".length); return { name: "", version: "", sha, ref, rawRef, type }; } case "branch": { const ref = rawRef.slice("refs/heads/".length); return { name: "", version: "", sha, ref, rawRef, type }; } case "pull": { const ref = rawRef.slice("refs/".length).replace(/\/head$/, ""); return { name: "", version: "", sha, ref, rawRef, type }; } case "head": { const ref = "HEAD"; return { name: "", version: "", sha, ref, rawRef, type }; } default: return { name: "", version: "", sha, ref: rawRef, rawRef, type }; } }; var linesToRevsReducer = (revs2, line) => { const doc = lineToRevDoc(line); if (!doc) { return revs2; } revs2.refs[doc.ref] = doc; revs2.refs[doc.rawRef] = doc; if (doc.type === "tag") { const match = doc.ref.endsWith("^{}") ? null : /v?(\d+\.\d+\.\d+(?:[-+].+)?)$/.exec(doc.ref); if (match) { if (!match[1]) throw error(`invalid semver tag`, { found: doc.ref }); const v = parse(match[1]); if (v) revs2.versions[String(v)] = doc; } } return revs2; }; // ../../src/git/src/revs.ts var fetchMethod = async (repo, _, options) => { const result = await spawn(["ls-remote", repo], options.context); const revsDoc = linesToRevs(result.stdout.split("\n")); return revsDoc; }; var revsCache = new LRUCache({ max: 100, ttl: 5 * 60 * 1e3, allowStaleOnFetchAbort: true, allowStaleOnFetchRejection: true, fetchMethod }); var revs = async (repo, opts2 = {}) => { repo = String(gitScpURL(repo) ?? repo).replace(/^git\+/, ""); if (repo.startsWith("file://")) repo = fileURLToPath(repo); if (opts2.noGitRevCache) { const result = await fetchMethod(repo, void 0, { context: opts2 }); revsCache.set(repo, result); return result; } return await revsCache.fetch(repo, { context: opts2 }); }; // ../../src/git/src/resolve.ts var resolve = async (repo, ref = "HEAD", opts2 = {}) => { const revDoc = await revs(repo, opts2); if (!revDoc) return void 0; return resolveRef(revDoc, ref, opts2); }; var resolveRef = (revDoc, ref = "HEAD", opts2 = {}) => { const { spec } = opts2; ref = spec?.gitCommittish || ref; if (spec?.range) { return pickManifest(revDoc, spec.range, opts2); } if (!ref) { return revDoc.refs.HEAD; } if (revDoc.refs[ref]) { return revDoc.refs[ref]; } const sha = revDoc.shas[ref]?.[0]; if (sha) { return revDoc.refs[sha]; } return void 0; }; // ../../src/git/src/clone.ts var shallowHosts = /* @__PURE__ */ new Set([ "github.com", "gist.github.com", "gitlab.com", "bitbucket.com", "bitbucket.org" ]); var clone = async (repo, ref = "HEAD", target = void 0, opts2 = {}) => { repo = String(gitScpURL(repo) ?? 
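// Added commentary: clone() normalizes the repo reference the same way
// revs() does: scp-style URLs are corrected, a leading `git+` is stripped,
// and file:// URLs become local paths. It then resolves `ref` against the
// cached ls-remote data and picks a strategy: a plain clone when the ref
// resolves to HEAD's sha, `clone -b` for branches/tags, fetch-by-sha for
// other refs, and a mirror clone when the ref cannot be resolved. Hedged
// usage sketch (URL is illustrative):
//
//   const sha = await clone('https://github.com/user/repo.git', 'v1.2.3')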
repo).replace(/^git\+/, ""); if (repo.startsWith("file://")) repo = fileURLToPath2(repo); const revs2 = await revs(repo, opts2); return await clone_( repo, revs2, ref, revs2 && resolveRef(revs2, ref, opts2), target || defaultTarget(repo, opts2.cwd), opts2 ); }; var maybeShallow = (repo, opts2) => { if (opts2["git-shallow"] === false || opts2["git-shallow"]) { return opts2["git-shallow"]; } const host = gitScpURL(repo)?.host ?? ""; return shallowHosts.has(host); }; var defaultTarget = (repo, cwd = process.cwd()) => resolve2(cwd, basename(repo.replace(/[/\\]?\.git$/, ""))); var clone_ = (repo, revs2, ref, revDoc, target, opts2) => { if (!revDoc || !revs2) { return unresolved(repo, ref, target, opts2); } if (revDoc.sha === revs2.refs.HEAD?.sha) { return plain(repo, revDoc, target, opts2); } if (revDoc.type === "tag" || revDoc.type === "branch") { return branch(repo, revDoc, target, opts2); } return other(repo, revDoc, target, opts2); }; var other = async (repo, revDoc, target, opts2) => { const shallow = maybeShallow(repo, opts2); const fetchOrigin = ["fetch", "origin", revDoc.rawRef].concat( shallow ? ["--depth=1"] : [] ); const git = (args) => spawn(args, { ...opts2, cwd: target }); await mkdir(target, { recursive: true }); await git(["init"]); if (isWindows(opts2)) { await git([ "config", "--local", "--add", "core.longpaths", "true" ]); } await git(["remote", "add", "origin", repo]); await git(fetchOrigin); await git(["checkout", revDoc.sha]); await updateSubmodules(target, opts2); return revDoc.sha; }; var branch = async (repo, revDoc, target, opts2) => { const args = [ "clone", "-b", revDoc.ref, repo, target, "--recurse-submodules" ]; if (maybeShallow(repo, opts2)) { args.push("--depth=1"); } if (isWindows(opts2)) { args.push("--config", "core.longpaths=true"); } await spawn(args, opts2); return revDoc.sha; }; var plain = async (repo, revDoc, target, opts2) => { const args = ["clone", repo, target, "--recurse-submodules"]; if (maybeShallow(repo, opts2)) { args.push("--depth=1"); } if (isWindows(opts2)) { args.push("--config", "core.longpaths=true"); } await spawn(args, opts2); return revDoc.sha; }; var updateSubmodules = async (target, opts2) => { const hasSubmodules = await stat2(`${target}/.gitmodules`).then(() => true).catch(() => false); if (!hasSubmodules) { return; } await spawn( ["submodule", "update", "-q", "--init", "--recursive"], { ...opts2, cwd: target } ); }; var unresolved = async (repo, ref, target, opts2) => { const lp = isWindows(opts2) ? ["--config", "core.longpaths=true"] : []; const cloneArgs = [ "clone", "--mirror", "-q", repo, target + "/.git" ]; const git = (args) => spawn(args, { ...opts2, cwd: target }); await mkdir(target, { recursive: true }); await git(cloneArgs.concat(lp)); await git(["init"]); await git(["checkout", ref]); await updateSubmodules(target, opts2); const result = await git(["rev-parse", "--revs-only", "HEAD"]); return result.stdout; }; // ../../src/git/src/user.ts var getUser = async (opts2 = {}) => { let name = ""; let email = ""; const oldFlagUserNameResult = await spawn( ["config", "--get", "user.name"], opts2 ); if (oldFlagUserNameResult.status || oldFlagUserNameResult.signal) { const userNameResult = await spawn( ["config", "get", "user.name"], opts2 ); name = userNameResult.status || userNameResult.signal ? 
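// Added commentary: getUser probes both spellings of the config lookup:
// the classic `git config --get user.name` first, then the subcommand form
// `git config get user.name` accepted by newer git releases, treating any
// failure as an empty string rather than an error.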
"" : userNameResult.stdout.trim(); } else { name = oldFlagUserNameResult.stdout.trim(); } const oldFlagUserEmailResult = await spawn( ["config", "--get", "user.email"], opts2 ); if (oldFlagUserEmailResult.status || oldFlagUserEmailResult.signal) { const userEmailResult = await spawn( ["config", "get", "user.email"], opts2 ); email = userEmailResult.status || userEmailResult.signal ? "" : userEmailResult.stdout.trim(); } else { email = oldFlagUserEmailResult.stdout.trim(); } if (!name && !email) { return void 0; } return { name, email }; }; // ../../src/package-json/src/index.ts import { readFileSync, writeFileSync, lstatSync } from "node:fs"; import { resolve as resolve3 } from "node:path"; import { homedir } from "node:os"; var exists = (path) => { try { lstatSync(path); return true; } catch { return false; } }; var PackageJson = class { /** * cache of `package.json` loads */ #cache = /* @__PURE__ */ new Map(); /** * cache of `package.json` paths by manifest */ #pathCache = /* @__PURE__ */ new Map(); /** * cache of load errors */ #errCache = /* @__PURE__ */ new Map(); /** * Reads and parses contents of a `package.json` file at a directory `dir`. * `reload` will optionally skip reading from the cache when set to `true`. */ read(dir, { reload } = {}) { const cachedPackageJson = !reload && this.#cache.get(dir); if (cachedPackageJson) { return cachedPackageJson; } const filename = dir.endsWith("package.json") ? resolve3(dir) : resolve3(dir, "package.json"); const fail = (err) => error("Could not read package.json file", err, this.read); const cachedError = !reload && this.#errCache.get(dir); if (cachedError) { throw fail(cachedError); } try { const res = normalizeManifest( asManifest( parse2(readFileSync(filename, { encoding: "utf8" })) ) ); this.#cache.set(dir, res); this.#pathCache.set(res, dir); return res; } catch (err) { const ec = { path: filename, cause: err }; this.#errCache.set(dir, ec); throw fail(ec); } } /** * Optionally reads and parses contents of a `package.json` file at a * directory `dir`. Returns `undefined` if it could not be read. */ maybeRead(dir, { reload } = {}) { try { return this.read(dir, { reload }); } catch { return void 0; } } write(dir, manifest, indent) { const filename = dir.endsWith("package.json") ? resolve3(dir) : resolve3(dir, "package.json"); this.fix(manifest); try { writeFileSync(filename, stringify(manifest, void 0, indent)); this.#cache.set(dir, manifest); this.#pathCache.set(manifest, dir); } catch (err) { this.#cache.delete(dir); this.#pathCache.delete(manifest); throw error( "Could not write package.json file", { path: filename, cause: err }, this.write ); } } save(manifest) { const dir = this.#pathCache.get(manifest); if (!dir) { throw error( "Could not save manifest", { manifest }, this.save ); } this.write(dir, manifest); } fix(manifest) { for (const depType of longDependencyTypes) { const deps = manifest[depType]; if (deps) { manifest[depType] = Object.fromEntries( Object.entries(deps).sort( ([a], [b]) => a.localeCompare(b, "en") ) ); } } } /** * Walks up the directory tree from the current working directory * and returns the path to the first `package.json` file found. * Returns undefined if no package.json is found. 
*/ find(cwd = process.cwd(), home = homedir()) { for (const dir of walkUp(cwd)) { if (dir === home) break; const packageJsonPath = resolve3(dir, "package.json"); if (exists(packageJsonPath)) { return packageJsonPath; } } } }; // ../../src/workspaces/src/index.ts import { basename as basename2, posix, resolve as resolve4 } from "node:path"; var asWSConfig = (conf, path) => { assertWSConfig(conf, path); return typeof conf === "string" ? { packages: [conf] } : Array.isArray(conf) ? { packages: conf } : Object.fromEntries( Object.entries(conf).map(([k, v]) => [ k, typeof v === "string" ? [v] : v ]) ); }; var assertWSConfig = (conf, path) => { if (typeof conf === "string") return; if (Array.isArray(conf)) { for (const c of conf) { if (typeof c !== "string") { throw error("Invalid workspace definition", { path, found: c, wanted: "string" }); } } return; } if (conf && typeof conf === "object") { for (const [group, value] of Object.entries(conf)) { if (typeof value === "string") continue; if (Array.isArray(value)) { for (const c of value) { if (typeof c !== "string") { throw error("Invalid workspace definition", { path, name: group, found: c, wanted: "string" }); } } continue; } throw error("Invalid workspace definition", { path, name: group, found: value, wanted: "string | string[]" }); } return; } throw error("Invalid workspace definition", { path, found: conf, wanted: "string | string[] | { [group: string]: string | string[] }" }); }; var Monorepo = class _Monorepo { /** The project root where vlt.json is found */ projectRoot; /** Scurry object to cache all filesystem calls (mostly globs) */ scurry; // maps both name and path to the workspace objects #workspaces = /* @__PURE__ */ new Map(); #groups = /* @__PURE__ */ new Map(); #config; packageJson; /** * Number of {@link Workspace} objects loaded in this Monorepo */ get size() { return [...this.values()].length; } constructor(projectRoot, options = {}) { this.projectRoot = resolve4(projectRoot); this.scurry = options.scurry ?? new PathScurry(projectRoot); this.packageJson = options.packageJson ?? new PackageJson(); this.#config = options.config; if (options.load) this.load(options.load); } /** * Load the workspace definitions from vlt.json, * canonicalizing the result into the effective `{[group:string]:string[]}` * form. * * Eg: * - `"src/*"` => `{packages:["src/*"]}` * - `{"apps": "src/*"}` => `{apps: ["src/*"]}` */ get config() { if (this.#config) return this.#config; this.#config = asWSConfig( load("workspaces", assertWSConfig) ?? {} ); return this.#config; } /** * Iterating the Monorepo object yields the workspace objects, in as close to * topological dependency order as possible. */ *[Symbol.iterator]() { const [ws] = [...this.values()]; if (!ws) return; for (const workspace of this.runSync(() => { }).keys()) { yield workspace; } } /** * Iterating the Monorepo object yields the workspace objects, in as close to * topological dependency order as possible. */ async *[Symbol.asyncIterator]() { const [ws] = [...this.values()]; if (!ws) return; for (const workspace of (await this.run(() => { })).keys()) { yield workspace; } } /** * By default, loads all workspaces reachable in the Monorepo. * * If provided with one (`string`) or more (`string[]`) group names in * the {@link LoadQuery#groups} field, then only Workspaces in the named * group(s) will be considered. Note that group names are unique string * matches, not globs.
* * If provided with a set of arbitrary path arguments, then only paths * matching the provided pattern(s) will be included. * * These two options intersect, so * `load({groups:'foo', paths:'./foo/[xy]*'})` will only load the workspaces * in the group `foo` that match the paths glob. */ load(query = {}) { const paths = new Set( typeof query.paths === "string" ? [query.paths] : query.paths ?? [] ); const groups = new Set( typeof query.groups === "string" ? [query.groups] : query.groups ?? [] ); const groupsExpanded = {}; for (const [group, pattern] of Object.entries(this.config)) { if (groups.size && !groups.has(group)) continue; groupsExpanded[group] = this.#glob(pattern); } const filter = paths.size ? this.#glob([...paths]) : paths; if (paths.size && !filter.size) return this; for (const [group, matches] of Object.entries(groupsExpanded)) { for (const path of matches) { if (filter.size && !filter.has(path)) continue; this.#loadWS(path, group); } } return this; } // Either load a workspace from disk, or from our internal set, // and assign it to the named group #loadWS(path, group) { const fullpath = resolve4(this.projectRoot, path); const loaded = this.#workspaces.get(fullpath); if (loaded) return loaded; const fromCache = workspaceCache.get(fullpath); const manifest = fromCache?.manifest ?? this.packageJson.read(fullpath); const ws = fromCache ?? new Workspace(path, manifest, fullpath); if (group) ws.groups.push(group); const existingWS = this.#workspaces.get(ws.name); if (existingWS && existingWS.fullpath !== ws.fullpath) { throw error("Duplicate workspace name found", { name: ws.name, path: this.projectRoot, wanted: ws.fullpath, found: existingWS.fullpath }); } this.#workspaces.set(ws.fullpath, ws); this.#workspaces.set(ws.path, ws); this.#workspaces.set(ws.name, ws); for (const name of ws.groups) { const group2 = this.#groups.get(name) ?? /* @__PURE__ */ new Set(); group2.add(ws); this.#groups.set(name, group2); } return ws; } // can't be cached, because it's dependent on the matches set // but still worthwhile to have it defined in one place #globOptions(matches) { const inMatches = (p) => { return !!p?.relativePosix() && (matches.has(p.relativePosix()) || inMatches(p.parent)); }; return { root: this.projectRoot, cwd: this.projectRoot, posix: true, scurry: this.scurry, withFileTypes: false, ignore: { childrenIgnored: (p) => basename2(p.relativePosix()) === "node_modules" || inMatches(p), // ignore if fails to load package.json ignored: (p) => { p.lstatSync(); const rel = p.relativePosix(); if (!rel) return true; const maybeDelete = []; for (const m of matches) { if (rel.startsWith(m + "/")) return true; if (m.startsWith(rel + "/")) { maybeDelete.push(m); } } if (!p.isDirectory()) return true; const pj = p.resolve("package.json").lstatSync(); if (!pj?.isFile()) return true; try { this.packageJson.read(p.fullpath()); } catch { return true; } for (const m of maybeDelete) { matches.delete(m); } matches.add(rel); return false; } } }; } #glob(pattern) { const matches = /* @__PURE__ */ new Set(); globSync(pattern, this.#globOptions(matches)); return matches; } /** * Return the array of workspace dependencies that are found in * the loaded set, for use in calculating dependency graph order for * build operations. * * This does *not* get the full set of dependencies, or expand any * `workspace:` dependencies that are not loaded. * * Call with the `forceLoad` param set to `true` to attempt a full * load if any deps are not currently loaded.
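 *
 * Hedged sketch (the `ws` variable is illustrative):
 *
 *   const deps = monorepo.getDeps(ws)       // only already-loaded deps
 *   const all = monorepo.getDeps(ws, true)  // may trigger one full load()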
*/ getDeps(ws, forceLoad = false) { const { manifest } = ws; const depWorkspaces = []; let didForceLoad = false; for (const depType of [ "dependencies", "devDependencies", "optionalDependencies", "peerDependencies" ]) { const deps = manifest[depType]; if (!deps) continue; for (const [dep, spec] of Object.entries(deps)) { if (spec.startsWith("workspace:")) { let depWS = this.#workspaces.get(dep); if (!depWS) { if (!forceLoad) continue; if (didForceLoad) continue; didForceLoad = true; this.load(); depWS = this.#workspaces.get(dep); if (!depWS) continue; } depWorkspaces.push(depWS); } } } return depWorkspaces; } onCycle(_ws, _cycle, _depPath) { } /** * Return the set of workspaces in the named group. * If the group is not one we know about, then undefined is returned. */ group(group) { return this.#groups.get(group); } /** * Get a loaded workspace by path or name. * * Note that this can only return workspaces that were ingested via a * previous call to {@link Monorepo#load}. */ get(nameOrPath) { return this.#workspaces.get(nameOrPath); } /** * get the list of all loaded workspace names used as keys */ *names() { for (const [key, ws] of this.#workspaces) { if (key === ws.name) yield key; } } /** * get the list of all loaded workspace paths used as keys */ *paths() { for (const [key, ws] of this.#workspaces) { if (key === ws.path) yield key; } } /** * get the workspace objects in no particular order. * this is ever so slightly faster than iterating, because it doesn't * explore the graph to yield results in topological dependency order, * and should be used instead when order doesn't matter. */ *values() { const seen = /* @__PURE__ */ new Set(); for (const ws of this.#workspaces.values()) { if (seen.has(ws.fullpath)) continue; seen.add(ws.fullpath); yield ws; } } /** * Get all the keys (package names and paths) for loaded workspaces. * Union of {@link Monorepo#names} and {@link Monorepo#paths} */ *keys() { for (const ws of this.values()) { yield ws.path; if (ws.name !== ws.path) yield ws.name; } } /** * Filter the monorepo object yielding the workspace objects that matches * either of the {@link WorkspacesLoadedConfig} options provided, in as close * to topological dependency order as possible. */ *filter({ workspace: namesOrPaths, "workspace-group": groupName }) { const globPatternChecks = namesOrPaths?.map( (glob) => minimatch.filter(posix.join(glob)) ); for (const ws of this) { if (groupName?.some((i) => ws.groups.includes(i))) { yield ws; continue; } if (namesOrPaths?.map((i) => posix.join(i)).some((i) => ws.keys.includes(i))) { yield ws; continue; } if (ws.keys.some((key) => globPatternChecks?.some((fn) => fn(key)))) { yield ws; } } } /** * Run an operation asynchronously over all loaded workspaces * * If the `forceLoad` param is true, then it will attempt to do a full load * when encountering a `workspace:` dependency that isn't loaded. * * Note that because the return type appears in the parameters of the * operation function, it must be set explicitly either in the operation * function signature or by calling `run<MyType>` or it'll fall back to * `unknown`, similar to `Array.reduce()`, and for the same reason. 
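 *
 * Hedged sketch (operation body is illustrative):
 *
 *   const results = await monorepo.run(async (ws) => ws.name)
 *   // resolves to a Map from each Workspace to the operation's result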
*/ async run(operation, forceLoad = false) { const [ws, ...rest] = [...this.#workspaces.values()]; if (!ws) { throw error("No workspaces loaded", void 0, this.run); } return graphRun({ graph: [ws, ...rest], getDeps: (ws2) => this.getDeps(ws2, forceLoad), visit: async (ws2, signal, _, depResults) => await operation(ws2, signal, depResults), onCycle: (ws2, cycle, path) => this.onCycle(ws2, cycle, path) }); } /** * Run an operation synchronously over all loaded workspaces * * If the `forceLoad` param is true, then it will attempt to do a full load * when encountering a `workspace:` dependency that isn't loaded. * * Note that because the return type appears in the parameters of the * operation function, it must be set explicitly either in the operation * function signature or by calling `runSync<MyType>` or it'll fall back to * `unknown`, similar to `Array.reduce()`, and for the same reason. */ runSync(operation, forceLoad = false) { const [ws, ...rest] = [...this.#workspaces.values()]; if (!ws) { throw error("No workspaces loaded", void 0, this.run); } return graphRunSync({ graph: [ws, ...rest], getDeps: (ws2) => this.getDeps(ws2, forceLoad), visit: (ws2, signal, _, depResults) => operation(ws2, signal, depResults), onCycle: (ws2, cycle, path) => this.onCycle(ws2, cycle, path) }); } /** * Convenience method to instantiate and load in one call. * Returns undefined if the project is not a monorepo workspaces * root, otherwise returns the loaded Monorepo. */ static maybeLoad(projectRoot, options = { load: {} }) { const config = load("workspaces", assertWSConfig); if (!config) return; return new _Monorepo(projectRoot, { load: {}, ...options }); } /** * Convenience method to instantiate and load in one call. * Throws if called on a directory that is not a workspaces root. */ static load(projectRoot, options = { load: {} }) { const { load: load2 = {} } = options; return new _Monorepo(projectRoot, { ...options, load: load2 }); } }; var workspaceCache = /* @__PURE__ */ new Map(); var Workspace = class { id; path; fullpath; manifest; groups = []; name; #keys; constructor(path, manifest, fullpath) { this.id = joinDepIDTuple(["workspace", path]); workspaceCache.set(fullpath, this); this.path = path; this.fullpath = fullpath; this.manifest = manifest; this.name = manifest.name ?? path; } get keys() { if (this.#keys) { return this.#keys; } this.#keys = [this.name, this.path, this.fullpath]; return this.#keys; } }; export { platformCheck, pickManifest, spawn, revs, resolve, clone, is, isClean, getUser, PackageJson, asWSConfig, assertWSConfig, Monorepo }; //# sourceMappingURL=chunk-GTAUGWLW.js.map