var global = globalThis;
import {Buffer} from "node:buffer";
import {setTimeout,clearTimeout,setImmediate,clearImmediate,setInterval,clearInterval} from "node:timers";
import {createRequire as _vlt_createRequire} from "node:module";
var require = _vlt_createRequire(import.meta.filename);
import {
Header,
Pax,
create
} from "./chunk-SLTPNBLH.js";
import {
Monorepo
} from "./chunk-D36DAG56.js";
import {
clone,
pickManifest,
resolve,
revs
} from "./chunk-LEKM5RQR.js";
import {
PackageJson
} from "./chunk-YWPMIBJS.js";
import {
RegistryClient
} from "./chunk-QAFV2NQX.js";
import {
rimraf
} from "./chunk-GADRCS54.js";
import {
Spec2 as Spec
} from "./chunk-264UXZEG.js";
import {
asError,
asPackument,
isIntegrity
} from "./chunk-X4RDKJKD.js";
import {
XDG,
error
} from "./chunk-RV3EHS4P.js";
// ../../src/tar/src/unpack.ts
import { randomBytes } from "node:crypto";
import { lstat, mkdir, rename, writeFile } from "node:fs/promises";
import { basename, dirname, parse, resolve as resolve2 } from "node:path";
import { unzip as unzipCB } from "node:zlib";
// ../../src/tar/src/find-tar-dir.ts
var findTarDir = (path, tarDir) => {
if (tarDir !== void 0) return tarDir;
if (!path) return void 0;
const i = path.indexOf("/", path.startsWith("./") ? 2 : 0);
if (i === -1) return void 0;
const chomp = path.substring(0, i);
if (chomp === "." || chomp === ".." || chomp === "" || chomp === "./." || chomp === "./.." || chomp === "./") {
return void 0;
}
return chomp + "/";
};
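// For example (illustrative, derived from the logic above): a typical npm
// tarball entry path like "package/lib/index.js" yields a tarDir of
// "package/", while a bare top-level path like "index.js" yields undefined.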
// ../../src/tar/src/unpack.ts
var unzip = async (input) => new Promise(
(res, rej) => (
/* c8 ignore start */
unzipCB(input, (er, result) => er ? rej(er) : res(result))
)
/* c8 ignore stop */
);
var exists = async (path) => {
try {
await lstat(path);
return true;
} catch {
return false;
}
};
var id = 1;
var tmp = randomBytes(6).toString("hex") + ".";
var tmpSuffix = () => tmp + String(id++);
var checkFs = (h, tarDir) => {
if (!h.path) return false;
if (!tarDir) return false;
h.path = h.path.replace(/[\\/]+/g, "/");
const parsed = parse(h.path);
if (parsed.root) return false;
const p = h.path.replace(/\\/, "/");
  if (/(^|\/)\.\.(\/|$)/.test(p)) return false;
if (!p.startsWith(tarDir)) return false;
return true;
};
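// checkFs normalizes the entry path and rejects anything unsafe to write:
// entries with no path, absolute paths, paths containing ".." segments,
// and entries that fall outside the detected tarDir prefix.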
var write = async (path, body, executable = false) => {
await mkdirp(dirname(path));
await writeFile(path, body, {
mode: executable ? 511 : 438
});
};
var made = /* @__PURE__ */ new Set();
var making = /* @__PURE__ */ new Map();
var mkdirp = async (d) => {
if (!made.has(d)) {
const m = making.get(d) ?? mkdir(d, { recursive: true, mode: 511 }).then(
() => making.delete(d)
);
making.set(d, m);
await m;
made.add(d);
}
};
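// Gzip streams begin with the magic bytes 0x1f 0x8b (31, 139); unpack
// inflates the data first when it sees that signature, then hands the raw
// tar buffer to unpackUnzipped.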
var unpack = async (tarData, target) => {
const isGzip = tarData[0] === 31 && tarData[1] === 139;
await unpackUnzipped(
isGzip ? await unzip(tarData) : tarData,
target
);
};
var unpackUnzipped = async (buffer, target) => {
const isGzip = buffer[0] === 31 && buffer[1] === 139;
if (isGzip) {
throw error("still gzipped after unzipping", {
found: isGzip,
wanted: false
});
}
if (buffer.length % 512 !== 0) {
throw error("Invalid tarball: length not divisible by 512", {
found: buffer.length
});
}
if (buffer.length < 1024) {
throw error(
"Invalid tarball: not terminated by 1024 null bytes",
{ found: buffer.length }
);
}
for (let i = buffer.length - 1024; i < buffer.length; i++) {
if (buffer[i] !== 0) {
throw error(
"Invalid tarball: not terminated by 1024 null bytes",
{ found: buffer.subarray(i, i + 10) }
);
}
}
const tmp2 = dirname(target) + "/." + basename(target) + "." + tmpSuffix();
const og = tmp2 + ".ORIGINAL";
await Promise.all([rimraf(tmp2), rimraf(og)]);
let succeeded = false;
try {
let tarDir = void 0;
let offset = 0;
let h;
let ex = void 0;
let gex = void 0;
while (offset < buffer.length && !(h = new Header(buffer, offset, ex, gex)).nullBlock) {
offset += 512;
ex = void 0;
gex = void 0;
const size = h.size ?? 0;
const body = buffer.subarray(offset, offset + size);
if (!h.cksumValid) continue;
offset += 512 * Math.ceil(size / 512);
switch (h.type) {
case "File":
if (!tarDir) tarDir = findTarDir(h.path, tarDir);
if (!tarDir) continue;
if (!checkFs(h, tarDir)) continue;
await write(
resolve2(tmp2, h.path.substring(tarDir.length)),
body,
        // if the mode is world-executable, write it as an executable;
        // otherwise write it with default (non-executable) permissions.
1 === ((h.mode ?? 1638) & 1)
);
break;
case "Directory":
if (!tarDir) tarDir = findTarDir(h.path, tarDir);
if (!tarDir) continue;
if (!checkFs(h, tarDir)) continue;
await mkdirp(resolve2(tmp2, h.path.substring(tarDir.length)));
break;
case "GlobalExtendedHeader":
gex = Pax.parse(body.toString(), gex, true);
break;
case "ExtendedHeader":
case "OldExtendedHeader":
ex = Pax.parse(body.toString(), ex, false);
break;
case "NextFileHasLongPath":
case "OldGnuLongPath":
ex ??= /* @__PURE__ */ Object.create(null);
ex.path = body.toString().replace(/\0.*/, "");
break;
}
}
const targetExists = await exists(target);
if (targetExists) await rename(target, og);
await rename(tmp2, target);
if (targetExists) await rimraf(og);
succeeded = true;
} finally {
if (!succeeded) {
if (await exists(og)) {
await rimraf(target);
await rename(og, target);
}
await rimraf(tmp2);
}
}
};
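// Minimal usage sketch (hypothetical paths, assuming a .tgz already on disk):
//
//   import { readFile } from 'node:fs/promises'
//   const tgz = await readFile('./pkg-1.0.0.tgz')
//   await unpack(tgz, './node_modules/pkg')
//
// Extraction is staged into a hidden sibling temp dir and swapped into place
// with rename, so a failed unpack restores any previous target.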
// ../../src/tar/src/pool.ts
import os from "node:os";
// ../../src/tar/src/unpack-request.ts
var ID = 1;
var UnpackRequest = class {
id = ID++;
tarData;
target;
resolve;
reject;
promise = new Promise((res, rej) => {
this.resolve = res;
this.reject = rej;
});
constructor(tarData, target) {
this.tarData = tarData;
this.target = target;
}
};
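// UnpackRequest is a small deferred: the promise is created eagerly and its
// resolve/reject handles are captured so the Pool can settle it later, once
// a worker reports back with a matching id.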
// ../../src/tar/src/worker.ts
var isObj = (o) => !!o && typeof o === "object";
var isResponseOK = (o) => isObj(o) && typeof o.id === "number" && o.ok === true;
var Worker = class {
onMessage;
constructor(onMessage) {
this.onMessage = onMessage;
}
async process(req) {
const { target, tarData, id: id2 } = req;
try {
await unpack(tarData, target);
const m = { id: id2, ok: true };
this.onMessage(m);
} catch (error2) {
const m = { id: id2, error: error2 };
this.onMessage(m);
}
}
};
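// Note: in this bundled build, Worker.process awaits unpack() directly in
// the same process; the { id, ok } / { id, error } message shape mirrors a
// thread-style protocol so the Pool can treat completion uniformly.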
// ../../src/tar/src/pool.ts
var Pool = class {
  /**
   * Maximum number of unpack workers to employ at once. Defaults to 8 times
   * one less than the number of available CPUs, with a minimum of 8.
   */
/* c8 ignore next */
jobs = 8 * (Math.max(os.availableParallelism(), 2) - 1);
/**
* Set of currently active worker threads
*/
workers = /* @__PURE__ */ new Set();
/**
* Queue of requests awaiting an available worker
*/
queue = [];
/**
* Requests that have been assigned to a worker, but have not yet
* been confirmed completed.
*/
pending = /* @__PURE__ */ new Map();
// handle a message from the worker
#onMessage(w, m) {
const { id: id2 } = m;
const ur = this.pending.get(id2);
if (!ur) return;
if (isResponseOK(m)) {
ur.resolve();
} else {
ur.reject(
error(
asError(m.error, "failed without error message").message,
{
found: m,
cause: m.error
}
)
);
}
const next = this.queue.shift();
if (!next) {
this.workers.delete(w);
} else {
void w.process(next);
}
}
// create a new worker
#createWorker(req) {
const w = new Worker(
(m) => this.#onMessage(w, m)
);
this.workers.add(w);
void w.process(req);
}
  /**
   * Provide the tar data to be unpacked and the location where it is to be
   * placed. Creates a new worker while fewer than `jobs` are active, and
   * otherwise queues the request for workers to pick up as they become
   * available.
   *
   * The returned promise resolves when the provided tarball has been
   * extracted.
   */
async unpack(tarData, target) {
const ur = new UnpackRequest(tarData, target);
this.pending.set(ur.id, ur);
if (this.workers.size < this.jobs) {
this.#createWorker(ur);
} else {
this.queue.push(ur);
}
return ur.promise;
}
};
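// Hypothetical usage sketch (names and data assumed, not part of this module):
//
//   const pool = new Pool()
//   await Promise.all(
//     entries.map(({ tarData, target }) => pool.unpack(tarData, target)),
//   )
//
// Requests beyond the `jobs` limit wait in the queue; each finishing worker
// pulls the next queued request before retiring.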
// ../../src/package-info/src/index.ts
import { randomBytes as randomBytes2 } from "node:crypto";
import { readFile, rm as rm2, stat, symlink } from "node:fs/promises";
import {
basename as basename2,
dirname as dirname2,
resolve as pathResolve,
relative
} from "node:path";
// ../../src/package-info/src/rename.ts
import { rename as fsRename, rm } from "node:fs/promises";
var { platform } = process;
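// On Windows, rename can fail with EPERM when the destination exists or is
// briefly locked; the wrapper below retries up to three times, removing the
// destination before each retry. On other platforms fs.rename is used as-is.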
var rename2 = platform !== "win32" ? fsRename : async function(oldPath, newPath) {
let retries = 3;
const retry = async (er) => {
if (retries > 0 && er.code === "EPERM") {
retries--;
await rm(newPath, { recursive: true, force: true });
return fsRename(oldPath, newPath).then(() => {
}, retry);
} else {
throw er;
}
};
return fsRename(oldPath, newPath).then(() => {
}, retry);
};
// ../../src/package-info/src/index.ts
var xdg = new XDG("vlt");
var PackageInfoClient = class {
#registryClient;
#projectRoot;
#tarPool;
options;
#resolutions = /* @__PURE__ */ new Map();
packageJson;
monorepo;
#trustedIntegrities = /* @__PURE__ */ new Map();
get registryClient() {
if (!this.#registryClient) {
this.#registryClient = new RegistryClient(this.options);
}
return this.#registryClient;
}
get tarPool() {
if (!this.#tarPool) this.#tarPool = new Pool();
return this.#tarPool;
}
constructor(options = {}) {
this.options = options;
this.#projectRoot = options.projectRoot || process.cwd();
this.packageJson = options.packageJson ?? new PackageJson();
const wsLoad = {
...options.workspace?.length && { paths: options.workspace },
...options["workspace-group"]?.length && {
groups: options["workspace-group"]
}
};
this.monorepo = options.monorepo ?? Monorepo.maybeLoad(this.#projectRoot, {
load: wsLoad,
packageJson: this.packageJson
});
}
async extract(spec, target, options = {}) {
if (typeof spec === "string")
spec = Spec.parse(spec, this.options);
const { from = this.#projectRoot, integrity, resolved } = options;
const f = spec.final;
const r = integrity && resolved ? { resolved, integrity, spec } : await this.resolve(spec, options);
switch (f.type) {
case "git": {
const {
gitRemote,
gitCommittish,
remoteURL,
gitSelectorParsed
} = f;
if (!remoteURL) {
if (!gitRemote)
throw this.#resolveError(
spec,
options,
"no remote on git: specifier"
);
const { path } = gitSelectorParsed ?? {};
if (path !== void 0) {
const tmp2 = pathResolve(
dirname2(target),
`.TEMP.${basename2(target)}-${randomBytes2(6).toString("hex")}`
);
await clone(gitRemote, gitCommittish, tmp2, { spec });
const src = pathResolve(tmp2, path);
await rename2(src, target);
void rm2(tmp2, { recursive: true, force: true });
} else {
await clone(gitRemote, gitCommittish, target, { spec });
void rm2(target + "/.git", { recursive: true });
}
return r;
}
}
case "registry":
case "remote": {
const trustIntegrity = this.#trustedIntegrities.get(r.resolved) === r.integrity;
const response = await this.registryClient.request(
r.resolved,
{
integrity: r.integrity,
trustIntegrity
}
);
if (response.statusCode !== 200) {
throw this.#resolveError(
spec,
options,
"failed to fetch tarball",
{
url: r.resolved,
response
}
);
}
if (!trustIntegrity && response.checkIntegrity({ spec, url: resolved })) {
this.#trustedIntegrities.set(r.resolved, response.integrity);
}
try {
await this.tarPool.unpack(response.buffer(), target);
} catch (er) {
throw this.#resolveError(
spec,
options,
"tar unpack failed",
{ cause: er }
);
}
return r;
}
case "file": {
const { file } = f;
if (file === void 0)
throw this.#resolveError(spec, options, "no file path");
const path = pathResolve(from, file);
const st = await stat(path);
if (st.isFile()) {
try {
await this.tarPool.unpack(
await this.tarball(spec, options),
target
);
} catch (er) {
throw this.#resolveError(
spec,
options,
"tar unpack failed",
{ cause: er }
);
}
} else if (st.isDirectory()) {
const rel = relative(dirname2(target), path);
await symlink(rel, target, "dir");
} else {
throw this.#resolveError(
spec,
options,
"file: specifier does not resolve to directory or tarball"
);
}
return r;
}
case "workspace": {
const ws = this.#getWS(spec, options);
const rel = relative(dirname2(target), ws.fullpath);
await symlink(rel, target, "dir");
return r;
}
}
}
#getWS(spec, options) {
const { workspace } = spec;
if (workspace === void 0)
throw this.#resolveError(spec, options, "no workspace ID");
if (!this.monorepo) {
throw this.#resolveError(
spec,
options,
"Not in a monorepo, cannot resolve workspace spec"
);
}
const ws = this.monorepo.get(workspace);
if (!ws) {
throw this.#resolveError(spec, options, "workspace not found", {
wanted: workspace
});
}
return ws;
}
async tarball(spec, options = {}) {
if (typeof spec === "string")
spec = Spec.parse(spec, this.options);
const f = spec.final;
switch (f.type) {
case "registry": {
const { dist } = await this.manifest(spec, options);
if (!dist)
throw this.#resolveError(
spec,
options,
"no dist object found in manifest"
);
const { tarball, integrity } = dist;
if (!tarball) {
throw this.#resolveError(
spec,
options,
"no tarball found in manifest.dist"
);
}
const trustIntegrity = this.#trustedIntegrities.get(tarball) === integrity;
const response = await this.registryClient.request(tarball, {
...options,
integrity,
trustIntegrity
});
if (response.statusCode !== 200) {
throw this.#resolveError(
spec,
options,
"failed to fetch tarball",
{ response, url: tarball }
);
}
if (!trustIntegrity && response.checkIntegrity({ spec, url: tarball })) {
this.#trustedIntegrities.set(tarball, response.integrity);
}
return response.buffer();
}
case "git": {
const {
remoteURL,
gitRemote,
gitCommittish,
gitSelectorParsed
} = f;
const s = spec;
if (!remoteURL) {
if (!gitRemote) {
throw this.#resolveError(
spec,
options,
"no remote on git: specifier"
);
}
const { path } = gitSelectorParsed ?? {};
return await this.#tmpdir(async (dir) => {
await clone(gitRemote, gitCommittish, dir + "/package", {
spec: s
});
let cwd = dir;
if (path !== void 0) {
const src = pathResolve(dir, "package", path);
cwd = dirname2(src);
const pkg = pathResolve(cwd, "package");
if (src !== pkg) {
const rand = randomBytes2(6).toString("hex");
await rename2(pkg, pkg + rand).catch(() => {
});
await rename2(src, pkg);
}
}
return create({ cwd, gzip: true }, ["package"]).concat();
});
}
}
case "remote": {
const { remoteURL } = f;
if (!remoteURL) {
throw this.#resolveError(spec, options);
}
const response = await this.registryClient.request(remoteURL);
if (response.statusCode !== 200) {
throw this.#resolveError(
spec,
options,
"failed to fetch URL",
{ response, url: remoteURL }
);
}
return response.buffer();
}
case "file": {
const { file } = f;
if (file === void 0)
throw this.#resolveError(spec, options, "no file path");
const { from = this.#projectRoot } = options;
const path = pathResolve(from, file);
const st = await stat(path);
if (st.isDirectory()) {
const p = dirname2(path);
const b = basename2(path);
return create({ cwd: p, gzip: true }, [b]).concat();
}
return readFile(path);
}
case "workspace": {
const ws = this.#getWS(spec, options);
const p = dirname2(ws.fullpath);
const b = basename2(ws.fullpath);
return create({ cwd: p, gzip: true }, [b]).concat();
}
}
}
async manifest(spec, options = {}) {
const { from = this.#projectRoot } = options;
if (typeof spec === "string")
spec = Spec.parse(spec, this.options);
const f = spec.final;
switch (f.type) {
case "registry": {
const mani = pickManifest(
await this.packument(f, options),
spec,
options
);
if (!mani) throw this.#resolveError(spec, options);
const { integrity, tarball } = mani.dist ?? {};
if (isIntegrity(integrity) && tarball) {
const registryOrigin = new URL(String(f.registry)).origin;
const tgzOrigin = new URL(tarball).origin;
if (tgzOrigin === registryOrigin) {
this.#trustedIntegrities.set(tarball, integrity);
}
}
return mani;
}
case "git": {
const {
gitRemote,
gitCommittish,
remoteURL,
gitSelectorParsed
} = f;
if (!remoteURL) {
const s = spec;
if (!gitRemote)
throw this.#resolveError(spec, options, "no git remote");
return await this.#tmpdir(async (dir) => {
await clone(gitRemote, gitCommittish, dir, { spec: s });
const { path } = gitSelectorParsed ?? {};
const pkgDir = path !== void 0 ? pathResolve(dir, path) : dir;
return this.packageJson.read(pkgDir);
});
}
}
case "remote": {
const { remoteURL } = f;
if (!remoteURL) {
throw this.#resolveError(
spec,
options,
"no remoteURL on remote specifier"
);
}
const s = spec;
return await this.#tmpdir(async (dir) => {
const response = await this.registryClient.request(remoteURL);
if (response.statusCode !== 200) {
throw this.#resolveError(
s,
options,
"failed to fetch URL",
{ response, url: remoteURL }
);
}
const buf = response.buffer();
try {
await this.tarPool.unpack(buf, dir);
} catch (er) {
throw this.#resolveError(
s,
options,
"tar unpack failed",
{ cause: er }
);
}
return this.packageJson.read(dir);
});
}
case "file": {
const { file } = f;
if (file === void 0)
throw this.#resolveError(spec, options, "no file path");
const path = pathResolve(from, file);
const st = await stat(path);
if (st.isDirectory()) {
return this.packageJson.read(path);
}
const s = spec;
return await this.#tmpdir(async (dir) => {
try {
await this.tarPool.unpack(await readFile(path), dir);
} catch (er) {
throw this.#resolveError(
s,
options,
"tar unpack failed",
{ cause: er }
);
}
return this.packageJson.read(dir);
});
}
case "workspace": {
return this.#getWS(spec, options).manifest;
}
}
}
async packument(spec, options = {}) {
if (typeof spec === "string")
spec = Spec.parse(spec, this.options);
const f = spec.final;
switch (f.type) {
// RevDoc is the equivalent of a packument for a git repo
case "git": {
const { gitRemote } = f;
if (!gitRemote) {
throw this.#resolveError(
spec,
options,
"git remote could not be determined"
);
}
const revDoc = await revs(gitRemote, {
cwd: this.options.projectRoot
});
if (!revDoc) throw this.#resolveError(spec, options);
return asPackument(revDoc);
}
// these are all faked packuments
case "file":
case "workspace":
case "remote": {
const manifest = await this.manifest(f, options);
return {
name: manifest.name ?? "",
"dist-tags": {
latest: manifest.version ?? ""
},
versions: {
[manifest.version ?? ""]: manifest
}
};
}
case "registry": {
const { registry, name } = f;
const pakuURL = new URL(name, registry);
const response = await this.registryClient.request(pakuURL, {
headers: {
accept: "application/json"
}
});
if (response.statusCode !== 200) {
throw this.#resolveError(
spec,
options,
"failed to fetch packument",
{
url: pakuURL,
response
}
);
}
return response.json();
}
}
}
async resolve(spec, options = {}) {
const memoKey = String(spec);
if (typeof spec === "string")
spec = Spec.parse(spec, this.options);
const memo = this.#resolutions.get(memoKey);
if (memo) return memo;
const f = spec.final;
switch (f.type) {
case "file": {
const { file } = f;
if (!file || !f.file) {
throw this.#resolveError(
spec,
options,
"no path on file: specifier"
);
}
const { from = this.#projectRoot } = options;
const resolved = pathResolve(from, f.file);
const r = { resolved, spec };
this.#resolutions.set(memoKey, r);
return r;
}
case "remote": {
const { remoteURL } = f;
if (!remoteURL)
throw this.#resolveError(
spec,
options,
"no URL in remote specifier"
);
const r = { resolved: remoteURL, spec };
this.#resolutions.set(memoKey, r);
return r;
}
case "workspace": {
const ws = this.#getWS(spec, options);
return {
resolved: ws.fullpath,
spec
};
}
case "registry": {
const mani = await this.manifest(spec, options);
if (mani.dist) {
const { integrity, tarball, signatures } = mani.dist;
if (tarball) {
const r = {
resolved: tarball,
integrity,
signatures,
spec
};
this.#resolutions.set(memoKey, r);
return r;
}
}
throw this.#resolveError(spec, options);
}
case "git": {
const { gitRemote, remoteURL, gitSelectorParsed } = f;
if (remoteURL && gitSelectorParsed?.path === void 0) {
const r = { resolved: remoteURL, spec };
this.#resolutions.set(memoKey, r);
return r;
}
if (!gitRemote) {
throw this.#resolveError(
spec,
options,
"no remote on git specifier"
);
}
const rev = await resolve(gitRemote, f.gitCommittish, {
spec
});
if (rev) {
const r = {
resolved: `${gitRemote}#${rev.sha}`,
spec
};
if (gitSelectorParsed) {
r.resolved += Object.entries(gitSelectorParsed).filter(([_, v]) => v).map(([k, v]) => `::${k}:${v}`).join("");
}
this.#resolutions.set(memoKey, r);
return r;
}
const s = spec;
return this.#tmpdir(async (tmpdir) => {
const sha = await clone(
gitRemote,
s.gitCommittish,
tmpdir,
{
spec: s
}
);
const r = {
resolved: `${gitRemote}#${sha}`,
spec: s
};
this.#resolutions.set(memoKey, r);
return r;
});
}
}
}
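  // Run fn with a throwaway scratch directory under the XDG runtime dir
  // ("vlt"/package-info/<random>); the directory is removed without being
  // awaited once fn settles.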
async #tmpdir(fn) {
const p = `package-info/${randomBytes2(6).toString("hex")}`;
const dir = xdg.runtime(p);
try {
return await fn(dir);
} finally {
void rm2(dir, { recursive: true, force: true });
}
}
  // build a standardized ERESOLVE error for resolution failures
#resolveError(spec, options = {}, message = "Could not resolve", extra = {}) {
const { from = this.#projectRoot } = options;
const er = error(
message,
{
code: "ERESOLVE",
spec,
from,
...extra
},
this.#resolveError
);
return er;
}
};
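// Hypothetical usage sketch (spec string and paths are examples only):
//
//   const client = new PackageInfoClient({ projectRoot: process.cwd() })
//   const manifest = await client.manifest('some-pkg@1.0.0')
//   const { resolved, integrity } = await client.resolve('some-pkg@1.0.0')
//   await client.extract('some-pkg@1.0.0', './node_modules/some-pkg')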
export {
PackageInfoClient
};
//# sourceMappingURL=chunk-FZMPFIDM.js.map