var global = globalThis;
import {Buffer} from "node:buffer";
import {setTimeout,clearTimeout,setImmediate,clearImmediate,setInterval,clearInterval} from "node:timers";
import {createRequire as _vlt_createRequire} from "node:module";
var require = _vlt_createRequire(import.meta.filename);
import {
create,
list
} from "./chunk-SLTPNBLH.js";
import {
minimatch
} from "./chunk-6YRWYWZQ.js";
import {
Spec2 as Spec
} from "./chunk-264UXZEG.js";
import {
error
} from "./chunk-RV3EHS4P.js";
import {
__commonJS,
__require,
__toESM
} from "./chunk-AECDW3EJ.js";
// ../../node_modules/.pnpm/minipass@7.1.2/node_modules/minipass/dist/commonjs/index.js
var require_commonjs = __commonJS({
"../../node_modules/.pnpm/minipass@7.1.2/node_modules/minipass/dist/commonjs/index.js"(exports) {
"use strict";
var __importDefault = exports && exports.__importDefault || function(mod) {
return mod && mod.__esModule ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
var proc = typeof process === "object" && process ? process : {
stdout: null,
stderr: null
};
var node_events_1 = __require("node:events");
var node_stream_1 = __importDefault(__require("node:stream"));
var node_string_decoder_1 = __require("node:string_decoder");
var isStream = (s) => !!s && typeof s === "object" && (s instanceof Minipass || s instanceof node_stream_1.default || (0, exports.isReadable)(s) || (0, exports.isWritable)(s));
exports.isStream = isStream;
var isReadable = (s) => !!s && typeof s === "object" && s instanceof node_events_1.EventEmitter && typeof s.pipe === "function" && // node core Writable streams have a pipe() method, but it throws
s.pipe !== node_stream_1.default.Writable.prototype.pipe;
exports.isReadable = isReadable;
var isWritable = (s) => !!s && typeof s === "object" && s instanceof node_events_1.EventEmitter && typeof s.write === "function" && typeof s.end === "function";
exports.isWritable = isWritable;
var EOF = Symbol("EOF");
var MAYBE_EMIT_END = Symbol("maybeEmitEnd");
var EMITTED_END = Symbol("emittedEnd");
var EMITTING_END = Symbol("emittingEnd");
var EMITTED_ERROR = Symbol("emittedError");
var CLOSED = Symbol("closed");
var READ = Symbol("read");
var FLUSH = Symbol("flush");
var FLUSHCHUNK = Symbol("flushChunk");
var ENCODING = Symbol("encoding");
var DECODER = Symbol("decoder");
var FLOWING = Symbol("flowing");
var PAUSED = Symbol("paused");
var RESUME = Symbol("resume");
var BUFFER = Symbol("buffer");
var PIPES = Symbol("pipes");
var BUFFERLENGTH = Symbol("bufferLength");
var BUFFERPUSH = Symbol("bufferPush");
var BUFFERSHIFT = Symbol("bufferShift");
var OBJECTMODE = Symbol("objectMode");
var DESTROYED = Symbol("destroyed");
var ERROR = Symbol("error");
var EMITDATA = Symbol("emitData");
var EMITEND = Symbol("emitEnd");
var EMITEND2 = Symbol("emitEnd2");
var ASYNC = Symbol("async");
var ABORT = Symbol("abort");
var ABORTED = Symbol("aborted");
var SIGNAL = Symbol("signal");
var DATALISTENERS = Symbol("dataListeners");
var DISCARDED = Symbol("discarded");
var defer = (fn) => Promise.resolve().then(fn);
var nodefer = (fn) => fn();
var isEndish = (ev) => ev === "end" || ev === "finish" || ev === "prefinish";
var isArrayBufferLike = (b) => b instanceof ArrayBuffer || !!b && typeof b === "object" && b.constructor && b.constructor.name === "ArrayBuffer" && b.byteLength >= 0;
var isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
var Pipe = class {
src;
dest;
opts;
ondrain;
constructor(src, dest, opts) {
this.src = src;
this.dest = dest;
this.opts = opts;
this.ondrain = () => src[RESUME]();
this.dest.on("drain", this.ondrain);
}
unpipe() {
this.dest.removeListener("drain", this.ondrain);
}
// only here for the prototype
/* c8 ignore start */
proxyErrors(_er) {
}
/* c8 ignore stop */
end() {
this.unpipe();
if (this.opts.end)
this.dest.end();
}
};
var PipeProxyErrors = class extends Pipe {
unpipe() {
this.src.removeListener("error", this.proxyErrors);
super.unpipe();
}
constructor(src, dest, opts) {
super(src, dest, opts);
this.proxyErrors = (er) => dest.emit("error", er);
src.on("error", this.proxyErrors);
}
};
var isObjectModeOptions = (o) => !!o.objectMode;
var isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== "buffer";
var Minipass = class extends node_events_1.EventEmitter {
[FLOWING] = false;
[PAUSED] = false;
[PIPES] = [];
[BUFFER] = [];
[OBJECTMODE];
[ENCODING];
[ASYNC];
[DECODER];
[EOF] = false;
[EMITTED_END] = false;
[EMITTING_END] = false;
[CLOSED] = false;
[EMITTED_ERROR] = null;
[BUFFERLENGTH] = 0;
[DESTROYED] = false;
[SIGNAL];
[ABORTED] = false;
[DATALISTENERS] = 0;
[DISCARDED] = false;
/**
* true if the stream can be written
*/
writable = true;
/**
* true if the stream can be read
*/
readable = true;
/**
* If `RType` is Buffer, then options do not need to be provided.
* Otherwise, an options object must be provided to specify either
* {@link Minipass.SharedOptions.objectMode} or
* {@link Minipass.SharedOptions.encoding}, as appropriate.
*/
constructor(...args) {
const options = args[0] || {};
super();
if (options.objectMode && typeof options.encoding === "string") {
throw new TypeError("Encoding and objectMode may not be used together");
}
if (isObjectModeOptions(options)) {
this[OBJECTMODE] = true;
this[ENCODING] = null;
} else if (isEncodingOptions(options)) {
this[ENCODING] = options.encoding;
this[OBJECTMODE] = false;
} else {
this[OBJECTMODE] = false;
this[ENCODING] = null;
}
this[ASYNC] = !!options.async;
this[DECODER] = this[ENCODING] ? new node_string_decoder_1.StringDecoder(this[ENCODING]) : null;
if (options && options.debugExposeBuffer === true) {
Object.defineProperty(this, "buffer", { get: () => this[BUFFER] });
}
if (options && options.debugExposePipes === true) {
Object.defineProperty(this, "pipes", { get: () => this[PIPES] });
}
const { signal } = options;
if (signal) {
this[SIGNAL] = signal;
if (signal.aborted) {
this[ABORT]();
} else {
signal.addEventListener("abort", () => this[ABORT]());
}
}
}
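// Illustrative sketch (not part of the bundled source): how the three
// option branches above select a stream mode. Assumes the published
// `minipass` package export.
//
//   const { Minipass } = require('minipass');
//   const bytes = new Minipass();                       // Buffer mode
//   const text = new Minipass({ encoding: 'utf8' });    // string mode
//   const objects = new Minipass({ objectMode: true }); // objectMode
//   // Combining the two throws, per the check above:
//   // new Minipass({ objectMode: true, encoding: 'utf8' }); // TypeError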
/**
* The amount of data stored in the buffer waiting to be read.
*
* For Buffer streams, this will be the total byte length.
* For string encoding streams, this will be the string character length,
* according to JavaScript's `string.length` logic.
* For objectMode streams, this is a count of the items waiting to be
* emitted.
*/
get bufferLength() {
return this[BUFFERLENGTH];
}
/**
* The `BufferEncoding` currently in use, or `null`
*/
get encoding() {
return this[ENCODING];
}
/**
* @deprecated - This is a read-only property
*/
set encoding(_enc) {
throw new Error("Encoding must be set at instantiation time");
}
/**
* @deprecated - Encoding may only be set at instantiation time
*/
setEncoding(_enc) {
throw new Error("Encoding must be set at instantiation time");
}
/**
* True if this is an objectMode stream
*/
get objectMode() {
return this[OBJECTMODE];
}
/**
* @deprecated - This is a read-only property
*/
set objectMode(_om) {
throw new Error("objectMode must be set at instantiation time");
}
/**
* true if this is an async stream
*/
get ["async"]() {
return this[ASYNC];
}
/**
* Set to true to make this stream async.
*
* Once set, it cannot be unset, as this would potentially cause incorrect
* behavior. Ie, a sync stream can be made async, but an async stream
* cannot be safely made sync.
*/
set ["async"](a) {
this[ASYNC] = this[ASYNC] || !!a;
}
// drop everything and get out of the flow completely
[ABORT]() {
this[ABORTED] = true;
this.emit("abort", this[SIGNAL]?.reason);
this.destroy(this[SIGNAL]?.reason);
}
/**
* True if the stream has been aborted.
*/
get aborted() {
return this[ABORTED];
}
/**
* No-op setter. Stream aborted status is set via the AbortSignal provided
* in the constructor options.
*/
set aborted(_) {
}
write(chunk, encoding, cb) {
if (this[ABORTED])
return false;
if (this[EOF])
throw new Error("write after end");
if (this[DESTROYED]) {
this.emit("error", Object.assign(new Error("Cannot call write after a stream was destroyed"), { code: "ERR_STREAM_DESTROYED" }));
return true;
}
if (typeof encoding === "function") {
cb = encoding;
encoding = "utf8";
}
if (!encoding)
encoding = "utf8";
const fn = this[ASYNC] ? defer : nodefer;
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk)) {
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
} else if (isArrayBufferLike(chunk)) {
chunk = Buffer.from(chunk);
} else if (typeof chunk !== "string") {
throw new Error("Non-contiguous data written to non-objectMode stream");
}
}
if (this[OBJECTMODE]) {
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
this[FLUSH](true);
if (this[FLOWING])
this.emit("data", chunk);
else
this[BUFFERPUSH](chunk);
if (this[BUFFERLENGTH] !== 0)
this.emit("readable");
if (cb)
fn(cb);
return this[FLOWING];
}
if (!chunk.length) {
if (this[BUFFERLENGTH] !== 0)
this.emit("readable");
if (cb)
fn(cb);
return this[FLOWING];
}
if (typeof chunk === "string" && // unless it is a string already ready for us to use
!(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
chunk = Buffer.from(chunk, encoding);
}
if (Buffer.isBuffer(chunk) && this[ENCODING]) {
chunk = this[DECODER].write(chunk);
}
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
this[FLUSH](true);
if (this[FLOWING])
this.emit("data", chunk);
else
this[BUFFERPUSH](chunk);
if (this[BUFFERLENGTH] !== 0)
this.emit("readable");
if (cb)
fn(cb);
return this[FLOWING];
}
/**
* Low-level explicit read method.
*
* In objectMode, the argument is ignored, and one item is returned if
* available.
*
* `n` is the number of bytes (or in the case of encoding streams,
* characters) to consume. If `n` is not provided, then the entire buffer
* is returned, or `null` is returned if no data is available.
*
* If `n` is greater than the amount of data in the internal buffer,
* then `null` is returned.
*/
read(n) {
if (this[DESTROYED])
return null;
this[DISCARDED] = false;
if (this[BUFFERLENGTH] === 0 || n === 0 || n && n > this[BUFFERLENGTH]) {
this[MAYBE_EMIT_END]();
return null;
}
if (this[OBJECTMODE])
n = null;
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
this[BUFFER] = [
this[ENCODING] ? this[BUFFER].join("") : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])
];
}
const ret = this[READ](n || null, this[BUFFER][0]);
this[MAYBE_EMIT_END]();
return ret;
}
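// Illustrative sketch (not part of the bundled source) of the read()
// contract documented above: partial reads consume from the front of the
// buffer, and asking for more than is buffered yields null, not a short
// read.
//
//   const mp = new Minipass({ encoding: 'utf8' });
//   mp.write('hello');
//   mp.read(2); // => 'he'
//   mp.read(9); // => null (only 3 characters remain buffered)
//   mp.read();  // => 'llo' (no argument drains the whole buffer)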
[READ](n, chunk) {
if (this[OBJECTMODE])
this[BUFFERSHIFT]();
else {
const c = chunk;
if (n === c.length || n === null)
this[BUFFERSHIFT]();
else if (typeof c === "string") {
this[BUFFER][0] = c.slice(n);
chunk = c.slice(0, n);
this[BUFFERLENGTH] -= n;
} else {
this[BUFFER][0] = c.subarray(n);
chunk = c.subarray(0, n);
this[BUFFERLENGTH] -= n;
}
}
this.emit("data", chunk);
if (!this[BUFFER].length && !this[EOF])
this.emit("drain");
return chunk;
}
end(chunk, encoding, cb) {
if (typeof chunk === "function") {
cb = chunk;
chunk = void 0;
}
if (typeof encoding === "function") {
cb = encoding;
encoding = "utf8";
}
if (chunk !== void 0)
this.write(chunk, encoding);
if (cb)
this.once("end", cb);
this[EOF] = true;
this.writable = false;
if (this[FLOWING] || !this[PAUSED])
this[MAYBE_EMIT_END]();
return this;
}
// don't let the internal resume be overwritten
[RESUME]() {
if (this[DESTROYED])
return;
if (!this[DATALISTENERS] && !this[PIPES].length) {
this[DISCARDED] = true;
}
this[PAUSED] = false;
this[FLOWING] = true;
this.emit("resume");
if (this[BUFFER].length)
this[FLUSH]();
else if (this[EOF])
this[MAYBE_EMIT_END]();
else
this.emit("drain");
}
/**
* Resume the stream if it is currently in a paused state
*
* If called when there are no pipe destinations or `data` event listeners,
* this will place the stream in a "discarded" state, where all data will
* be thrown away. The discarded state is removed if a pipe destination or
* data handler is added, if pause() is called, or if any synchronous or
* asynchronous iteration is started.
*/
resume() {
return this[RESUME]();
}
/**
* Pause the stream
*/
pause() {
this[FLOWING] = false;
this[PAUSED] = true;
this[DISCARDED] = false;
}
/**
* true if the stream has been forcibly destroyed
*/
get destroyed() {
return this[DESTROYED];
}
/**
* true if the stream is currently in a flowing state, meaning that
* any writes will be immediately emitted.
*/
get flowing() {
return this[FLOWING];
}
/**
* true if the stream is currently in a paused state
*/
get paused() {
return this[PAUSED];
}
[BUFFERPUSH](chunk) {
if (this[OBJECTMODE])
this[BUFFERLENGTH] += 1;
else
this[BUFFERLENGTH] += chunk.length;
this[BUFFER].push(chunk);
}
[BUFFERSHIFT]() {
if (this[OBJECTMODE])
this[BUFFERLENGTH] -= 1;
else
this[BUFFERLENGTH] -= this[BUFFER][0].length;
return this[BUFFER].shift();
}
[FLUSH](noDrain = false) {
do {
} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length);
if (!noDrain && !this[BUFFER].length && !this[EOF])
this.emit("drain");
}
[FLUSHCHUNK](chunk) {
this.emit("data", chunk);
return this[FLOWING];
}
/**
* Pipe all data emitted by this stream into the destination provided.
*
* Triggers the flow of data.
*/
pipe(dest, opts) {
if (this[DESTROYED])
return dest;
this[DISCARDED] = false;
const ended = this[EMITTED_END];
opts = opts || {};
if (dest === proc.stdout || dest === proc.stderr)
opts.end = false;
else
opts.end = opts.end !== false;
opts.proxyErrors = !!opts.proxyErrors;
if (ended) {
if (opts.end)
dest.end();
} else {
this[PIPES].push(!opts.proxyErrors ? new Pipe(this, dest, opts) : new PipeProxyErrors(this, dest, opts));
if (this[ASYNC])
defer(() => this[RESUME]());
else
this[RESUME]();
}
return dest;
}
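// Illustrative sketch (not part of the bundled source): pipe() starts the
// flow of data immediately, stdout/stderr destinations are never end()ed,
// and `proxyErrors` forwards source errors. `someWritable` is hypothetical.
//
//   const src = new Minipass({ encoding: 'utf8' });
//   src.pipe(process.stdout); // opts.end forced to false for stdio
//   src.pipe(someWritable, { end: false, proxyErrors: true });
//   src.write('flows to both destinations\n');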
/**
* Fully unhook a piped destination stream.
*
* If the destination stream was the only consumer of this stream (ie,
* there are no other piped destinations or `'data'` event listeners)
* then the flow of data will stop until there is another consumer or
* {@link Minipass#resume} is explicitly called.
*/
unpipe(dest) {
const p = this[PIPES].find((p2) => p2.dest === dest);
if (p) {
if (this[PIPES].length === 1) {
if (this[FLOWING] && this[DATALISTENERS] === 0) {
this[FLOWING] = false;
}
this[PIPES] = [];
} else
this[PIPES].splice(this[PIPES].indexOf(p), 1);
p.unpipe();
}
}
/**
* Alias for {@link Minipass#on}
*/
addListener(ev, handler) {
return this.on(ev, handler);
}
/**
* Mostly identical to `EventEmitter.on`, with the following
* behavior differences to prevent data loss and unnecessary hangs:
*
* - Adding a 'data' event handler will trigger the flow of data
*
* - Adding a 'readable' event handler when there is data waiting to be read
* will cause 'readable' to be emitted immediately.
*
* - Adding an 'endish' event handler ('end', 'finish', etc.) which has
* already passed will cause the event to be emitted immediately and all
* handlers removed.
*
* - Adding an 'error' event handler after an error has been emitted will
* cause the event to be re-emitted immediately with the error previously
* raised.
*/
on(ev, handler) {
const ret = super.on(ev, handler);
if (ev === "data") {
this[DISCARDED] = false;
this[DATALISTENERS]++;
if (!this[PIPES].length && !this[FLOWING]) {
this[RESUME]();
}
} else if (ev === "readable" && this[BUFFERLENGTH] !== 0) {
super.emit("readable");
} else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev);
this.removeAllListeners(ev);
} else if (ev === "error" && this[EMITTED_ERROR]) {
const h = handler;
if (this[ASYNC])
defer(() => h.call(this, this[EMITTED_ERROR]));
else
h.call(this, this[EMITTED_ERROR]);
}
return ret;
}
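// Illustrative sketch (not part of the bundled source) of the listener
// behavior documented above: attaching handlers after the fact does not
// lose data or terminal events.
//
//   const mp = new Minipass({ encoding: 'utf8' });
//   mp.end('hi');
//   mp.on('data', (c) => console.log(c)); // starts flow, logs 'hi'
//   mp.on('end', () => console.log('done')); // 'end' already emitted,
//                                            // so this fires immediately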
/**
* Alias for {@link Minipass#off}
*/
removeListener(ev, handler) {
return this.off(ev, handler);
}
/**
* Mostly identical to `EventEmitter.off`
*
* If a 'data' event handler is removed, and it was the last consumer
* (ie, there are no pipe destinations or other 'data' event listeners),
* then the flow of data will stop until there is another consumer or
* {@link Minipass#resume} is explicitly called.
*/
off(ev, handler) {
const ret = super.off(ev, handler);
if (ev === "data") {
this[DATALISTENERS] = this.listeners("data").length;
if (this[DATALISTENERS] === 0 && !this[DISCARDED] && !this[PIPES].length) {
this[FLOWING] = false;
}
}
return ret;
}
/**
* Mostly identical to `EventEmitter.removeAllListeners`
*
* If all 'data' event handlers are removed, and they were the last consumer
* (ie, there are no pipe destinations), then the flow of data will stop
* until there is another consumer or {@link Minipass#resume} is explicitly
* called.
*/
removeAllListeners(ev) {
const ret = super.removeAllListeners(ev);
if (ev === "data" || ev === void 0) {
this[DATALISTENERS] = 0;
if (!this[DISCARDED] && !this[PIPES].length) {
this[FLOWING] = false;
}
}
return ret;
}
/**
* true if the 'end' event has been emitted
*/
get emittedEnd() {
return this[EMITTED_END];
}
[MAYBE_EMIT_END]() {
if (!this[EMITTING_END] && !this[EMITTED_END] && !this[DESTROYED] && this[BUFFER].length === 0 && this[EOF]) {
this[EMITTING_END] = true;
this.emit("end");
this.emit("prefinish");
this.emit("finish");
if (this[CLOSED])
this.emit("close");
this[EMITTING_END] = false;
}
}
/**
* Mostly identical to `EventEmitter.emit`, with the following
* behavior differences to prevent data loss and unnecessary hangs:
*
* If the stream has been destroyed, and the event is something other
* than 'close' or 'error', then `false` is returned and no handlers
* are called.
*
* If the event is 'end', and has already been emitted, then the event
* is ignored. If the stream is in a paused or non-flowing state, then
* the event will be deferred until data flow resumes. If the stream is
* async, then handlers will be called on the next tick rather than
* immediately.
*
* If the event is 'close', and 'end' has not yet been emitted, then
* the event will be deferred until after 'end' is emitted.
*
* If the event is 'error', and an AbortSignal was provided for the stream,
* and there are no listeners, then the event is ignored, matching the
* behavior of node core streams in the presence of an AbortSignal.
*
* If the event is 'finish' or 'prefinish', then all listeners will be
* removed after emitting the event, to prevent double-firing.
*/
emit(ev, ...args) {
const data = args[0];
if (ev !== "error" && ev !== "close" && ev !== DESTROYED && this[DESTROYED]) {
return false;
} else if (ev === "data") {
return !this[OBJECTMODE] && !data ? false : this[ASYNC] ? (defer(() => this[EMITDATA](data)), true) : this[EMITDATA](data);
} else if (ev === "end") {
return this[EMITEND]();
} else if (ev === "close") {
this[CLOSED] = true;
if (!this[EMITTED_END] && !this[DESTROYED])
return false;
const ret2 = super.emit("close");
this.removeAllListeners("close");
return ret2;
} else if (ev === "error") {
this[EMITTED_ERROR] = data;
super.emit(ERROR, data);
const ret2 = !this[SIGNAL] || this.listeners("error").length ? super.emit("error", data) : false;
this[MAYBE_EMIT_END]();
return ret2;
} else if (ev === "resume") {
const ret2 = super.emit("resume");
this[MAYBE_EMIT_END]();
return ret2;
} else if (ev === "finish" || ev === "prefinish") {
const ret2 = super.emit(ev);
this.removeAllListeners(ev);
return ret2;
}
const ret = super.emit(ev, ...args);
this[MAYBE_EMIT_END]();
return ret;
}
[EMITDATA](data) {
for (const p of this[PIPES]) {
if (p.dest.write(data) === false)
this.pause();
}
const ret = this[DISCARDED] ? false : super.emit("data", data);
this[MAYBE_EMIT_END]();
return ret;
}
[EMITEND]() {
if (this[EMITTED_END])
return false;
this[EMITTED_END] = true;
this.readable = false;
return this[ASYNC] ? (defer(() => this[EMITEND2]()), true) : this[EMITEND2]();
}
[EMITEND2]() {
if (this[DECODER]) {
const data = this[DECODER].end();
if (data) {
for (const p of this[PIPES]) {
p.dest.write(data);
}
if (!this[DISCARDED])
super.emit("data", data);
}
}
for (const p of this[PIPES]) {
p.end();
}
const ret = super.emit("end");
this.removeAllListeners("end");
return ret;
}
/**
* Return a Promise that resolves to an array of all emitted data once
* the stream ends.
*/
async collect() {
const buf = Object.assign([], {
dataLength: 0
});
if (!this[OBJECTMODE])
buf.dataLength = 0;
const p = this.promise();
this.on("data", (c) => {
buf.push(c);
if (!this[OBJECTMODE])
buf.dataLength += c.length;
});
await p;
return buf;
}
/**
* Return a Promise that resolves to the concatenation of all emitted data
* once the stream ends.
*
* Not allowed on objectMode streams.
*/
async concat() {
if (this[OBJECTMODE]) {
throw new Error("cannot concat in objectMode");
}
const buf = await this.collect();
return this[ENCODING] ? buf.join("") : Buffer.concat(buf, buf.dataLength);
}
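// Illustrative sketch (not part of the bundled source): collect() gathers
// chunks, concat() joins them. Must run inside an async function.
//
//   const mp = new Minipass({ encoding: 'utf8' });
//   mp.end('foo');
//   const chunks = await mp.collect(); // => ['foo']
//   // Or, to get one joined value (throws in objectMode):
//   // const all = await mp.concat(); // => 'foo'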
/**
* Return a void Promise that resolves once the stream ends.
*/
async promise() {
return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error("stream destroyed")));
this.on("error", (er) => reject(er));
this.on("end", () => resolve());
});
}
/**
* Asynchronous `for await of` iteration.
*
* This will continue emitting all chunks until the stream terminates.
*/
[Symbol.asyncIterator]() {
this[DISCARDED] = false;
let stopped = false;
const stop = async () => {
this.pause();
stopped = true;
return { value: void 0, done: true };
};
const next = () => {
if (stopped)
return stop();
const res = this.read();
if (res !== null)
return Promise.resolve({ done: false, value: res });
if (this[EOF])
return stop();
let resolve;
let reject;
const onerr = (er) => {
this.off("data", ondata);
this.off("end", onend);
this.off(DESTROYED, ondestroy);
stop();
reject(er);
};
const ondata = (value) => {
this.off("error", onerr);
this.off("end", onend);
this.off(DESTROYED, ondestroy);
this.pause();
resolve({ value, done: !!this[EOF] });
};
const onend = () => {
this.off("error", onerr);
this.off("data", ondata);
this.off(DESTROYED, ondestroy);
stop();
resolve({ done: true, value: void 0 });
};
const ondestroy = () => onerr(new Error("stream destroyed"));
return new Promise((res2, rej) => {
reject = rej;
resolve = res2;
this.once(DESTROYED, ondestroy);
this.once("error", onerr);
this.once("end", onend);
this.once("data", ondata);
});
};
return {
next,
throw: stop,
return: stop,
[Symbol.asyncIterator]() {
return this;
}
};
}
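// Illustrative sketch (not part of the bundled source): `for await`
// consumes chunks as they arrive and completes when the stream ends.
//
//   const mp = new Minipass({ encoding: 'utf8' });
//   setTimeout(() => { mp.write('a'); mp.end('b'); });
//   for await (const chunk of mp) console.log(chunk); // logs each chunk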
/**
* Synchronous `for of` iteration.
*
* The iteration will terminate when the internal buffer runs out, even
* if the stream has not yet terminated.
*/
[Symbol.iterator]() {
this[DISCARDED] = false;
let stopped = false;
const stop = () => {
this.pause();
this.off(ERROR, stop);
this.off(DESTROYED, stop);
this.off("end", stop);
stopped = true;
return { done: true, value: void 0 };
};
const next = () => {
if (stopped)
return stop();
const value = this.read();
return value === null ? stop() : { done: false, value };
};
this.once("end", stop);
this.once(ERROR, stop);
this.once(DESTROYED, stop);
return {
next,
throw: stop,
return: stop,
[Symbol.iterator]() {
return this;
}
};
}
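// Illustrative sketch (not part of the bundled source): synchronous
// iteration only drains what is already buffered; it stops at the first
// null read even though the stream remains writable.
//
//   const mp = new Minipass({ objectMode: true });
//   mp.write(1);
//   mp.write(2);
//   [...mp]; // => [1, 2]; iteration stops here, mp is still open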
/**
* Destroy a stream, preventing it from being used for any further purpose.
*
* If the stream has a `close()` method, then it will be called on
* destruction.
*
* After destruction, any attempt to write data, read data, or emit most
* events will be ignored.
*
* If an error argument is provided, then it will be emitted in an
* 'error' event.
*/
destroy(er) {
if (this[DESTROYED]) {
if (er)
this.emit("error", er);
else
this.emit(DESTROYED);
return this;
}
this[DESTROYED] = true;
this[DISCARDED] = true;
this[BUFFER].length = 0;
this[BUFFERLENGTH] = 0;
const wc = this;
if (typeof wc.close === "function" && !this[CLOSED])
wc.close();
if (er)
this.emit("error", er);
else
this.emit(DESTROYED);
return this;
}
/**
* Alias for {@link isStream}
*
* Former export location, maintained for backwards compatibility.
*
* @deprecated
*/
static get isStream() {
return exports.isStream;
}
};
exports.Minipass = Minipass;
}
});
// ../../node_modules/.pnpm/ssri@12.0.0/node_modules/ssri/lib/index.js
var require_lib = __commonJS({
"../../node_modules/.pnpm/ssri@12.0.0/node_modules/ssri/lib/index.js"(exports, module) {
"use strict";
var crypto = __require("node:crypto");
var { Minipass } = require_commonjs();
var SPEC_ALGORITHMS = ["sha512", "sha384", "sha256"];
var DEFAULT_ALGORITHMS = ["sha512"];
var BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i;
var SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/;
var STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/;
var VCHAR_REGEX = /^[\x21-\x7E]+$/;
var getOptString = (options) => options?.length ? `?${options.join("?")}` : "";
var IntegrityStream = class extends Minipass {
#emittedIntegrity;
#emittedSize;
#emittedVerified;
constructor(opts) {
super();
this.size = 0;
this.opts = opts;
this.#getOptions();
if (opts?.algorithms) {
this.algorithms = [...opts.algorithms];
} else {
this.algorithms = [...DEFAULT_ALGORITHMS];
}
if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) {
this.algorithms.push(this.algorithm);
}
this.hashes = this.algorithms.map(crypto.createHash);
}
#getOptions() {
this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null;
this.expectedSize = this.opts?.size;
if (!this.sri) {
this.algorithm = null;
} else if (this.sri.isHash) {
this.goodSri = true;
this.algorithm = this.sri.algorithm;
} else {
this.goodSri = !this.sri.isEmpty();
this.algorithm = this.sri.pickAlgorithm(this.opts);
}
this.digests = this.goodSri ? this.sri[this.algorithm] : null;
this.optString = getOptString(this.opts?.options);
}
on(ev, handler) {
if (ev === "size" && this.#emittedSize) {
return handler(this.#emittedSize);
}
if (ev === "integrity" && this.#emittedIntegrity) {
return handler(this.#emittedIntegrity);
}
if (ev === "verified" && this.#emittedVerified) {
return handler(this.#emittedVerified);
}
return super.on(ev, handler);
}
emit(ev, data) {
if (ev === "end") {
this.#onEnd();
}
return super.emit(ev, data);
}
write(data) {
this.size += data.length;
this.hashes.forEach((h) => h.update(data));
return super.write(data);
}
#onEnd() {
if (!this.goodSri) {
this.#getOptions();
}
const newSri = parse(this.hashes.map((h, i) => {
return `${this.algorithms[i]}-${h.digest("base64")}${this.optString}`;
}).join(" "), this.opts);
const match = this.goodSri && newSri.match(this.sri, this.opts);
if (typeof this.expectedSize === "number" && this.size !== this.expectedSize) {
const err = new Error(`stream size mismatch when checking ${this.sri}.
Wanted: ${this.expectedSize}
Found: ${this.size}`);
err.code = "EBADSIZE";
err.found = this.size;
err.expected = this.expectedSize;
err.sri = this.sri;
this.emit("error", err);
} else if (this.sri && !match) {
const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`);
err.code = "EINTEGRITY";
err.found = newSri;
err.expected = this.digests;
err.algorithm = this.algorithm;
err.sri = this.sri;
this.emit("error", err);
} else {
this.#emittedSize = this.size;
this.emit("size", this.size);
this.#emittedIntegrity = newSri;
this.emit("integrity", newSri);
if (match) {
this.#emittedVerified = match;
this.emit("verified", match);
}
}
}
};
var Hash = class {
get isHash() {
return true;
}
constructor(hash, opts) {
const strict = opts?.strict;
this.source = hash.trim();
this.digest = "";
this.algorithm = "";
this.options = [];
const match = this.source.match(
strict ? STRICT_SRI_REGEX : SRI_REGEX
);
if (!match) {
return;
}
if (strict && !SPEC_ALGORITHMS.includes(match[1])) {
return;
}
this.algorithm = match[1];
this.digest = match[2];
const rawOpts = match[3];
if (rawOpts) {
this.options = rawOpts.slice(1).split("?");
}
}
hexDigest() {
return this.digest && Buffer.from(this.digest, "base64").toString("hex");
}
toJSON() {
return this.toString();
}
match(integrity, opts) {
const other = parse(integrity, opts);
if (!other) {
return false;
}
if (other.isIntegrity) {
const algo = other.pickAlgorithm(opts, [this.algorithm]);
if (!algo) {
return false;
}
const foundHash = other[algo].find((hash) => hash.digest === this.digest);
if (foundHash) {
return foundHash;
}
return false;
}
return other.digest === this.digest ? other : false;
}
toString(opts) {
if (opts?.strict) {
// The spec has very restricted productions for algorithms.
// https://www.w3.org/TR/CSP2/#source-list-syntax
const validAlgorithm = SPEC_ALGORITHMS.includes(this.algorithm);
// Usually, if someone insists on using a "different" base64, we
// leave it as-is, since there are multiple standards, and the
// specified one is not a URL-safe variant.
// https://www.w3.org/TR/CSP2/#base64_value
const validDigest = !!this.digest.match(BASE64_REGEX);
// Option syntax is strictly visual chars.
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
// https://tools.ietf.org/html/rfc5234#appendix-B.1
const validOptions = this.options.every((opt) => opt.match(VCHAR_REGEX));
if (!(validAlgorithm && validDigest && validOptions)) {
return "";
}
}
return `${this.algorithm}-${this.digest}${getOptString(this.options)}`;
}
};
function integrityHashToString(toString, sep, opts, hashes) {
const toStringIsNotEmpty = toString !== "";
let shouldAddFirstSep = false;
let complement = "";
const lastIndex = hashes.length - 1;
for (let i = 0; i < lastIndex; i++) {
const hashString = Hash.prototype.toString.call(hashes[i], opts);
if (hashString) {
shouldAddFirstSep = true;
complement += hashString;
complement += sep;
}
}
const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts);
if (finalHashString) {
shouldAddFirstSep = true;
complement += finalHashString;
}
if (toStringIsNotEmpty && shouldAddFirstSep) {
return toString + sep + complement;
}
return toString + complement;
}
var Integrity = class {
get isIntegrity() {
return true;
}
toJSON() {
return this.toString();
}
isEmpty() {
return Object.keys(this).length === 0;
}
toString(opts) {
let sep = opts?.sep || " ";
let toString = "";
if (opts?.strict) {
sep = sep.replace(/\S+/g, " ");
for (const hash of SPEC_ALGORITHMS) {
if (this[hash]) {
toString = integrityHashToString(toString, sep, opts, this[hash]);
}
}
} else {
for (const hash of Object.keys(this)) {
toString = integrityHashToString(toString, sep, opts, this[hash]);
}
}
return toString;
}
concat(integrity, opts) {
const other = typeof integrity === "string" ? integrity : stringify(integrity, opts);
return parse(`${this.toString(opts)} ${other}`, opts);
}
hexDigest() {
return parse(this, { single: true }).hexDigest();
}
// add additional hashes to an integrity value, but prevent
// *changing* an existing integrity hash.
merge(integrity, opts) {
const other = parse(integrity, opts);
for (const algo in other) {
if (this[algo]) {
if (!this[algo].find((hash) => other[algo].find((otherhash) => hash.digest === otherhash.digest))) {
throw new Error("hashes do not match, cannot update integrity");
}
} else {
this[algo] = other[algo];
}
}
}
match(integrity, opts) {
const other = parse(integrity, opts);
if (!other) {
return false;
}
const algo = other.pickAlgorithm(opts, Object.keys(this));
return !!algo && this[algo] && other[algo] && this[algo].find(
(hash) => other[algo].find(
(otherhash) => hash.digest === otherhash.digest
)
) || false;
}
// Pick the highest-priority algorithm present, optionally limited to the
// set of hashes found in another integrity value. When limiting, this may
// return nothing.
pickAlgorithm(opts, hashes) {
const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash;
const keys = Object.keys(this).filter((k) => {
if (hashes?.length) {
return hashes.includes(k);
}
return true;
});
if (keys.length) {
return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc);
}
return null;
}
};
module.exports.parse = parse;
function parse(sri, opts) {
if (!sri) {
return null;
}
if (typeof sri === "string") {
return _parse(sri, opts);
} else if (sri.algorithm && sri.digest) {
const fullSri = new Integrity();
fullSri[sri.algorithm] = [sri];
return _parse(stringify(fullSri, opts), opts);
} else {
return _parse(stringify(sri, opts), opts);
}
}
function _parse(integrity, opts) {
if (opts?.single) {
return new Hash(integrity, opts);
}
const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => {
const hash = new Hash(string, opts);
if (hash.algorithm && hash.digest) {
const algo = hash.algorithm;
if (!acc[algo]) {
acc[algo] = [];
}
acc[algo].push(hash);
}
return acc;
}, new Integrity());
return hashes.isEmpty() ? null : hashes;
}
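// Illustrative sketch (not part of the bundled source): parse() groups
// hashes by algorithm. Digests below are stand-ins, not real checksums.
//
//   const sri = parse('sha512-AAAA sha256-BBBB');
//   sri.sha512[0].digest; // => 'AAAA'
//   sri.toString();       // round-trips the metadata string
//   parse('garbage');     // => null (no algorithm-digest pair found)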
module.exports.stringify = stringify;
function stringify(obj, opts) {
if (obj.algorithm && obj.digest) {
return Hash.prototype.toString.call(obj, opts);
} else if (typeof obj === "string") {
return stringify(parse(obj, opts), opts);
} else {
return Integrity.prototype.toString.call(obj, opts);
}
}
module.exports.fromHex = fromHex;
function fromHex(hexDigest, algorithm, opts) {
const optString = getOptString(opts?.options);
return parse(
`${algorithm}-${Buffer.from(hexDigest, "hex").toString("base64")}${optString}`,
opts
);
}
module.exports.fromData = fromData2;
function fromData2(data, opts) {
const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS];
const optString = getOptString(opts?.options);
return algorithms.reduce((acc, algo) => {
const digest = crypto.createHash(algo).update(data).digest("base64");
const hash = new Hash(
`${algo}-${digest}${optString}`,
opts
);
if (hash.algorithm && hash.digest) {
const hashAlgo = hash.algorithm;
if (!acc[hashAlgo]) {
acc[hashAlgo] = [];
}
acc[hashAlgo].push(hash);
}
return acc;
}, new Integrity());
}
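// Illustrative sketch (not part of the bundled source): generating
// integrity metadata for in-memory data via fromData2 above. The digest
// shown is a placeholder.
//
//   const integrity = fromData2('hello world', { algorithms: ['sha256'] });
//   integrity.toString();  // => 'sha256-<base64 digest>'
//   integrity.hexDigest(); // hex form of the single digest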
module.exports.fromStream = fromStream;
function fromStream(stream, opts) {
const istream = integrityStream(opts);
return new Promise((resolve, reject) => {
stream.pipe(istream);
stream.on("error", reject);
istream.on("error", reject);
let sri;
istream.on("integrity", (s) => {
sri = s;
});
istream.on("end", () => resolve(sri));
istream.resume();
});
}
module.exports.checkData = checkData;
function checkData(data, sri, opts) {
sri = parse(sri, opts);
if (!sri || !Object.keys(sri).length) {
if (opts?.error) {
throw Object.assign(
new Error("No valid integrity hashes to check against"),
{
code: "EINTEGRITY"
}
);
} else {
return false;
}
}
const algorithm = sri.pickAlgorithm(opts);
const digest = crypto.createHash(algorithm).update(data).digest("base64");
const newSri = parse({ algorithm, digest });
const match = newSri.match(sri, opts);
opts = opts || {};
if (match || !opts.error) {
return match;
} else if (typeof opts.size === "number" && data.length !== opts.size) {
const err = new Error(`data size mismatch when checking ${sri}.
Wanted: ${opts.size}
Found: ${data.length}`);
err.code = "EBADSIZE";
err.found = data.length;
err.expected = opts.size;
err.sri = sri;
throw err;
} else {
const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`);
err.code = "EINTEGRITY";
err.found = newSri;
err.expected = sri;
err.algorithm = algorithm;
err.sri = sri;
throw err;
}
}
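// Illustrative sketch (not part of the bundled source): verifying bytes
// against stored metadata with checkData above.
//
//   const sri = fromData2('some tarball bytes');
//   checkData('some tarball bytes', sri); // => matching Hash object
//   checkData('tampered bytes', sri);     // => false
//   // With { error: true } a mismatch throws EINTEGRITY instead:
//   // checkData('tampered bytes', sri, { error: true });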
module.exports.checkStream = checkStream;
function checkStream(stream, sri, opts) {
opts = opts || /* @__PURE__ */ Object.create(null);
opts.integrity = sri;
sri = parse(sri, opts);
if (!sri || !Object.keys(sri).length) {
return Promise.reject(Object.assign(
new Error("No valid integrity hashes to check against"),
{
code: "EINTEGRITY"
}
));
}
const checker = integrityStream(opts);
return new Promise((resolve, reject) => {
stream.pipe(checker);
stream.on("error", reject);
checker.on("error", reject);
let verified;
checker.on("verified", (s) => {
verified = s;
});
checker.on("end", () => resolve(verified));
checker.resume();
});
}
module.exports.integrityStream = integrityStream;
function integrityStream(opts = /* @__PURE__ */ Object.create(null)) {
return new IntegrityStream(opts);
}
module.exports.create = createIntegrity;
function createIntegrity(opts) {
const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS];
const optString = getOptString(opts?.options);
const hashes = algorithms.map(crypto.createHash);
return {
update: function(chunk, enc) {
hashes.forEach((h) => h.update(chunk, enc));
return this;
},
digest: function() {
const integrity = algorithms.reduce((acc, algo) => {
const digest = hashes.shift().digest("base64");
const hash = new Hash(
`${algo}-${digest}${optString}`,
opts
);
if (hash.algorithm && hash.digest) {
const hashAlgo = hash.algorithm;
if (!acc[hashAlgo]) {
acc[hashAlgo] = [];
}
acc[hashAlgo].push(hash);
}
return acc;
}, new Integrity());
return integrity;
}
};
}
var NODE_HASHES = crypto.getHashes();
var DEFAULT_PRIORITY = [
"md5",
"whirlpool",
"sha1",
"sha224",
"sha256",
"sha384",
"sha512",
// TODO - it's unclear _which_ of these Node will actually use as its name
// for the algorithm, so we guess based on the OpenSSL names.
"sha3",
"sha3-256",
"sha3-384",
"sha3-512",
"sha3_256",
"sha3_384",
"sha3_512"
].filter((algo) => NODE_HASHES.includes(algo));
function getPrioritizedHash(algo1, algo2) {
return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) ? algo1 : algo2;
}
}
});
// ../../src/cli-sdk/src/pack-tarball.ts
var ssri = __toESM(require_lib(), 1);
import assert from "node:assert";
import { existsSync, statSync } from "node:fs";
import { join } from "node:path";
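// Returns a deep copy of the manifest in which `workspace:` and `catalog:`
// dependency specs are replaced by the concrete versions they resolve to,
// so the packed tarball ships only installable specs. Unresolvable
// workspace or catalog references fail with a structured error.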
var replaceWorkspaceAndCatalogSpecs = (manifest_, config) => {
const manifest = structuredClone(manifest_);
const { monorepo, catalog = {}, catalogs = {} } = config.options;
const depTypes = [
"dependencies",
"devDependencies",
"optionalDependencies",
"peerDependencies"
];
for (const depType of depTypes) {
const deps = manifest[depType];
if (!deps || typeof deps !== "object") continue;
const depsObj = deps;
for (const [depName, depSpec] of Object.entries(depsObj)) {
if (typeof depSpec !== "string") continue;
const spec = Spec.parse(`${depName}@${depSpec}`, {
catalog,
catalogs
});
switch (spec.type) {
case "workspace": {
assert(
monorepo,
error(`No workspace configuration found for ${depName}`, {
found: depName
})
);
const workspaceName = spec.workspace;
assert(
workspaceName,
error(`No workspace name found for ${depName}`, {
found: depName
})
);
const workspace = monorepo.get(workspaceName);
assert(
workspace,
error(`Workspace '${workspaceName}' not found`, {
found: workspaceName,
validOptions: Array.from(monorepo.keys())
})
);
const actualVersion = workspace.manifest.version;
assert(
actualVersion,
error(
`No version found for workspace '${workspaceName}'`,
{
found: workspaceName,
wanted: "package version"
}
)
);
depsObj[depName] = actualVersion;
break;
}
case "catalog": {
const catalogName = spec.catalog || "";
const targetCatalog = catalogName ? catalogs[catalogName] : catalog;
assert(
targetCatalog,
error(`Catalog '${catalogName}' not found`, {
found: catalogName,
validOptions: Object.keys(catalogs)
})
);
const actualVersion = targetCatalog[depName];
assert(
actualVersion,
error(
`Package '${depName}' not found in catalog '${catalogName || "default"}'