// @hot-updater/plugin-core
// React Native OTA solution for self-hosted
import { createRequire } from "node:module";
import fs from "fs";
import path from "path";
import fs$1 from "fs/promises";
import { cosmiconfig, cosmiconfigSync } from "cosmiconfig";
import { TypeScriptLoader } from "cosmiconfig-typescript-loader";
import process$1 from "node:process";
import os from "node:os";
import tty from "node:tty";
import { transform } from "oxc-transform";
//#region rolldown:runtime
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function() {
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") for (var keys$1 = __getOwnPropNames(from), i$3 = 0, n = keys$1.length, key; i$3 < n; i$3++) {
key = keys$1[i$3];
if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
get: ((k) => from[k]).bind(null, key),
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
});
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
value: mod,
enumerable: true
}) : target, mod));
var __require = /* @__PURE__ */ createRequire(import.meta.url);
//#endregion
//#region ../../node_modules/.pnpm/picocolors@1.1.1/node_modules/picocolors/picocolors.js
var require_picocolors = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/picocolors@1.1.1/node_modules/picocolors/picocolors.js": ((exports, module) => {
let p = process || {}, argv = p.argv || [], env$1 = p.env || {};
let isColorSupported = !(!!env$1.NO_COLOR || argv.includes("--no-color")) && (!!env$1.FORCE_COLOR || argv.includes("--color") || p.platform === "win32" || (p.stdout || {}).isTTY && env$1.TERM !== "dumb" || !!env$1.CI);
let formatter = (open, close, replace = open) => (input) => {
let string$1 = "" + input, index = string$1.indexOf(close, open.length);
return ~index ? open + replaceClose(string$1, close, replace, index) + close : open + string$1 + close;
};
let replaceClose = (string$1, close, replace, index) => {
let result = "", cursor = 0;
do {
result += string$1.substring(cursor, index) + replace;
cursor = index + close.length;
index = string$1.indexOf(close, cursor);
} while (~index);
return result + string$1.substring(cursor);
};
let createColors = (enabled = isColorSupported) => {
let f = enabled ? formatter : () => String;
return {
isColorSupported: enabled,
reset: f("\x1B[0m", "\x1B[0m"),
bold: f("\x1B[1m", "\x1B[22m", "\x1B[22m\x1B[1m"),
dim: f("\x1B[2m", "\x1B[22m", "\x1B[22m\x1B[2m"),
italic: f("\x1B[3m", "\x1B[23m"),
underline: f("\x1B[4m", "\x1B[24m"),
inverse: f("\x1B[7m", "\x1B[27m"),
hidden: f("\x1B[8m", "\x1B[28m"),
strikethrough: f("\x1B[9m", "\x1B[29m"),
black: f("\x1B[30m", "\x1B[39m"),
red: f("\x1B[31m", "\x1B[39m"),
green: f("\x1B[32m", "\x1B[39m"),
yellow: f("\x1B[33m", "\x1B[39m"),
blue: f("\x1B[34m", "\x1B[39m"),
magenta: f("\x1B[35m", "\x1B[39m"),
cyan: f("\x1B[36m", "\x1B[39m"),
white: f("\x1B[37m", "\x1B[39m"),
gray: f("\x1B[90m", "\x1B[39m"),
bgBlack: f("\x1B[40m", "\x1B[49m"),
bgRed: f("\x1B[41m", "\x1B[49m"),
bgGreen: f("\x1B[42m", "\x1B[49m"),
bgYellow: f("\x1B[43m", "\x1B[49m"),
bgBlue: f("\x1B[44m", "\x1B[49m"),
bgMagenta: f("\x1B[45m", "\x1B[49m"),
bgCyan: f("\x1B[46m", "\x1B[49m"),
bgWhite: f("\x1B[47m", "\x1B[49m"),
blackBright: f("\x1B[90m", "\x1B[39m"),
redBright: f("\x1B[91m", "\x1B[39m"),
greenBright: f("\x1B[92m", "\x1B[39m"),
yellowBright: f("\x1B[93m", "\x1B[39m"),
blueBright: f("\x1B[94m", "\x1B[39m"),
magentaBright: f("\x1B[95m", "\x1B[39m"),
cyanBright: f("\x1B[96m", "\x1B[39m"),
whiteBright: f("\x1B[97m", "\x1B[39m"),
bgBlackBright: f("\x1B[100m", "\x1B[49m"),
bgRedBright: f("\x1B[101m", "\x1B[49m"),
bgGreenBright: f("\x1B[102m", "\x1B[49m"),
bgYellowBright: f("\x1B[103m", "\x1B[49m"),
bgBlueBright: f("\x1B[104m", "\x1B[49m"),
bgMagentaBright: f("\x1B[105m", "\x1B[49m"),
bgCyanBright: f("\x1B[106m", "\x1B[49m"),
bgWhiteBright: f("\x1B[107m", "\x1B[49m")
};
};
module.exports = createColors();
module.exports.createColors = createColors;
}) });
//#endregion
//#region src/log.ts
var import_picocolors$1 = /* @__PURE__ */ __toESM(require_picocolors(), 1);
const log = {
normal: (message) => console.log(message),
success: (message) => console.log(import_picocolors$1.default.green(message)),
info: (message) => console.log(import_picocolors$1.default.blue(message)),
error: (message) => console.log(import_picocolors$1.default.red(message)),
warn: (message) => console.log(import_picocolors$1.default.yellow(message)),
debug: (message) => console.log(import_picocolors$1.default.gray(message))
};
//#endregion
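//#region example: using the log helper (illustrative sketch, not part of the original bundle)
// A minimal sketch of how the `log` helper above might be used by a plugin step.
// The messages are hypothetical and this function is never invoked by the bundle.
function exampleLogUsage() {
  log.info("Resolving hot-updater config...");
  log.warn("No cached config found; falling back to defaults");
  log.success("Config loaded");
  log.error("Failed to load config");
}
//#endregion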
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/graph/getPackageDependencies.js
var require_getPackageDependencies = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/graph/getPackageDependencies.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.getPackageDependencies = void 0;
function getPackageDependencies(info, packages, options = { withDevDependencies: true }) {
const deps = [];
if (info.dependencies) {
for (const dep of Object.keys(info.dependencies)) if (dep !== info.name && packages.has(dep)) deps.push(dep);
}
if (info.devDependencies && options.withDevDependencies) {
for (const dep of Object.keys(info.devDependencies)) if (dep !== info.name && packages.has(dep)) deps.push(dep);
}
if (info.peerDependencies && options.withPeerDependencies) {
for (const dep of Object.keys(info.peerDependencies)) if (dep !== info.name && packages.has(dep)) deps.push(dep);
}
return deps;
}
exports.getPackageDependencies = getPackageDependencies;
}) });
//#endregion
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/isCachingEnabled.js
var require_isCachingEnabled = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/isCachingEnabled.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.isCachingEnabled = exports.setCachingEnabled = void 0;
let cachingEnabled = true;
/** Enable or disable caching for all utilities that support caching */
function setCachingEnabled(enabled) {
cachingEnabled = enabled;
}
exports.setCachingEnabled = setCachingEnabled;
function isCachingEnabled() {
return cachingEnabled;
}
exports.isCachingEnabled = isCachingEnabled;
}) });
//#endregion
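//#region example: toggling workspace-tools caching (illustrative sketch, not part of the original bundle)
// Sketch of how the caching switch above is typically used (e.g. in tests): disable
// caching so repeated graph lookups reflect on-disk changes. Never invoked by the bundle.
function exampleDisableCaching() {
  const { setCachingEnabled, isCachingEnabled } = require_isCachingEnabled();
  setCachingEnabled(false);
  return isCachingEnabled(); // false
}
//#endregion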
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/dependencies/transitiveDeps.js
var require_transitiveDeps = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/dependencies/transitiveDeps.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.getTransitiveProviders = exports.getTransitiveConsumers = exports.getDependentMap = void 0;
const getPackageDependencies_1$2 = require_getPackageDependencies();
const isCachingEnabled_1$4 = require_isCachingEnabled();
const graphCache = /* @__PURE__ */ new Map();
function memoizedKey(packages, scope = []) {
return JSON.stringify({
packages,
scope
});
}
function getPackageGraph(packages, scope = []) {
const internalPackages = new Set(Object.keys(packages));
const key = memoizedKey(packages, scope);
if ((0, isCachingEnabled_1$4.isCachingEnabled)() && graphCache.has(key)) return graphCache.get(key);
const edges = [];
const visited = /* @__PURE__ */ new Set();
const stack = scope.length > 0 ? [...scope] : Object.keys(packages);
while (stack.length > 0) {
const pkg = stack.pop();
if (visited.has(pkg)) continue;
visited.add(pkg);
const info = packages[pkg];
const deps = (0, getPackageDependencies_1$2.getPackageDependencies)(info, internalPackages);
if (deps.length > 0) for (const dep of deps) {
stack.push(dep);
edges.push([dep, pkg]);
}
else edges.push([null, pkg]);
}
graphCache.set(key, edges);
return edges;
}
function getDependentMap$1(packages) {
const graph = getPackageGraph(packages);
const map = /* @__PURE__ */ new Map();
for (const [from, to] of graph) {
if (!map.has(to)) map.set(to, /* @__PURE__ */ new Set());
if (from) map.get(to).add(from);
}
return map;
}
exports.getDependentMap = getDependentMap$1;
/**
* For a package graph of `a->b->c` (where `b` depends on `a`), transitive consumers of `a` are `b` & `c`
* and their consumers (i.e., everything affected by a change to `a`)
* @deprecated Do not use
*/
function getTransitiveConsumers(targets, packages, scope = []) {
const graph = getPackageGraph(packages, scope);
const pkgQueue = [...targets];
const visited = /* @__PURE__ */ new Set();
while (pkgQueue.length > 0) {
const pkg = pkgQueue.shift();
if (!visited.has(pkg)) {
visited.add(pkg);
for (const [from, to] of graph) if (from === pkg) pkgQueue.push(to);
}
}
return [...visited].filter((pkg) => !targets.includes(pkg));
}
exports.getTransitiveConsumers = getTransitiveConsumers;
/**
* For a package graph of `a->b->c` (where `b` depends on `a`), transitive providers of `c` are `a` & `b`
* and their providers (or what is needed to satisfy `c`)
*
* @deprecated Do not use
*/
function getTransitiveProviders(targets, packages) {
const graph = getPackageGraph(packages);
const pkgQueue = [...targets];
const visited = /* @__PURE__ */ new Set();
while (pkgQueue.length > 0) {
const pkg = pkgQueue.shift();
if (!visited.has(pkg)) {
visited.add(pkg);
for (const [from, to] of graph) if (to === pkg && from) pkgQueue.push(from);
}
}
return [...visited].filter((pkg) => !targets.includes(pkg));
}
exports.getTransitiveProviders = getTransitiveProviders;
}) });
//#endregion
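//#region example: transitive consumers/providers (illustrative sketch, not part of the original bundle)
// Worked example of the (deprecated) helpers above for the `a -> b -> c` graph from the
// JSDoc, where `b` depends on `a` and `c` depends on `b`. The package infos are
// hypothetical and this function is never invoked by the bundle.
function exampleTransitiveDeps() {
  const { getTransitiveConsumers, getTransitiveProviders } = require_transitiveDeps();
  const packages = {
    a: { name: "a", packageJsonPath: "/repo/a/package.json" },
    b: { name: "b", packageJsonPath: "/repo/b/package.json", dependencies: { a: "*" } },
    c: { name: "c", packageJsonPath: "/repo/c/package.json", dependencies: { b: "*" } }
  };
  const consumers = getTransitiveConsumers(["a"], packages); // ["b", "c"]
  const providers = getTransitiveProviders(["c"], packages); // ["b", "a"]
  return { consumers, providers };
}
//#endregion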
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/dependencies/index.js
var require_dependencies = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/dependencies/index.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.getInternalDeps = exports.getTransitiveConsumers = exports.getTransitiveDependents = exports.getTransitiveProviders = exports.getTransitiveDependencies = void 0;
const transitiveDeps_1 = require_transitiveDeps();
Object.defineProperty(exports, "getTransitiveConsumers", {
enumerable: true,
get: function() {
return transitiveDeps_1.getTransitiveConsumers;
}
});
Object.defineProperty(exports, "getTransitiveProviders", {
enumerable: true,
get: function() {
return transitiveDeps_1.getTransitiveProviders;
}
});
const getPackageDependencies_1$1 = require_getPackageDependencies();
exports.getTransitiveDependencies = transitiveDeps_1.getTransitiveProviders;
exports.getTransitiveDependents = transitiveDeps_1.getTransitiveConsumers;
/** @deprecated Do not use */
exports.getInternalDeps = getPackageDependencies_1$1.getPackageDependencies;
}) });
//#endregion
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/infoFromPackageJson.js
var require_infoFromPackageJson = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/infoFromPackageJson.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.infoFromPackageJson = void 0;
function infoFromPackageJson(packageJson, packageJsonPath) {
return {
packageJsonPath,
...packageJson
};
}
exports.infoFromPackageJson = infoFromPackageJson;
}) });
//#endregion
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/workspaces/getWorkspaceRoot.js
var require_getWorkspaceRoot = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/workspaces/getWorkspaceRoot.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.getWorkspaceRoot = void 0;
const implementations_1$2 = require_implementations();
/**
* Get the root directory of a workspace/monorepo, defined as the directory where the workspace
* manager config file is located.
* @param cwd Start searching from here
* @param preferredManager Search for only this manager's config file
*/
function getWorkspaceRoot(cwd, preferredManager) {
return (0, implementations_1$2.getWorkspaceManagerAndRoot)(cwd, void 0, preferredManager)?.root;
}
exports.getWorkspaceRoot = getWorkspaceRoot;
}) });
//#endregion
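//#region example: resolving the workspace root (illustrative sketch, not part of the original bundle)
// Sketch of the getWorkspaceRoot API documented above. The path is hypothetical and
// the function is never invoked by the bundle.
function exampleWorkspaceRoot() {
  const { getWorkspaceRoot } = require_getWorkspaceRoot();
  // Returns e.g. "/repo" when called from "/repo/packages/app" inside a workspace,
  // or undefined when no workspace manager config file is found above the cwd.
  return getWorkspaceRoot("/repo/packages/app");
}
//#endregion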
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/git.js
var require_git$1 = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/git.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.gitFailFast = exports.git = exports.clearGitObservers = exports.addGitObserver = exports.GitError = void 0;
const child_process_1 = __require("child_process");
var GitError = class extends Error {
constructor(message, originalError) {
if (originalError instanceof Error) super(`${message}: ${originalError.message}`);
else super(message);
this.originalError = originalError;
}
};
exports.GitError = GitError;
/**
* A global maxBuffer override for all git operations.
* Bumps up the default to 500MB instead of 1MB.
* Override this value with the `GIT_MAX_BUFFER` environment variable.
*/
const defaultMaxBuffer = process.env.GIT_MAX_BUFFER ? parseInt(process.env.GIT_MAX_BUFFER) : 500 * 1024 * 1024;
const isDebug = !!process.env.GIT_DEBUG;
const observers = [];
let observing;
/**
* Adds an observer for the git operations, e.g. for testing
* @returns a function to remove the observer
*/
function addGitObserver(observer$1) {
observers.push(observer$1);
return () => removeGitObserver(observer$1);
}
exports.addGitObserver = addGitObserver;
/** Clear all git observers */
function clearGitObservers() {
observers.splice(0, observers.length);
}
exports.clearGitObservers = clearGitObservers;
/** Remove a git observer */
function removeGitObserver(observer$1) {
const index = observers.indexOf(observer$1);
if (index > -1) observers.splice(index, 1);
}
/**
* Runs git command - use this for read-only commands
*/
function git(args, options) {
isDebug && console.log(`git ${args.join(" ")}`);
const results = (0, child_process_1.spawnSync)("git", args, {
maxBuffer: defaultMaxBuffer,
...options
});
const output = {
...results,
stderr: (results.stderr || "").toString().trimEnd(),
stdout: (results.stdout || "").toString().trimEnd(),
success: results.status === 0
};
if (isDebug) {
console.log("exited with code " + results.status);
output.stdout && console.log("git stdout:\n", output.stdout);
output.stderr && console.warn("git stderr:\n", output.stderr);
}
if (!observing) {
observing = true;
for (const observer$1 of observers) observer$1(args, output);
observing = false;
}
return output;
}
exports.git = git;
/**
* Runs git command - use this for commands that make changes to the filesystem
*/
function gitFailFast(args, options) {
const gitResult = git(args, options);
if (!gitResult.success) {
if (!options?.noExitCode) process.exitCode = 1;
throw new GitError(`CRITICAL ERROR: running git command: git ${args.join(" ")}!
${gitResult.stdout?.toString().trimEnd()}
${gitResult.stderr?.toString().trimEnd()}`);
}
}
exports.gitFailFast = gitFailFast;
}) });
//#endregion
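//#region example: calling the git helpers (illustrative sketch, not part of the original bundle)
// Sketch contrasting the read-only `git` wrapper with the throwing `gitFailFast`
// wrapper documented above. The cwd is hypothetical; never invoked by the bundle.
function exampleGitHelpers(cwd) {
  const { git, gitFailFast } = require_git$1();
  // Read-only query: inspect `success` and `stdout` on the result yourself.
  const branch = git(["rev-parse", "--abbrev-ref", "HEAD"], { cwd });
  if (branch.success) console.log(`On branch ${branch.stdout}`);
  // Filesystem-changing command: throws a GitError (and sets process.exitCode) on failure.
  gitFailFast(["add", "."], { cwd });
  return branch.stdout;
}
//#endregion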
//#region ../../node_modules/.pnpm/protocols@2.0.2/node_modules/protocols/lib/index.js
var require_lib$8 = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/protocols@2.0.2/node_modules/protocols/lib/index.js": ((exports, module) => {
/**
* protocols
* Returns the protocols of an input url.
*
* @name protocols
* @function
* @param {String|URL} input The input url (string or `URL` instance)
* @param {Boolean|Number} first If `true`, the first protocol will be returned. If a number, it will be treated as the zero-based index into the protocols array.
* @return {Array|String} The array of protocols or the specified protocol.
*/
module.exports = function protocols$2(input, first) {
if (first === true) first = 0;
var prots = "";
if (typeof input === "string") try {
prots = new URL(input).protocol;
} catch (e) {}
else if (input && input.constructor === URL) prots = input.protocol;
var splits = prots.split(/\:|\+/).filter(Boolean);
if (typeof first === "number") return splits[first];
return splits;
};
}) });
//#endregion
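//#region example: protocols() (illustrative sketch, not part of the original bundle)
// Worked example of the protocols helper documented above; never invoked by the bundle.
function exampleProtocols() {
  const protocolsOf = require_lib$8();
  protocolsOf("git+ssh://git@github.com/org/repo.git"); // ["git", "ssh"]
  protocolsOf("https://example.com", true); // "https" (first protocol)
  return protocolsOf("not a url"); // [] (URL parsing failed)
}
//#endregion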
//#region ../../node_modules/.pnpm/parse-path@7.0.1/node_modules/parse-path/lib/index.js
var require_lib$7 = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/parse-path@7.0.1/node_modules/parse-path/lib/index.js": ((exports, module) => {
var protocols$1 = require_lib$8();
/**
* parsePath
* Parses the input url.
*
* @name parsePath
* @function
* @param {String} url The input url.
* @return {Object} An object containing the following fields:
*
* - `protocols` (Array): An array with the url protocols (usually it has one element).
* - `protocol` (String): The first protocol or `"file"`.
* - `port` (String): The domain port (default: `""`).
* - `resource` (String): The url domain/hostname.
* - `host` (String): The url domain (including subdomain and port).
* - `user` (String): The authentication user (default: `""`).
* - `password` (String): The authentication password (default: `""`).
* - `pathname` (String): The url pathname.
* - `hash` (String): The url hash.
* - `search` (String): The url querystring value (excluding `?`).
* - `href` (String): The normalized input url.
* - `query` (Object): The url querystring, parsed as object.
* - `parse_failed` (Boolean): Whether the parsing failed or not.
*/
function parsePath$1(url) {
var output = {
protocols: [],
protocol: null,
port: null,
resource: "",
host: "",
user: "",
password: "",
pathname: "",
hash: "",
search: "",
href: url,
query: {},
parse_failed: false
};
try {
var parsed = new URL(url);
output.protocols = protocols$1(parsed);
output.protocol = output.protocols[0];
output.port = parsed.port;
output.resource = parsed.hostname;
output.host = parsed.host;
output.user = parsed.username || "";
output.password = parsed.password || "";
output.pathname = parsed.pathname;
output.hash = parsed.hash.slice(1);
output.search = parsed.search.slice(1);
output.href = parsed.href;
output.query = Object.fromEntries(parsed.searchParams);
} catch (e) {
output.protocols = ["file"];
output.protocol = output.protocols[0];
output.port = "";
output.resource = "";
output.user = "";
output.pathname = "";
output.hash = "";
output.search = "";
output.href = url;
output.query = {};
output.parse_failed = true;
}
return output;
}
module.exports = parsePath$1;
}) });
//#endregion
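//#region example: parsePath() (illustrative sketch, not part of the original bundle)
// Worked example of the parsePath fields documented above; the url is hypothetical
// and the function is never invoked by the bundle.
function exampleParsePath() {
  const parsePathFn = require_lib$7();
  const out = parsePathFn("https://user:pass@example.com:8080/a/b?x=1#top");
  // out.protocol === "https", out.resource === "example.com", out.port === "8080",
  // out.pathname === "/a/b", out.search === "x=1", out.hash === "top",
  // out.query is { x: "1" }, out.parse_failed === false
  return out;
}
//#endregion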
//#region ../../node_modules/.pnpm/parse-url@8.1.0/node_modules/parse-url/dist/index.js
var require_dist = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/parse-url@8.1.0/node_modules/parse-url/dist/index.js": ((exports, module) => {
var parsePath = require_lib$7();
function _interopDefaultLegacy(e) {
return e && typeof e === "object" && "default" in e ? e : { "default": e };
}
var parsePath__default = /* @__PURE__ */ _interopDefaultLegacy(parsePath);
const DATA_URL_DEFAULT_MIME_TYPE = "text/plain";
const DATA_URL_DEFAULT_CHARSET = "us-ascii";
const testParameter = (name, filters) => filters.some((filter) => filter instanceof RegExp ? filter.test(name) : filter === name);
const normalizeDataURL = (urlString, { stripHash }) => {
const match = /^data:(?<type>[^,]*?),(?<data>[^#]*?)(?:#(?<hash>.*))?$/.exec(urlString);
if (!match) throw new Error(`Invalid URL: ${urlString}`);
let { type, data, hash } = match.groups;
const mediaType = type.split(";");
hash = stripHash ? "" : hash;
let isBase64 = false;
if (mediaType[mediaType.length - 1] === "base64") {
mediaType.pop();
isBase64 = true;
}
const mimeType = (mediaType.shift() || "").toLowerCase();
const attributes = mediaType.map((attribute) => {
let [key, value = ""] = attribute.split("=").map((string$1) => string$1.trim());
if (key === "charset") {
value = value.toLowerCase();
if (value === DATA_URL_DEFAULT_CHARSET) return "";
}
return `${key}${value ? `=${value}` : ""}`;
}).filter(Boolean);
const normalizedMediaType = [...attributes];
if (isBase64) normalizedMediaType.push("base64");
if (normalizedMediaType.length > 0 || mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE) normalizedMediaType.unshift(mimeType);
return `data:${normalizedMediaType.join(";")},${isBase64 ? data.trim() : data}${hash ? `#${hash}` : ""}`;
};
function normalizeUrl(urlString, options) {
options = {
defaultProtocol: "http:",
normalizeProtocol: true,
forceHttp: false,
forceHttps: false,
stripAuthentication: true,
stripHash: false,
stripTextFragment: true,
stripWWW: true,
removeQueryParameters: [/^utm_\w+/i],
removeTrailingSlash: true,
removeSingleSlash: true,
removeDirectoryIndex: false,
sortQueryParameters: true,
...options
};
urlString = urlString.trim();
if (/^data:/i.test(urlString)) return normalizeDataURL(urlString, options);
if (/^view-source:/i.test(urlString)) throw new Error("`view-source:` is not supported as it is a non-standard protocol");
const hasRelativeProtocol = urlString.startsWith("//");
const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
if (!isRelativeUrl) urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
const urlObject = new URL(urlString);
if (options.forceHttp && options.forceHttps) throw new Error("The `forceHttp` and `forceHttps` options cannot be used together");
if (options.forceHttp && urlObject.protocol === "https:") urlObject.protocol = "http:";
if (options.forceHttps && urlObject.protocol === "http:") urlObject.protocol = "https:";
if (options.stripAuthentication) {
urlObject.username = "";
urlObject.password = "";
}
if (options.stripHash) urlObject.hash = "";
else if (options.stripTextFragment) urlObject.hash = urlObject.hash.replace(/#?:~:text.*?$/i, "");
if (urlObject.pathname) {
const protocolRegex = /\b[a-z][a-z\d+\-.]{1,50}:\/\//g;
let lastIndex = 0;
let result = "";
for (;;) {
const match = protocolRegex.exec(urlObject.pathname);
if (!match) break;
const protocol = match[0];
const protocolAtIndex = match.index;
const intermediate = urlObject.pathname.slice(lastIndex, protocolAtIndex);
result += intermediate.replace(/\/{2,}/g, "/");
result += protocol;
lastIndex = protocolAtIndex + protocol.length;
}
const remnant = urlObject.pathname.slice(lastIndex, urlObject.pathname.length);
result += remnant.replace(/\/{2,}/g, "/");
urlObject.pathname = result;
}
if (urlObject.pathname) try {
urlObject.pathname = decodeURI(urlObject.pathname);
} catch {}
if (options.removeDirectoryIndex === true) options.removeDirectoryIndex = [/^index\.[a-z]+$/];
if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
let pathComponents = urlObject.pathname.split("/");
const lastComponent = pathComponents[pathComponents.length - 1];
if (testParameter(lastComponent, options.removeDirectoryIndex)) {
pathComponents = pathComponents.slice(0, -1);
urlObject.pathname = pathComponents.slice(1).join("/") + "/";
}
}
if (urlObject.hostname) {
urlObject.hostname = urlObject.hostname.replace(/\.$/, "");
if (options.stripWWW && /^www\.(?!www\.)[a-z\-\d]{1,63}\.[a-z.\-\d]{2,63}$/.test(urlObject.hostname)) urlObject.hostname = urlObject.hostname.replace(/^www\./, "");
}
if (Array.isArray(options.removeQueryParameters)) {
for (const key of [...urlObject.searchParams.keys()]) if (testParameter(key, options.removeQueryParameters)) urlObject.searchParams.delete(key);
}
if (options.removeQueryParameters === true) urlObject.search = "";
if (options.sortQueryParameters) {
urlObject.searchParams.sort();
try {
urlObject.search = decodeURIComponent(urlObject.search);
} catch {}
}
if (options.removeTrailingSlash) urlObject.pathname = urlObject.pathname.replace(/\/$/, "");
const oldUrlString = urlString;
urlString = urlObject.toString();
if (!options.removeSingleSlash && urlObject.pathname === "/" && !oldUrlString.endsWith("/") && urlObject.hash === "") urlString = urlString.replace(/\/$/, "");
if ((options.removeTrailingSlash || urlObject.pathname === "/") && urlObject.hash === "" && options.removeSingleSlash) urlString = urlString.replace(/\/$/, "");
if (hasRelativeProtocol && !options.normalizeProtocol) urlString = urlString.replace(/^http:\/\//, "//");
if (options.stripProtocol) urlString = urlString.replace(/^(?:https?:)?\/\//, "");
return urlString;
}
/**
* parseUrl
* Parses the input url.
*
* **Note**: This *throws* if invalid urls are provided.
*
* @name parseUrl
* @function
* @param {String} url The input url.
* @param {Boolean|Object} normalize Whether to normalize the url or not.
* Default is `false`. If `true`, the url will
* be normalized. If an object, it will be the
* options object sent to [`normalize-url`](https://github.com/sindresorhus/normalize-url).
*
* For SSH urls, normalize won't work.
*
* @return {Object} An object containing the following fields:
*
* - `protocols` (Array): An array with the url protocols (usually it has one element).
* - `protocol` (String): The first protocol, `"ssh"` (if the url is an ssh url) or `"file"`.
* - `port` (null|Number): The domain port.
* - `resource` (String): The url domain (including subdomains).
* - `user` (String): The authentication user (usually for ssh urls).
* - `pathname` (String): The url pathname.
* - `hash` (String): The url hash.
* - `search` (String): The url querystring value.
* - `href` (String): The input url.
* - `query` (Object): The url querystring, parsed as object.
* - `parse_failed` (Boolean): Whether the parsing failed or not.
*/
const parseUrl$1 = (url, normalize = false) => {
const GIT_RE = /^(?:([a-z_][a-z0-9_-]{0,31})@|https?:\/\/)([\w\.\-@]+)[\/:]([\~,\.\w,\-,\_,\/]+?(?:\.git|\/)?)$/;
const throwErr = (msg$3) => {
const err$1 = new Error(msg$3);
err$1.subject_url = url;
throw err$1;
};
if (typeof url !== "string" || !url.trim()) throwErr("Invalid url.");
if (url.length > parseUrl$1.MAX_INPUT_LENGTH) throwErr("Input exceeds maximum length. If needed, change the value of parseUrl.MAX_INPUT_LENGTH.");
if (normalize) {
if (typeof normalize !== "object") normalize = { stripHash: false };
url = normalizeUrl(url, normalize);
}
const parsed = parsePath__default["default"](url);
if (parsed.parse_failed) {
const matched = parsed.href.match(GIT_RE);
if (matched) {
parsed.protocols = ["ssh"];
parsed.protocol = "ssh";
parsed.resource = matched[2];
parsed.host = matched[2];
parsed.user = matched[1];
parsed.pathname = `/${matched[3]}`;
parsed.parse_failed = false;
} else throwErr("URL parsing failed.");
}
return parsed;
};
parseUrl$1.MAX_INPUT_LENGTH = 2048;
module.exports = parseUrl$1;
}) });
//#endregion
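//#region example: parseUrl() with the ssh fallback (illustrative sketch, not part of the original bundle)
// Worked example of the parseUrl behaviour documented above: WHATWG URL parsing fails on
// scp-like ssh urls, so the GIT_RE fallback fills in the fields. Never invoked by the bundle.
function exampleParseUrl() {
  const parseUrlFn = require_dist();
  const ssh = parseUrlFn("git@github.com:owner/repo.git");
  // ssh.protocol === "ssh", ssh.resource === "github.com", ssh.pathname === "/owner/repo.git"
  const https = parseUrlFn("https://github.com/owner/repo.git", true); // normalized first
  // https.protocol === "https", https.pathname === "/owner/repo.git"
  return { ssh, https };
}
//#endregion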
//#region ../../node_modules/.pnpm/is-ssh@1.4.1/node_modules/is-ssh/lib/index.js
var require_lib$6 = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/is-ssh@1.4.1/node_modules/is-ssh/lib/index.js": ((exports, module) => {
var protocols = require_lib$8();
/**
* isSsh
* Checks whether an input value is an ssh url.
*
* @name isSsh
* @function
* @param {String|Array} input The input url or an array of protocols.
* @return {Boolean} `true` if the input is a ssh url, `false` otherwise.
*/
function isSsh$1(input) {
if (Array.isArray(input)) return input.indexOf("ssh") !== -1 || input.indexOf("rsync") !== -1;
if (typeof input !== "string") return false;
var prots = protocols(input);
input = input.substring(input.indexOf("://") + 3);
if (isSsh$1(prots)) return true;
var urlPortPattern = /* @__PURE__ */ new RegExp(".([a-zA-Z\\d]+):(\\d+)/");
return !input.match(urlPortPattern) && input.indexOf("@") < input.indexOf(":");
}
module.exports = isSsh$1;
}) });
//#endregion
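//#region example: isSsh() (illustrative sketch, not part of the original bundle)
// Worked example of the isSsh check documented above; never invoked by the bundle.
function exampleIsSsh() {
  const isSshFn = require_lib$6();
  isSshFn("git@github.com:owner/repo.git"); // true (scp-like syntax)
  isSshFn("ssh://git@github.com/owner/repo.git"); // true (explicit protocol)
  return isSshFn("https://github.com/owner/repo"); // false
}
//#endregion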
//#region ../../node_modules/.pnpm/git-up@7.0.0/node_modules/git-up/lib/index.js
var require_lib$5 = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/git-up@7.0.0/node_modules/git-up/lib/index.js": ((exports, module) => {
var parseUrl = require_dist(), isSsh = require_lib$6();
/**
* gitUp
* Parses the input url.
*
* @name gitUp
* @function
* @param {String} input The input url.
* @return {Object} An object containing the following fields:
*
* - `protocols` (Array): An array with the url protocols (usually it has one element).
* - `port` (null|Number): The domain port.
* - `resource` (String): The url domain (including subdomains).
* - `user` (String): The authentication user (usually for ssh urls).
* - `pathname` (String): The url pathname.
* - `hash` (String): The url hash.
* - `search` (String): The url querystring value.
* - `href` (String): The input url.
* - `protocol` (String): The git url protocol.
* - `token` (String): The oauth token (could appear in the https urls).
*/
function gitUp$1(input) {
var output = parseUrl(input);
output.token = "";
if (output.password === "x-oauth-basic") output.token = output.user;
else if (output.user === "x-token-auth") output.token = output.password;
if (isSsh(output.protocols) || output.protocols.length === 0 && isSsh(input)) output.protocol = "ssh";
else if (output.protocols.length) output.protocol = output.protocols[0];
else {
output.protocol = "file";
output.protocols = ["file"];
}
output.href = output.href.replace(/\/$/, "");
return output;
}
module.exports = gitUp$1;
}) });
//#endregion
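//#region example: gitUp() token extraction (illustrative sketch, not part of the original bundle)
// Worked example of the gitUp fields documented above, including the oauth token handling
// for https urls. The token value is fake; never invoked by the bundle.
function exampleGitUp() {
  const gitUpFn = require_lib$5();
  const out = gitUpFn("https://1234abcd:x-oauth-basic@github.com/owner/repo.git");
  // out.protocol === "https", out.resource === "github.com",
  // out.token === "1234abcd", out.pathname === "/owner/repo.git"
  return out;
}
//#endregion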
//#region ../../node_modules/.pnpm/git-url-parse@13.1.1/node_modules/git-url-parse/lib/index.js
var require_lib$4 = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/git-url-parse@13.1.1/node_modules/git-url-parse/lib/index.js": ((exports, module) => {
var gitUp = require_lib$5();
/**
* gitUrlParse
* Parses a Git url.
*
* @name gitUrlParse
* @function
* @param {String} url The Git url to parse.
* @return {GitUrl} The `GitUrl` object containing:
*
* - `protocols` (Array): An array with the url protocols (usually it has one element).
* - `port` (null|Number): The domain port.
* - `resource` (String): The url domain (including subdomains).
* - `user` (String): The authentication user (usually for ssh urls).
* - `pathname` (String): The url pathname.
* - `hash` (String): The url hash.
* - `search` (String): The url querystring value.
* - `href` (String): The input url.
* - `protocol` (String): The git url protocol.
* - `token` (String): The oauth token (could appear in the https urls).
* - `source` (String): The Git provider (e.g. `"github.com"`).
* - `owner` (String): The repository owner.
* - `name` (String): The repository name.
* - `ref` (String): The repository ref (e.g., "master" or "dev").
* - `filepath` (String): A filepath relative to the repository root.
* - `filepathtype` (String): The type of filepath in the url ("blob" or "tree").
* - `full_name` (String): The owner and name values in the `owner/name` format.
* - `toString` (Function): A function to stringify the parsed url into another url type.
* - `organization` (String): The organization the owner belongs to. This is CloudForge specific.
* - `git_suffix` (Boolean): Whether to add the `.git` suffix or not.
*
*/
function gitUrlParse(url) {
if (typeof url !== "string") throw new Error("The url must be a string.");
var shorthandRe = /^([a-z\d-]{1,39})\/([-\.\w]{1,100})$/i;
if (shorthandRe.test(url)) url = "https://github.com/" + url;
var urlInfo = gitUp(url), sourceParts = urlInfo.resource.split("."), splits = null;
urlInfo.toString = function(type) {
return gitUrlParse.stringify(this, type);
};
urlInfo.source = sourceParts.length > 2 ? sourceParts.slice(1 - sourceParts.length).join(".") : urlInfo.resource;
urlInfo.git_suffix = /\.git$/.test(urlInfo.pathname);
urlInfo.name = decodeURIComponent((urlInfo.pathname || urlInfo.href).replace(/(^\/)|(\/$)/g, "").replace(/\.git$/, ""));
urlInfo.owner = decodeURIComponent(urlInfo.user);
switch (urlInfo.source) {
case "git.cloudforge.com":
urlInfo.owner = urlInfo.user;
urlInfo.organization = sourceParts[0];
urlInfo.source = "cloudforge.com";
break;
case "visualstudio.com": if (urlInfo.resource === "vs-ssh.visualstudio.com") {
splits = urlInfo.name.split("/");
if (splits.length === 4) {
urlInfo.organization = splits[1];
urlInfo.owner = splits[2];
urlInfo.name = splits[3];
urlInfo.full_name = splits[2] + "/" + splits[3];
}
break;
} else {
splits = urlInfo.name.split("/");
if (splits.length === 2) {
urlInfo.owner = splits[1];
urlInfo.name = splits[1];
urlInfo.full_name = "_git/" + urlInfo.name;
} else if (splits.length === 3) {
urlInfo.name = splits[2];
if (splits[0] === "DefaultCollection") {
urlInfo.owner = splits[2];
urlInfo.organization = splits[0];
urlInfo.full_name = urlInfo.organization + "/_git/" + urlInfo.name;
} else {
urlInfo.owner = splits[0];
urlInfo.full_name = urlInfo.owner + "/_git/" + urlInfo.name;
}
} else if (splits.length === 4) {
urlInfo.organization = splits[0];
urlInfo.owner = splits[1];
urlInfo.name = splits[3];
urlInfo.full_name = urlInfo.organization + "/" + urlInfo.owner + "/_git/" + urlInfo.name;
}
break;
}
case "dev.azure.com":
case "azure.com": if (urlInfo.resource === "ssh.dev.azure.com") {
splits = urlInfo.name.split("/");
if (splits.length === 4) {
urlInfo.organization = splits[1];
urlInfo.owner = splits[2];
urlInfo.name = splits[3];
}
break;
} else {
splits = urlInfo.name.split("/");
if (splits.length === 5) {
urlInfo.organization = splits[0];
urlInfo.owner = splits[1];
urlInfo.name = splits[4];
urlInfo.full_name = "_git/" + urlInfo.name;
} else if (splits.length === 3) {
urlInfo.name = splits[2];
if (splits[0] === "DefaultCollection") {
urlInfo.owner = splits[2];
urlInfo.organization = splits[0];
urlInfo.full_name = urlInfo.organization + "/_git/" + urlInfo.name;
} else {
urlInfo.owner = splits[0];
urlInfo.full_name = urlInfo.owner + "/_git/" + urlInfo.name;
}
} else if (splits.length === 4) {
urlInfo.organization = splits[0];
urlInfo.owner = splits[1];
urlInfo.name = splits[3];
urlInfo.full_name = urlInfo.organization + "/" + urlInfo.owner + "/_git/" + urlInfo.name;
}
if (urlInfo.query && urlInfo.query["path"]) urlInfo.filepath = urlInfo.query["path"].replace(/^\/+/g, "");
if (urlInfo.query && urlInfo.query["version"]) urlInfo.ref = urlInfo.query["version"].replace(/^GB/, "");
break;
}
default:
splits = urlInfo.name.split("/");
var nameIndex = splits.length - 1;
if (splits.length >= 2) {
var dashIndex = splits.indexOf("-", 2);
var blobIndex = splits.indexOf("blob", 2);
var treeIndex = splits.indexOf("tree", 2);
var commitIndex = splits.indexOf("commit", 2);
var srcIndex = splits.indexOf("src", 2);
var rawIndex = splits.indexOf("raw", 2);
var editIndex = splits.indexOf("edit", 2);
nameIndex = dashIndex > 0 ? dashIndex - 1 : blobIndex > 0 ? blobIndex - 1 : treeIndex > 0 ? treeIndex - 1 : commitIndex > 0 ? commitIndex - 1 : srcIndex > 0 ? srcIndex - 1 : rawIndex > 0 ? rawIndex - 1 : editIndex > 0 ? editIndex - 1 : nameIndex;
urlInfo.owner = splits.slice(0, nameIndex).join("/");
urlInfo.name = splits[nameIndex];
if (commitIndex > 0) urlInfo.commit = splits[nameIndex + 2];
}
urlInfo.ref = "";
urlInfo.filepathtype = "";
urlInfo.filepath = "";
var offsetNameIndex = splits.length > nameIndex && splits[nameIndex + 1] === "-" ? nameIndex + 1 : nameIndex;
if (splits.length > offsetNameIndex + 2 && [
"raw",
"src",
"blob",
"tree",
"edit"
].indexOf(splits[offsetNameIndex + 1]) >= 0) {
urlInfo.filepathtype = splits[offsetNameIndex + 1];
urlInfo.ref = splits[offsetNameIndex + 2];
if (splits.length > offsetNameIndex + 3) urlInfo.filepath = splits.slice(offsetNameIndex + 3).join("/");
}
urlInfo.organization = urlInfo.owner;
break;
}
if (!urlInfo.full_name) {
urlInfo.full_name = urlInfo.owner;
if (urlInfo.name) {
urlInfo.full_name && (urlInfo.full_name += "/");
urlInfo.full_name += urlInfo.name;
}
}
if (urlInfo.owner.startsWith("scm/")) {
urlInfo.source = "bitbucket-server";
urlInfo.owner = urlInfo.owner.replace("scm/", "");
urlInfo.organization = urlInfo.owner;
urlInfo.full_name = urlInfo.owner + "/" + urlInfo.name;
}
var bitbucket = /(projects|users)\/(.*?)\/repos\/(.*?)((\/.*$)|$)/;
var matches = bitbucket.exec(urlInfo.pathname);
if (matches != null) {
urlInfo.source = "bitbucket-server";
if (matches[1] === "users") urlInfo.owner = "~" + matches[2];
else urlInfo.owner = matches[2];
urlInfo.organization = urlInfo.owner;
urlInfo.name = matches[3];
splits = matches[4].split("/");
if (splits.length > 1) {
if (["raw", "browse"].indexOf(splits[1]) >= 0) {
urlInfo.filepathtype = splits[1];
if (splits.length > 2) urlInfo.filepath = splits.slice(2).join("/");
} else if (splits[1] === "commits" && splits.length > 2) urlInfo.commit = splits[2];
}
urlInfo.full_name = urlInfo.owner + "/" + urlInfo.name;
if (urlInfo.query.at) urlInfo.ref = urlInfo.query.at;
else urlInfo.ref = "";
}
return urlInfo;
}
/**
* stringify
* Stringifies a `GitUrl` object.
*
* @name stringify
* @function
* @param {GitUrl} obj The parsed Git url object.
* @param {String} type The type of the stringified url (default `obj.protocol`).
* @return {String} The stringified url.
*/
gitUrlParse.stringify = function(obj, type) {
type = type || (obj.protocols && obj.protocols.length ? obj.protocols.join("+") : obj.protocol);
var port = obj.port ? ":" + obj.port : "";
var user = obj.user || "git";
var maybeGitSuffix = obj.git_suffix ? ".git" : "";
switch (type) {
case "ssh": if (port) return "ssh://" + user + "@" + obj.resource + port + "/" + obj.full_name + maybeGitSuffix;
else return user + "@" + obj.resource + ":" + obj.full_name + maybeGitSuffix;
case "git+ssh":
case "ssh+git":
case "ftp":
case "ftps": return type + "://" + user + "@" + obj.resource + port + "/" + obj.full_name + maybeGitSuffix;
case "http":
case "https":
var auth = obj.token ? buildToken(obj) : obj.user && (obj.protocols.includes("http") || obj.protocols.includes("https")) ? obj.user + "@" : "";
return type + "://" + auth + obj.resource + port + "/" + buildPath(obj) + maybeGitSuffix;
default: return obj.href;
}
};
/*!
* buildToken
* Builds OAuth token prefix (helper function)
*
* @name buildToken
* @function
* @param {GitUrl} obj The parsed Git url object.
* @return {String} token prefix
*/
function buildToken(obj) {
switch (obj.source) {
case "bitbucket.org": return "x-token-auth:" + obj.token + "@";
default: return obj.token + "@";
}
}
function buildPath(obj) {
switch (obj.source) {
case "bitbucket-server": return "scm/" + obj.full_name;
default: return "" + obj.full_name;
}
}
module.exports = gitUrlParse;
}) });
//#endregion
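//#region example: gitUrlParse() and stringify() (illustrative sketch, not part of the original bundle)
// Worked example of the parser and stringifier documented above, converting an https url
// into its ssh form. Never invoked by the bundle.
function exampleGitUrlParse() {
  const gitUrlParseFn = require_lib$4();
  const info = gitUrlParseFn("https://github.com/owner/repo.git");
  // info.source === "github.com", info.owner === "owner", info.name === "repo",
  // info.full_name === "owner/repo", info.git_suffix === true
  return info.toString("ssh"); // "git@github.com:owner/repo.git"
}
//#endregion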
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/getRepositoryName.js
var require_getRepositoryName = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/getRepositoryName.js": ((exports) => {
var __importDefault$16 = exports && exports.__importDefault || function(mod) {
return mod && mod.__esModule ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRepositoryName = void 0;
const git_url_parse_1 = __importDefault$16(require_lib$4());
/**
* Get a repository full name (owner and repo, plus organization for ADO/VSO) from a repository URL,
* including special handling for the many ADO/VSO URL formats.
*
* Examples:
* - returns `microsoft/workspace-tools` for `https://github.com/microsoft/workspace-tools.git`
* - returns `foo/bar/some-repo` for `https://dev.azure.com/foo/bar/_git/some-repo`
*/
function getRepositoryName(url) {
try {
let fixedUrl = url.replace("/_optimized/", "/").replace("/DefaultCollection/", "/");
const parsedUrl = (0, git_url_parse_1.default)(fixedUrl);
const isVSO = fixedUrl.includes(".visualstudio.com");
const isADO = fixedUrl.includes("dev.azure.com");
if (!isVSO && !isADO) return parsedUrl.full_name;
const sshMatch = parsedUrl.full_name.match(/(vs-ssh\.visualstudio\.com|ssh\.dev\.azure\.com):v\d+\/([^/]+)\/([^/]+)/);
if (sshMatch) return `${sshMatch[2]}/${sshMatch[3]}/${parsedUrl.name}`;
let organization = parsedUrl.organization;
if (!organization && isVSO) organization = parsedUrl.resource.match(/([^.@]+)\.visualstudio\.com/)?.[1];
return `${organization}/${parsedUrl.owner}/${parsedUrl.name}`;
} catch (err$1) {
return "";
}
}
exports.getRepositoryName = getRepositoryName;
}) });
//#endregion
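//#region example: getRepositoryName() (illustrative sketch, not part of the original bundle)
// Worked example mirroring the two urls from the JSDoc above; never invoked by the bundle.
function exampleGetRepositoryName() {
  const { getRepositoryName } = require_getRepositoryName();
  getRepositoryName("https://github.com/microsoft/workspace-tools.git"); // "microsoft/workspace-tools"
  return getRepositoryName("https://dev.azure.com/foo/bar/_git/some-repo"); // "foo/bar/some-repo"
}
//#endregion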
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/getDefaultRemote.js
var require_getDefaultRemote = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/getDefaultRemote.js": ((exports) => {
var __importDefault$15 = exports && exports.__importDefault || function(mod) {
return mod && mod.__esModule ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDefaultRemote = void 0;
const fs_1$8 = __importDefault$15(__require("fs"));
const path_1$12 = __importDefault$15(__require("path"));
const paths_1$2 = require_paths();
const getRepositoryName_1 = require_getRepositoryName();
const git_1$4 = require_git$1();
function getDefaultRemote(cwdOrOptions) {
const options = typeof cwdOrOptions === "string" ? { cwd: cwdOrOptions } : cwdOrOptions;
const { cwd, strict, verbose } = options;
const log$1 = (message) => verbose && console.log(message);
const logOrThrow = (message) => {
if (strict) throw new Error(message);
log$1(message);
};
const gitRoot = (0, paths_1$2.findGitRoot)(cwd);
let packageJson = {};
const packageJsonPath = path_1$12.default.join(gitRoot, "package.json");
try {
packageJson = JSON.parse(fs_1$8.default.readFileSync(packageJsonPath, "utf8").trim());
} catch (e) {
logOrThrow(`Could not read "${packageJsonPath}"`);
}
const { repository } = packageJson;
const repositoryUrl = typeof repository === "string" ? repository : repository && repository.url || "";
if (!repositoryUrl) console.log(`Valid "repository" key not found in "${packageJsonPath}". Consider adding this info for more accurate git remote detection.`);
/** Repository full name (owner and repo name) specified in package.json */
const repositoryName = (0, getRepositoryName_1.getRepositoryName)(repositoryUrl);
const remotesResult = (0, git_1$4.git)(["remote", "-v"], { cwd });
if (!remotesResult.success) logOrThrow(`Could not determine available git remotes under "${cwd}"`);
/** Mapping from remote URL to full name (owner and repo name) */
const remotes = {};
remotesResult.stdout.split("\n").forEach((line) => {
const [remoteName, remoteUrl] = line.split(/\s+/);
const remoteRepoName = (0, getRepositoryName_1.getRepositoryName)(remoteUrl);
if (remoteRepoName) remotes[remoteRepoName] = remoteName;
});
if (repositoryName) {
if (remotes[repositoryName]) return remotes[repositoryName];
logOrThrow(`Could not find remote pointing to repository "${repositoryName}".`);
}
const allRemoteNames = Object.values(remotes);
const fallbacks = [
"upstream",
"origin",
...allRemoteNames
];
for (const fallback of fallbacks) if (allRemoteNames.includes(fallback)) {
log$1(`Default to remote "${fallback}"`);
return fallback;
}
logOrThrow(`Could not find any remotes in git repo at "${gitRoot}".`);
log$1(`Assuming default remote "origin".`);
return "origin";
}
exports.getDefaultRemote = getDefaultRemote;
}) });
//#endregion
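//#region example: getDefaultRemote() (illustrative sketch, not part of the original bundle)
// Sketch of the remote-detection helper above: it prefers the remote whose url matches
// package.json's "repository", then falls back to "upstream", "origin", or any remote.
// The cwd is hypothetical; never invoked by the bundle.
function exampleGetDefaultRemote() {
  const { getDefaultRemote } = require_getDefaultRemote();
  return getDefaultRemote({ cwd: "/repo", verbose: true });
}
//#endregion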
//#region ../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/gitUtilities.js
var require_gitUtilities = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/workspace-tools@0.36.4/node_modules/workspace-tools/lib/git/gitUtilities.js": ((exports) => {
Object.defineProperty(exports, "__esModule", { value: true });
exports.listAllTrackedFiles = exports.getDefaultBranch = exports.parseRemoteBranch = exports.getRemoteBranch = exports.getParentBranch = exports.revertLocalChanges = exports.stageAndCommit = exports.commit = exports.stage = exports.init = exports.getFileAddedHash = exports.getCurrentHash = exports.getShortBranchName = exports.getFullBranchRef = exports.getBranchName = exports.getUserEmail = exports.getRecentCommitMessages = exports.getStagedChanges = exports.getChangesBetweenRefs = exports.getBranchChanges = exports.getChanges = exports.getUnstagedChanges = exports.fetchRemoteBranch = exports.fetchRemote = exports.getUntrackedChanges = void 0;
const git_1$3 = require_git$1();
function getUntrackedChanges(cwd) {
try {
return processGitOutput((0, git_1$3.git)([
"ls-files",
"--others",
"--exclude-standard"
], { cwd }));
} catch (e) {
throw new git_1$3.GitError(`Cannot gather information about untracked changes`, e);
}
}
exports.getUntrackedChanges = getUntrackedChanges;
function fetchRemote(remote, cwd) {
const results = (0, git_1$3.git)([
"fetch",
"--",
remote
], { cwd });
if (!results.success) throw new git_1$3.GitError(`Cannot fetch remote "${remote}"`);
}
exports.fetchRemote = fetchRemote;
function fetchRemoteBranch(remote, remoteBranch, cwd) {
const results = (0, git_1$3.git)([
"fetch",
"--",
remote,
remoteBranch
], { cwd });
if (!results.success) throw new git_1$3.GitError(`Cannot fetch branch "${remoteBranch}" from remote "${remote}"`);
}
exports.fetchRemoteBranch = fetchRemoteBranch;
/**
* Gets all the changes that have not been staged yet
* @param cwd
*/
function getUnstagedChanges(cwd) {
try {
return processGitOutput((0, git_1$3.git)([
"--no-pager",
"diff",
"--name-only",
"--relative"
], { cwd }));
} catch (e) {
throw new git_1$3.GitError(`Cannot gather information about unstaged changes`, e);
}
}
exports.getUnstagedChanges = getUnstagedChanges;
function getChanges(branch, cwd) {
try {
return processGitOutput((0, git_1$3.git)([
"--no-pager",
"diff",
"--relative",
"--name-only",
branch + "..."
], { cwd }));
} catch (e) {
throw new git_1$3.GitError(`Cannot gather information about changes`, e);
}
}
exports.getChanges = getChanges;
/**
* Gets all the changes between the branch and the merge-base
*/
function getBranchChanges(branch, cwd) {
return getChangesBetweenRefs(branch, "", [], "", cwd);
}
exports.getBranchChanges = getBranchChanges;
function getChangesBetweenRefs(fromRef, toRef, options, pattern$1, cwd) {
try {
return processGitOutput((0, git_1$3.git)([
"--no-pager",
"diff",
"--name-only",
"--relative",
...options,
`${fromRef}...${toRef}`,
...pattern$1 ? ["--", pattern$1] : []
], { cwd }));
} catch (e) {
throw new git_1$3.GitError(`Cannot gather information about changes between refs (${fromRef} to ${toRef})`, e);
}
}
exports.getChangesBetweenRefs = getChangesBetweenRefs;
function getStagedChanges(cwd) {
try {
return processGitOutput((0, git_1$3.git)([
"--no-pager",
"diff",
"--relative",
"--staged",
"--name-only"
], { cwd }));
} catch (e) {
throw new git_1$3.GitError(`Cannot gather information about staged changes`, e);
}
}
exports.getStagedChanges = getStagedChanges;
function getRecentCommitMessages(branch, cwd) {
try {
const results = (0, git_1$3.git)([
"log",
"--decorate",
"--pretty=format:%s",
`${branch}..HEAD`
], { cwd });
if (!results.success) return [];
return results.stdout.split(/\n/).map((line) => line.trim()).filter((line) => !!line);
} catch (e) {
throw new git_1$3.GitError(`Cannot gather information about recent commits`, e);
}
}
exports.getRecentCommitMessages = getRecentCommitMessages;
function getUserEmail(cwd) {
try {
const results = (0, git_1$3.git)(["