// gitly — an API to download and/or extract git repositories.
// Bundled CommonJS output (407 lines, 390 loc, 13.4 kB, JavaScript).
;
// --- esbuild-generated CommonJS interop helpers (do not edit by hand) ---
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines every entry of `all` on `target` as an enumerable lazy getter.
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and any key `to` already owns; preserves enumerability where possible.
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
// Wraps a CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
// Marks `mod` as an ES module and re-exports its members for CJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/main.ts
// Public API surface: the default export `gitly` plus named helpers.
// `__export` installs lazy getters, so the referenced functions may be
// defined later in the bundle.
var main_exports = {};
__export(main_exports, {
clone: () => clone,
default: () => gitly,
download: () => download,
extract: () => extract_default,
parse: () => parse
});
module.exports = __toCommonJS(main_exports);
// src/utils/clone.ts
var import_cross_spawn = __toESM(require("cross-spawn"), 1);
var import_promises = require("fs/promises");
var import_node_path3 = __toESM(require("path"), 1);
var tar2 = __toESM(require("tar"), 1);
// src/utils/archive.ts
var import_node_os = __toESM(require("os"), 1);
var import_node_path = require("path");
var tar = __toESM(require("tar"), 1);
/**
 * Builds the tarball download URL for a parsed repository descriptor.
 * A custom `options.url.filter` callback, when provided, takes precedence.
 * Known hosts get host-specific archive layouts; anything else is assumed
 * to be GitHub.
 */
function getArchiveUrl(info, options = {}) {
  const { path: repo, type, hostname } = info;
  if (options.url != null && options.url.filter) {
    return options.url.filter(info);
  }
  if (hostname === "bitbucket") {
    return `https://bitbucket.org${repo}/get/${type}.tar.gz`;
  }
  if (hostname === "gitlab") {
    // GitLab embeds the repository name (second path segment) in the file name.
    return `https://gitlab.com${repo}/-/archive/${type}/${repo.split("/")[2]}-${type}.tar.gz`;
  }
  return `https://github.com${repo}/archive/${type}.tar.gz`;
}
/**
 * Computes where the cached archive for `info` lives on disk:
 * <temp-or-~/.gitly>/<host>/<owner>/<repo>/<type>.tar.gz
 */
function getArchivePath(info, options = {}) {
  const { path: path2, type, hostname: site } = info;
  // `options.temp` overrides the default cache root under the home directory.
  const root = options.temp || (0, import_node_path.join)(import_node_os.default.homedir(), ".gitly");
  return (0, import_node_path.join)(root, site, path2, `${type}.tar.gz`);
}
var extract2 = tar.extract;
// src/utils/error.ts
// Base class for all gitly errors. Keeps the unprefixed message in
// `rawMessage` and reads the subclass's static `type` tag, folding it into
// the final message as "[gitly:<type>]: …" (plain "[gitly]: …" when absent).
var GitlyAbstractError = class extends Error {
  constructor(message, code = -1) {
    super(message);
    this.message = message;
    this.code = code;
    this.rawMessage = message;
    this.type = this.ctor.type;
    const prefix = this.type ? `gitly:${this.type}` : "gitly";
    this.message = `[${prefix}]: ${message}`;
    // Restore the prototype chain, which extending Error can break
    // when classes are transpiled to older targets.
    Object.setPrototypeOf(this, new.target.prototype);
  }
  // The concrete constructor, so subclass statics are reachable from here.
  get ctor() {
    return this.constructor;
  }
};
// Concrete error subclasses. The transpiler hoists each class into a
// temporary (_a.._a5) so it can attach the static `type` tag that
// GitlyAbstractError's constructor reads to build the "[gitly:<type>]"
// message prefix. The inline string comments are the original TypeScript
// enum member names.
var _a;
var GitlyUnknownError = (_a = class extends GitlyAbstractError {
}, _a.type = "unknown" /* Unknown */, _a);
var _a2;
var GitlyFetchError = (_a2 = class extends GitlyAbstractError {
}, _a2.type = "fetch" /* Fetch */, _a2);
var _a3;
var GitlyExtractError = (_a3 = class extends GitlyAbstractError {
}, _a3.type = "extract" /* Extract */, _a3);
var _a4;
var GitlyDownloadError = (_a4 = class extends GitlyAbstractError {
}, _a4.type = "download" /* Download */, _a4);
var _a5;
var GitlyCloneError = (_a5 = class extends GitlyAbstractError {
}, _a5.type = "clone" /* Clone */, _a5);
// src/utils/execute.ts
// Runs `tasks` (async thunks) one at a time and resolves with the first
// truthy result. Rejects as soon as a task rejects. NOTE(review): when
// every task returns falsy — or `tasks` is empty — the recursion reaches
// `tasks[0]()` on an empty array; the resulting TypeError propagates up
// the `.then`/`.catch` chain and rejects the outer promise. Callers
// (clone/download) rely on this rejection as their "nothing worked" signal.
async function execute(tasks) {
return new Promise((resolve2, reject) => {
const next = () => execute(tasks.slice(1)).then(resolve2);
return tasks[0]().then((t) => t ? resolve2(t) : next()).catch(reject);
});
}
// src/utils/exists.ts
var import_node_fs = require("fs");
var import_node_path2 = require("path");
// src/utils/parse.ts
var import_node_url = require("url");
/**
 * Parses a repository shorthand or URL into its components
 * (protocol, host, hostname, owner, repository, path, hash, type).
 * `type` is the branch/tag taken from the hash, defaulting to "master".
 */
function parse(url, options = {}) {
  const { url: normalized, host } = normalizeURL(url, options);
  const parsed = new import_node_url.URL(normalized);
  const segments = (parsed.pathname || "").split("/").filter(Boolean);
  const [owner = "", repository = ""] = segments;
  return {
    protocol: (parsed.protocol || "https").replace(/:/g, ""),
    host: parsed.host || host || "github.com",
    // Strip the TLD so e.g. "github.com" becomes "github".
    hostname: (parsed.hostname || host || "github").replace(/\.(\S+)/, ""),
    hash: parsed.hash || "",
    href: parsed.href || "",
    path: parsed.pathname || "",
    repository,
    owner,
    type: (parsed.hash || "#master").substring(1)
  };
}
/**
 * Expands shorthands ("owner/repo", "host:owner/repo", "host.tld/owner/repo")
 * into a full https URL, and reports the host extracted from a
 * "host:owner/repo" shorthand.
 */
function normalizeURL(url, options) {
  const { host } = options;
  // Guard against pathological inputs (ReDoS-style padding with zeros).
  const hasTooManyZeros = (value) => value.includes("0") && Array.from(value.matchAll(/0/g)).length > 25;
  if (hasTooManyZeros(url)) {
    throw new Error("Invalid argument");
  }
  if (host != null && hasTooManyZeros(host)) {
    throw new Error("Invalid argument");
  }
  const hasProtocol = /http(s)?:\/\//.test(url);
  const hasHost = /([\S]+):.+/.test(url);
  const hasTLD = /[\S]+\.([\D]+)/.test(url);
  let normalizedURL = url.replace("www.", "").replace(".git", "");
  let updatedHost = host || "";
  if (!hasProtocol && hasHost) {
    // "gitlab:owner/repo" -> "https://gitlab.com/owner/repo"
    const hostMatch = url.match(/([\S]+):.+/);
    updatedHost = hostMatch ? hostMatch[1] : "";
    normalizedURL = `https://${updatedHost}.com/${normalizedURL.replace(`${updatedHost}:`, "")}`;
  } else if (!hasProtocol && hasTLD) {
    // "github.com/owner/repo" -> "https://github.com/owner/repo"
    normalizedURL = `https://${normalizedURL}`;
  } else if (!hasProtocol) {
    // "owner/repo" -> "https://<host or github>.<tld or com>/owner/repo"
    const tldMatch = (host || "").match(/[\S]+\.([\D]+)/);
    const tld = tldMatch ? tldMatch[1] : "com";
    const domain = (host || "github").replace(`.${tld}`, "");
    normalizedURL = `https://${domain}.${tld}/${normalizedURL}`;
  }
  return { url: normalizedURL, host: updatedHost };
}
// src/utils/exists.ts
// Checks whether `path2` exists. Relative inputs are treated as repository
// shorthands and resolved to their cached-archive location first.
async function exists(path2, options = {}) {
  const target = (0, import_node_path2.isAbsolute)(path2)
    ? path2
    : getArchivePath(parse(path2), options);
  try {
    await import_node_fs.promises.access(target, import_node_fs.constants.F_OK);
    return true;
  } catch (_) {
    return false;
  }
}
// src/utils/offline.ts
var import_node_dns = require("dns");
var { lookup } = import_node_dns.promises;
// Heuristic connectivity probe: resolves false when a DNS lookup of
// google.com succeeds, true when it fails for any reason.
async function isOffline() {
  try {
    await lookup("google.com");
    return false;
  } catch (_) {
    return true;
  }
}
// src/utils/clone.ts
/**
 * Clones `repository` with git, packs the result into the gitly archive
 * cache, and resolves with the archive path ("" on failure unless
 * `options.throw` is set).
 */
async function clone(repository, options = {}) {
  const info = parse(repository, options);
  const archivePath = getArchivePath(info, options);
  // The working clone directory is the archive path minus its ".tar.gz" suffix.
  const directory = archivePath.replace(/\.tar\.gz$/, "");
  // Cache hit: the archive itself already exists.
  // FIX: previously checked `${archivePath}.tar.gz` — i.e. "…tar.gz.tar.gz",
  // a path that is never created — so the local cache could never be used.
  const local = async () => exists(archivePath);
  const remote = async () => {
    var _a6;
    // Remove a stale archive before re-creating it.
    if (await exists(archivePath)) {
      await (0, import_promises.rm)(archivePath);
    }
    const depth = ((_a6 = options == null ? void 0 : options.git) == null ? void 0 : _a6.depth) || 1;
    // Guard against argument injection into `git clone` (e.g. --upload-pack).
    if (repository.includes("--upload-pack") || directory.includes("--upload-pack")) {
      throw new GitlyCloneError("Invalid argument");
    }
    if (typeof depth !== "number") {
      throw new GitlyCloneError("Invalid depth option");
    }
    if (info.href.includes("--upload-pack")) {
      throw new GitlyCloneError("Invalid argument");
    }
    const child = (0, import_cross_spawn.default)("git", [
      "clone",
      "--depth",
      depth.toString(),
      info.href,
      directory
    ]);
    await new Promise((resolve2, reject) => {
      child.on("error", (reason) => reject(new GitlyCloneError(reason.message)));
      child.on("close", (code) => {
        if (code === 0) {
          // Drop the .git metadata, archive the clone, then remove the
          // working directory; any step failing rejects as a clone error.
          (0, import_promises.rm)(import_node_path3.default.resolve(directory, ".git"), { recursive: true }).then(
            () => (
              // Create the archive after cloning
              tar2.create(
                {
                  gzip: true,
                  file: archivePath,
                  // Go one level up to include the repository name in the archive
                  cwd: import_node_path3.default.resolve(archivePath, ".."),
                  portable: true
                },
                [info.type]
              )
            )
          ).then(
            () => (0, import_promises.rm)(import_node_path3.default.resolve(directory), {
              recursive: true
            })
          ).then(resolve2).catch((error) => reject(new GitlyCloneError(error.message)));
        } else {
          reject(new GitlyCloneError("Failed to clone the repository"));
        }
      });
    });
    return archivePath;
  };
  // FIX: `order` previously defaulted to [] so any branch other than
  // master/main (without force/cache/offline) had no strategy and always
  // failed; default to cache-then-clone, mirroring download().
  let order = [local, remote];
  if (await isOffline() || options.cache) {
    order = [local];
  } else if (options.force || ["master", "main"].includes(info.type)) {
    order = [remote, local];
  }
  try {
    const result = await execute(order);
    // `local` resolves with `true` on a cache hit; map it to the path.
    if (typeof result === "boolean") {
      return archivePath;
    }
    return result;
  } catch (error) {
    if (options.throw) {
      throw error;
    }
  }
  // Swallow the failure and report an empty path by default.
  return "";
}
// src/utils/download.ts
var import_shelljs = __toESM(require("shelljs"), 1);
// src/utils/fetch.ts
var import_axios = __toESM(require("axios"), 1);
var stream = __toESM(require("stream"), 1);
var import_node_util = require("util");
// src/utils/write.ts
var import_node_fs2 = require("fs");
var import_node_path4 = require("path");
var { mkdir } = import_node_fs2.promises;
// Creates any missing parent directories for `path2`, then opens a
// writable stream at the normalized path.
async function write(path2) {
  const target = (0, import_node_path4.normalize)(path2);
  const parent = (0, import_node_path4.dirname)(target);
  await mkdir(parent, { recursive: true });
  return (0, import_node_fs2.createWriteStream)(target);
}
// src/utils/fetch.ts
var import_node_url2 = require("url");
var pipeline2 = (0, import_node_util.promisify)(stream.pipeline);
/**
 * Streams `url` to disk at `file`, following 3xx redirects manually
 * (axios is configured to accept any status < 500 so redirects and
 * client errors reach this code). Throws GitlyDownloadError on >= 400.
 */
async function fetch(url, file, options = {}) {
  const response = await import_axios.default.get(url, {
    headers: options.headers,
    proxy: getProxy(options.proxy),
    responseType: "stream",
    validateStatus: (status) => status >= 200 && status < 500
  });
  const { statusText: message, status: code } = response;
  if (code >= 400) throw new GitlyDownloadError(message, code);
  if (code >= 300 && code < 400 && response.headers.location) {
    // FIX: the recursive call previously dropped `options`, losing custom
    // headers and proxy configuration on every redirect hop.
    return fetch(response.headers.location, file, options);
  }
  await pipeline2(response.data, await write(file));
  return file;
}
/**
 * Resolves the proxy configuration for axios. An explicit, fully-specified
 * proxy object wins; otherwise the https_proxy/http_proxy environment
 * variables are parsed. Returns `false` (axios's "no proxy") when nothing
 * usable is found.
 */
function getProxy(proxy) {
  if (proxy != null && typeof proxy.host === "string" && typeof proxy.port === "number") {
    return proxy;
  }
  const proxyUrl = process.env.https_proxy || process.env.http_proxy;
  if (typeof proxyUrl !== "string") {
    return false;
  }
  try {
    const { protocol, hostname, port } = new import_node_url2.URL(proxyUrl);
    // A proxy URL without an explicit port is unusable here.
    if (!port) {
      return false;
    }
    return {
      protocol: protocol.replace(":", ""),
      host: hostname,
      port: Number.parseInt(port, 10)
    };
  } catch (_) {
    // Malformed proxy URL: behave as if no proxy were configured.
    return false;
  }
}
// src/utils/download.ts
var { rm: rm2 } = import_shelljs.default;
/**
 * Downloads the archive for `repository` into the local gitly cache and
 * resolves with the archive path ("" on failure unless `options.throw`).
 */
async function download(repository, options = {}) {
  const info = parse(repository, options);
  const archivePath = getArchivePath(info, options);
  const url = getArchiveUrl(info, options);
  // Strategy thunks: reuse the cached archive, or fetch a fresh one.
  const fromCache = async () => exists(archivePath);
  const fromRemote = async () => {
    if (await exists(archivePath)) {
      rm2(archivePath);
    }
    return fetch(url, archivePath, options);
  };
  // Default: cache first, then network. Offline (or options.cache) is
  // cache-only; force or a default branch prefers the network.
  let order = [fromCache, fromRemote];
  if (await isOffline() || options.cache) {
    order = [fromCache];
  } else if (options.force || ["master", "main"].includes(info.type)) {
    order = [fromRemote, fromCache];
  }
  try {
    const result = await execute(order);
    // A cache hit resolves with `true`; map it to the archive path.
    return typeof result === "boolean" ? archivePath : result;
  } catch (error) {
    if (options.throw) {
      throw error;
    }
    return "";
  }
}
// src/utils/extract.ts
var import_node_fs3 = require("fs");
var import_node_path5 = require("path");
var { mkdir: mkdir2 } = import_node_fs3.promises;
// Extracts `source` (a .tar.gz) into `destination`, stripping the archive's
// top-level directory. Resolves with the absolute destination path, or ""
// when the source is missing or extraction fails (best-effort by design).
var extract_default = async (source, destination, options = {}) => {
  const target = (0, import_node_path5.resolve)(destination);
  if (!await exists(source, options)) {
    return "";
  }
  try {
    // An options-supplied entry filter wins; otherwise keep everything.
    const filter = options.extract && options.extract.filter ? options.extract.filter : () => true;
    await mkdir2(destination, { recursive: true });
    await extract2({ strip: 1, filter, file: source, cwd: target });
    return target;
  } catch (_) {
    return "";
  }
};
// src/utils/gitly.ts
/**
 * Fetches `repository` — cloning with git when options.backend === "git",
 * otherwise downloading an archive — then extracts it into `destination`.
 * Resolves with [archivePath, extractedPath]; either may be "" on failure.
 */
async function gitly(repository, destination, options) {
  const backend = options == null ? void 0 : options.backend;
  const source = backend === "git"
    ? await clone(repository, options)
    : await download(repository, options);
  return [source, await extract_default(source, destination, options)];
}
// Annotate the CommonJS export names for ESM import in node:
// (Dead code — the `0 &&` guard means this never runs; the bundler emits it
// so Node's CJS/ESM interop static analysis can detect the named exports.
// NOTE(review): it names `extract`, which is not a binding in this file —
// the actual value is `extract_default`; harmless because it is unreachable.)
0 && (module.exports = {
clone,
download,
extract,
parse
});
//# sourceMappingURL=main.cjs.map