@reliverse/rse-sdk
Version:
@reliverse/rse-sdk without the CLI. @reliverse/rse-sdk allows you to create new plugins for the @reliverse/rse CLI, interact with reliverse.org, and even extend your own CLI's functionality (you may also try @reliverse/dler-sdk for that use case).
398 lines (397 loc) • 13.5 kB
JavaScript
import { getRseConfigPath } from "@reliverse/cfg";
import path, { dirname } from "@reliverse/pathkit";
import { ensuredir } from "@reliverse/relifso";
import fs from "@reliverse/relifso";
import { rmEnsureDir, setHiddenAttribute } from "@reliverse/relifso";
import { relinka } from "@reliverse/relinka";
import { selectPrompt } from "@reliverse/rempts";
import { exec } from "child_process";
import https from "https";
import { HttpsProxyAgent } from "https-proxy-agent";
import { installDependencies } from "nypm";
import prettyBytes from "pretty-bytes";
import { simpleGit } from "simple-git";
import { extract } from "tar";
import { promisify } from "util";
import {
cliConfigJsonc,
cliConfigTs,
cliHomeRepos
} from "../../constants.js";
import { initGitDir } from "../../init/use-template/cp-modules/git-deploy-prompts/git.js";
// Promise-returning wrapper around child_process.exec; used by getCommitHash
// to shell out to `git ls-remote`.
const execAsync = promisify(exec);
/**
 * Recursively computes the total size, in bytes, of all regular files under
 * `directory`.
 *
 * @param {string} directory - Root directory to measure.
 * @param {string[]} [skipDirs] - Entry names (e.g. ".git") to skip entirely,
 *   at every depth.
 * @returns {Promise<number>} Sum of file sizes in bytes. Directory entries
 *   that are neither files nor directories (sockets, FIFOs, …) contribute 0.
 */
async function getFolderSize(directory, skipDirs = []) {
  const entries = await fs.readdir(directory);
  // Stat all entries concurrently instead of one await per entry — the
  // result is identical, but large trees are measured much faster.
  const sizes = await Promise.all(
    entries.map(async (entry) => {
      if (skipDirs.includes(entry)) return 0;
      const fullPath = path.join(directory, entry);
      const stats = await fs.stat(fullPath);
      if (stats.isFile()) return stats.size;
      if (stats.isDirectory()) return getFolderSize(fullPath, skipDirs);
      return 0;
    })
  );
  return sizes.reduce((total, size) => total + size, 0);
}
/**
 * Parses a repository reference into its components.
 *
 * Accepts full URLs ("https://github.com/owner/repo"), shorthand
 * ("owner/repo", "github:owner/repo"), an optional "#ref" (branch/tag),
 * and an optional trailing "/subdir" after the ref.
 *
 * @param {string} input - Raw repository reference.
 * @returns {{provider: string|undefined, repo: string, ref: string, subdir: string}}
 *   `ref` defaults to "main"; `subdir` keeps its leading slash (or "").
 */
function parseGitURI(input) {
  // Strip known host prefixes. Bitbucket lives at bitbucket.org (see
  // getRepoUrl), so the host pattern must accept both .com and .org —
  // the original only stripped ".com", leaving Bitbucket URLs unparsed.
  const normalizedInput = input
    .trim()
    .replace(
      /^https?:\/\/(www\.)?(github|gitlab|bitbucket|sourcehut)\.(com|org)\//,
      ""
    )
    .replace(/^(github|gitlab|bitbucket|sourcehut)\.(com|org)\//, "")
    .replace(/^https?:\/\/git\.sr\.ht\/~/, "")
    .replace(/^git\.sr\.ht\/~/, "");
  const pattern =
    /^(?:(?<provider>[^:]+):)?(?<repo>[^#]+)(?<refPart>#[^/]+)?(?<subdir>\/.*)?$/;
  const match = pattern.exec(normalizedInput);
  if (!match?.groups) {
    // Fall back to treating the whole input as the repo path.
    return {
      provider: void 0,
      repo: normalizedInput,
      ref: "main",
      subdir: ""
    };
  }
  const { provider, repo, refPart, subdir } = match.groups;
  return {
    provider: provider?.trim(),
    repo: repo?.trim() ?? normalizedInput,
    ref: refPart ? refPart.slice(1).trim() : "main",
    subdir: subdir?.trim() ?? ""
  };
}
/**
 * Builds the clone URL for `repo` on the given provider.
 * Any unrecognized (or missing) provider falls back to GitHub.
 *
 * @param {string} repo - "owner/name" repository path.
 * @param {string} [provider] - "github" | "gitlab" | "bitbucket" | "sourcehut".
 * @returns {string} HTTPS clone URL (sourcehut URLs carry no ".git" suffix).
 */
function getRepoUrl(repo, provider) {
  if (provider === "gitlab") {
    return `https://gitlab.com/${repo}.git`;
  }
  if (provider === "bitbucket") {
    return `https://bitbucket.org/${repo}.git`;
  }
  if (provider === "sourcehut") {
    return `https://git.sr.ht/~${repo}`;
  }
  return `https://github.com/${repo}.git`;
}
/**
 * Resolves a raw repository reference into the info needed to download it.
 *
 * @param {string} input - Raw reference (URL or shorthand); see parseGitURI.
 * @param {string} defaultProvider - Provider used when `input` names none.
 * @param {string} [githubToken] - If set, added as a Bearer Authorization header.
 * @param {string} [subdirectory] - Explicit subdir; overrides one parsed from `input`.
 * @returns {{name: string, version: string, subdir: string, defaultDir: string,
 *   headers: Record<string, string>, gitUrl: string}}
 */
function computeRepoInfo(input, defaultProvider, githubToken, subdirectory) {
  const { provider: parsedProvider, repo, ref, subdir } = parseGitURI(input);
  const actualProvider = parsedProvider ?? defaultProvider;
  // Replace EVERY slash (not just the first) so nested repo paths such as
  // GitLab subgroups ("group/sub/repo") still yield a valid directory name.
  const name = repo.replace(/\//g, "-");
  const headers = {};
  if (githubToken) {
    headers.Authorization = `Bearer ${githubToken}`;
  }
  const gitUrl = getRepoUrl(repo, actualProvider);
  return {
    name,
    version: ref,
    // Parsed subdirs keep a leading slash; strip it for path joins.
    subdir: subdirectory ?? subdir.replace(/^\/+/, ""),
    defaultDir: name,
    headers,
    gitUrl
  };
}
/**
 * Finds a directory path that does not exist yet, by appending "-1", "-2", …
 * to `projectName` until a free slot is found. Returns `basePath` unchanged
 * when nothing occupies it.
 *
 * @param {string} basePath - Preferred project path.
 * @param {string} projectName - Base name used to build numbered candidates.
 * @param {boolean} isDev - When true, candidates live under "tests-runtime".
 * @returns {Promise<string>} First non-existing candidate path.
 */
async function getUniqueProjectPath(basePath, projectName, isDev) {
  const parentDir = dirname(basePath);
  let candidatePath = basePath;
  for (let suffix = 1; await fs.pathExists(candidatePath); suffix += 1) {
    const candidateName = `${projectName}-${suffix}`;
    candidatePath = isDev
      ? path.join(parentDir, "tests-runtime", candidateName)
      : path.join(parentDir, candidateName);
  }
  return candidatePath;
}
/**
 * Resolves a branch/tag `ref` on a remote repository to its commit hash
 * using `git ls-remote`.
 *
 * NOTE(review): repoUrl/ref are interpolated into a shell command — callers
 * must not pass untrusted input here (consider execFile with an args array).
 *
 * @param {string} repoUrl - Remote repository URL (may embed credentials).
 * @param {string} ref - Branch or tag name to resolve.
 * @returns {Promise<string>} The commit hash for `ref`.
 * @throws {Error} When the ref is not found or git output is malformed.
 */
async function getCommitHash(repoUrl, ref) {
  const { stdout } = await execAsync(`git ls-remote ${repoUrl} ${ref}`);
  const lines = stdout.split("\n").filter(Boolean);
  if (!lines.length) {
    throw new Error(`Could not find commit hash for ref ${ref}`);
  }
  const firstLine = lines[0];
  if (!firstLine) {
    throw new Error("Invalid git response format");
  }
  // `git ls-remote` separates hash and ref name with a TAB, not a space —
  // splitting on " " would return the entire line as the "hash". Split on
  // any whitespace so the hash is isolated correctly.
  const [hash] = firstLine.split(/\s+/);
  if (!hash) {
    throw new Error("Failed to extract commit hash from git response");
  }
  return hash;
}
/**
 * Downloads `url` to the file at `dest`, following HTTP redirects.
 *
 * Redirect handling matters here: GitHub/GitLab archive endpoints answer
 * with a 3xx to a CDN host (e.g. codeload.github.com), and `https.get`
 * does NOT follow redirects on its own — without this the redirect body
 * would be written into the tarball file.
 *
 * Honors the `https_proxy` environment variable.
 *
 * @param {string} url - Tarball URL.
 * @param {string} dest - Destination file path.
 * @param {number} [redirectsLeft] - Remaining redirects before giving up.
 * @returns {Promise<void>}
 */
function downloadTarball(url, dest, redirectsLeft = 5) {
  return new Promise((resolve, reject) => {
    const proxy = process.env.https_proxy;
    const options = proxy ? { agent: new HttpsProxyAgent(proxy) } : {};
    const request = https.get(url, options, (response) => {
      const statusCode = response.statusCode ?? 0;
      const location = response.headers?.location;
      if (statusCode >= 300 && statusCode < 400 && location) {
        response.resume(); // drain so the socket is released
        if (redirectsLeft <= 0) {
          reject(new Error(`Too many redirects while downloading ${url}`));
          return;
        }
        // Location may be relative; resolve it against the current URL.
        downloadTarball(new URL(location, url).toString(), dest, redirectsLeft - 1)
          .then(resolve, reject);
        return;
      }
      if (statusCode >= 400) {
        response.resume();
        reject(
          new Error(
            `Failed to download tarball: ${response.statusCode} ${response.statusMessage}`
          )
        );
        return;
      }
      // Only open the destination file once we know the response is good.
      const file = fs.createWriteStream(dest);
      response.pipe(file);
      file.on("error", (err) => {
        fs.unlink(dest).catch(
          (unlinkErr) => console.error("Failed to unlink:", unlinkErr)
        );
        reject(err);
      });
      file.on("finish", () => {
        file.close(() => resolve());
      });
    });
    request.on("error", (err) => {
      fs.unlink(dest).catch(
        (unlinkErr) => console.error("Failed to unlink:", unlinkErr)
      );
      reject(err);
    });
  });
}
/**
 * Unpacks a .tar.gz archive into `dest`.
 *
 * When a `subdir` is given, enough leading path components are stripped so
 * the subdirectory's contents land directly in `dest`; otherwise only the
 * archive's single root folder is stripped.
 *
 * @param {string} tarball - Path to the .tar.gz file.
 * @param {string} dest - Directory to extract into.
 * @param {string} [subdir] - Slash-separated subdirectory inside the archive.
 * @returns {Promise<void>}
 */
async function extractTarball(tarball, dest, subdir = "") {
  let componentsToStrip = 1;
  if (subdir) {
    componentsToStrip = subdir.split("/").length;
  }
  await extract({ file: tarball, C: dest, strip: componentsToStrip });
}
/**
 * Downloads a repository into `cwd/projectName` (or `cwd/tests-runtime/projectName`
 * in dev mode) using one of three strategies, chosen in order:
 *   1. tarball cache   — when `cache` is true and git history is not preserved;
 *   2. fast clone      — when `fastCloneSource` points at a reusable .git dir;
 *   3. git clone       — otherwise (with optional sparse-checkout for subdirs).
 *
 * Along the way it resolves collisions with existing directories, backs up /
 * restores an existing rse config file, optionally re-inits git and installs
 * dependencies, and returns `{ source, dir }` plus optional `time` (seconds),
 * `size`/`sizePretty`, and `concurrency` fields depending on the return flags.
 *
 * @throws {Error} On invalid repo URLs, missing subdirectories, or git failures.
 */
export async function downloadRepo({
  repoURL,
  projectName,
  isDev,
  cwd,
  githubToken,
  install = false,
  provider = "github",
  subdirectory,
  force = false,
  forceClean = false,
  preserveGit = true,
  config,
  returnTime = false,
  returnSize = false,
  returnConcurrency = false,
  fastCloneSource,
  isTemplateDownload,
  cache = false
}) {
  relinka("verbose", `Downloading repo ${repoURL}...`);
  const startTime = Date.now();
  let tempCloneDir;
  const maxConcurrentProcesses = 6;
  // Dev runs are sandboxed under tests-runtime/ so they never clobber real projects.
  let projectPath = isDev ? path.join(cwd, "tests-runtime", projectName) : path.join(cwd, projectName);
  relinka("verbose", `Preparing to place repo in: ${projectPath}`);
  if (forceClean) {
    await fs.remove(projectPath);
  } else if (!force && await fs.pathExists(projectPath)) {
    const files = await fs.readdir(projectPath);
    // A directory holding only the rse config file counts as effectively empty.
    const hasOnlyRse = files.length === 1 && files[0] === cliConfigJsonc;
    if (files.length > 0 && !hasOnlyRse) {
      // Non-empty target: pick a fresh numbered sibling instead of overwriting.
      projectPath = await getUniqueProjectPath(projectPath, projectName, isDev);
      relinka(
        "verbose",
        `Directory already exists. Using new path: ${projectPath}`
      );
    }
  }
  await ensuredir(projectPath);
  const parentDir = dirname(projectPath);
  try {
    // Best-effort probe of the parent dir's config; failures are intentionally ignored.
    await getRseConfigPath(parentDir, isDev, true);
  } catch (_error) {
  }
  const { configPath: projectConfigPath } = await getRseConfigPath(
    projectPath,
    isDev,
    true
  );
  const hasConfig = await fs.pathExists(projectConfigPath);
  if (hasConfig) {
    // NOTE(review): the prompt says "parent directory" but the path checked
    // above comes from `projectPath` — confirm which location is meant.
    const choice = await selectPrompt({
      title: `${projectConfigPath} already exists in parent directory. What would you like to do?`,
      options: [
        { value: "delete", label: "Delete existing file" },
        { value: "backup", label: "Create backup" }
      ]
    });
    if (choice === "delete") {
      await fs.remove(projectConfigPath);
    } else {
      // NOTE(review): both ternary branches are "rseconfig" — presumably one
      // was meant to differ (e.g. by extension); verify intent.
      let backupPath = path.join(
        parentDir,
        projectConfigPath.endsWith(cliConfigJsonc) ? "rseconfig" : "rseconfig"
      );
      let iteration = 1;
      // Probe rseconfig1.jsonc, rseconfig2.jsonc, … until a free name is found.
      while (await fs.pathExists(backupPath)) {
        backupPath = path.join(
          parentDir,
          `${projectConfigPath.endsWith(cliConfigJsonc) ? "rseconfig" : "rseconfig"}${iteration}.${projectConfigPath.endsWith(cliConfigJsonc) ? "jsonc" : "ts"}`
        );
        iteration++;
      }
      await fs.move(projectConfigPath, backupPath);
    }
    // Move the config file out of the project dir before the dir is emptied.
    await fs.move(
      path.join(
        projectPath,
        projectConfigPath.endsWith(cliConfigJsonc) ? cliConfigJsonc : cliConfigTs
      ),
      projectConfigPath
    );
    // Empty and re-create the project directory so the download starts clean.
    await rmEnsureDir(projectPath);
  }
  const repoInfo = computeRepoInfo(
    repoURL,
    provider,
    githubToken,
    subdirectory
  );
  if (!repoInfo.gitUrl) {
    throw new Error(`Invalid repository URL or provider: ${repoURL}`);
  }
  let finalUrl = repoInfo.gitUrl;
  if (githubToken) {
    // Embed the token as basic-auth credentials in the clone URL.
    const authUrl = new URL(repoInfo.gitUrl);
    authUrl.username = "oauth2";
    authUrl.password = githubToken;
    finalUrl = authUrl.toString();
  }
  // --- Strategy 1: tarball cache (only valid when git history is discarded) ---
  if (cache && !preserveGit) {
    relinka("verbose", "Using tarball cache method...");
    // Cache key is the resolved commit hash, so moved branches re-download.
    const commitHash = await getCommitHash(finalUrl, repoInfo.version);
    const tarballCacheDir = path.join(cliHomeRepos, "tarball-cache");
    await ensuredir(tarballCacheDir);
    const tarballFile = path.join(tarballCacheDir, `${commitHash}.tar.gz`);
    if (!await fs.pathExists(tarballFile)) {
      // Each provider exposes commit archives under a different URL shape.
      let tarUrl = "";
      if (repoInfo.gitUrl.includes("gitlab.com")) {
        tarUrl = `${repoInfo.gitUrl.replace(".git", "")}/repository/archive.tar.gz?ref=${commitHash}`;
      } else if (repoInfo.gitUrl.includes("bitbucket.org")) {
        tarUrl = `${repoInfo.gitUrl.replace(".git", "")}/get/${commitHash}.tar.gz`;
      } else {
        tarUrl = `${repoInfo.gitUrl.replace(".git", "")}/archive/${commitHash}.tar.gz`;
      }
      relinka("verbose", `Downloading tarball from ${tarUrl}`);
      await downloadTarball(tarUrl, tarballFile);
    }
    relinka("verbose", `Extracting tarball to ${projectPath}`);
    await extractTarball(tarballFile, projectPath, repoInfo.subdir);
    const durationSeconds2 = (Date.now() - startTime) / 1e3;
    const result2 = { source: repoURL, dir: projectPath };
    if (returnTime) result2.time = durationSeconds2;
    if (returnSize) {
      const folderSizeBytes = await getFolderSize(projectPath, [".git"]);
      const sizeMB = parseFloat((folderSizeBytes / (1024 * 1024)).toFixed(2));
      result2.size = sizeMB;
      result2.sizePretty = prettyBytes(folderSizeBytes);
    }
    // Tarball extraction is a single-stream operation.
    if (returnConcurrency) result2.concurrency = 1;
    return result2;
  }
  // --- Strategy 2: fast clone (reuse an existing .git dir, then checkout) ---
  if (fastCloneSource) {
    relinka("verbose", `Using fast clone method from: ${fastCloneSource}`);
    await fs.copy(fastCloneSource, path.join(projectPath, ".git"));
    const git = simpleGit({ maxConcurrentProcesses });
    await git.cwd(projectPath);
    // Restore the working tree from the copied git metadata.
    await git.checkout(["--", "."]);
  } else {
    // --- Strategy 3: regular git clone ---
    const git = simpleGit({ maxConcurrentProcesses });
    try {
      if (repoInfo.subdir) {
        if (preserveGit) {
          // Full-history clone + sparse-checkout limited to the subdir.
          await git.clone(finalUrl, projectPath, [
            "--branch",
            repoInfo.version
          ]);
          await git.cwd(projectPath);
          await git.raw(["sparse-checkout", "init", "--cone"]);
          await git.raw(["sparse-checkout", "set", repoInfo.subdir]);
          const subdirPath = path.join(projectPath, repoInfo.subdir);
          if (!await fs.pathExists(subdirPath)) {
            throw new Error(
              `Subdirectory '${repoInfo.subdir}' not found in repository ${repoURL}`
            );
          }
          // Hoist the subdir's contents up to the project root.
          const files = await fs.readdir(subdirPath);
          for (const file of files) {
            await fs.move(
              path.join(subdirPath, file),
              path.join(projectPath, file),
              { overwrite: true }
            );
          }
          await fs.remove(subdirPath);
        } else {
          // Shallow clone into a temp dir, then copy just the subdir over
          // (excluding git metadata).
          tempCloneDir = await fs.mkdtemp(path.join(parentDir, "gitclone-"));
          await git.clone(finalUrl, tempCloneDir, [
            "--branch",
            repoInfo.version,
            "--depth",
            "1",
            "--single-branch"
          ]);
          const srcSubdir = path.join(tempCloneDir, repoInfo.subdir);
          if (!await fs.pathExists(srcSubdir)) {
            throw new Error(
              `Subdirectory '${repoInfo.subdir}' not found in repository ${repoURL}`
            );
          }
          await fs.copy(srcSubdir, projectPath, {
            filter: (src) => !src.includes(`${path.sep}.git`)
          });
        }
      } else {
        // Whole-repo clone; shallow unless git history should be preserved.
        const cloneOptions = ["--branch", repoInfo.version];
        if (!preserveGit) {
          cloneOptions.push("--depth", "1", "--single-branch");
        }
        await git.clone(finalUrl, projectPath, cloneOptions);
      }
      if (!repoInfo.subdir) {
        if (!preserveGit) {
          // Drop the cloned history and optionally re-init a fresh repo.
          await fs.remove(path.join(projectPath, ".git"));
          if (config) {
            relinka("verbose", "[D] initGitDir");
            await initGitDir({
              cwd,
              isDev,
              projectName,
              projectPath,
              allowReInit: true,
              createCommit: true,
              config,
              isTemplateDownload
            });
          }
        } else {
          // Windows: mark .git hidden (no-op elsewhere, per relifso).
          await setHiddenAttribute(path.join(projectPath, ".git"));
        }
      } else {
        if (preserveGit) {
          await setHiddenAttribute(path.join(projectPath, ".git"));
        }
      }
    } finally {
      // Always clean up the temporary clone directory, even on failure.
      if (tempCloneDir && await fs.pathExists(tempCloneDir)) {
        await fs.remove(tempCloneDir);
      }
    }
  }
  if (hasConfig) {
    // NOTE(review): source and destination are the same path — this move is a
    // no-op. It presumably should restore the config moved aside earlier
    // (e.g. from the backup path or parentDir) back into the project; verify.
    await fs.move(projectConfigPath, projectConfigPath, {
      overwrite: true
    });
  }
  if (install) {
    relinka("info", "Installing dependencies...");
    await installDependencies({
      cwd: projectPath,
      silent: false
    });
  }
  relinka("verbose", "Repository downloaded successfully!");
  const durationSeconds = (Date.now() - startTime) / 1e3;
  const result = { source: repoURL, dir: projectPath };
  if (returnTime) result.time = durationSeconds;
  if (returnSize) {
    // Include .git in the size only when history was preserved.
    const folderSizeBytes = await getFolderSize(
      projectPath,
      preserveGit ? [] : [".git"]
    );
    const sizeMB = parseFloat((folderSizeBytes / (1024 * 1024)).toFixed(2));
    result.size = sizeMB;
    result.sizePretty = prettyBytes(folderSizeBytes);
  }
  if (returnConcurrency) result.concurrency = maxConcurrentProcesses;
  return result;
}