/*
 * tiged — Straightforward project scaffolding
 * Version: (unspecified)
 * Bundled build: 615 lines (533 loc) • 15.4 kB — JavaScript
 */
;
var fs = require('fs');
var path = require('path');
var tar = require('tar');
var colorette = require('colorette');
var EventEmitter = require('events');
var homeOrTmp = require('home-or-tmp');
var https = require('https');
var child_process = require('child_process');
var URL = require('url');
var Agent = require('https-proxy-agent');
var sander = require('sander');
var rimraf = require('rimraf');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
var tar__default = /*#__PURE__*/_interopDefaultLegacy(tar);
var EventEmitter__default = /*#__PURE__*/_interopDefaultLegacy(EventEmitter);
var homeOrTmp__default = /*#__PURE__*/_interopDefaultLegacy(homeOrTmp);
var https__default = /*#__PURE__*/_interopDefaultLegacy(https);
var child_process__default = /*#__PURE__*/_interopDefaultLegacy(child_process);
var URL__default = /*#__PURE__*/_interopDefaultLegacy(URL);
var Agent__default = /*#__PURE__*/_interopDefaultLegacy(Agent);
var rimraf__default = /*#__PURE__*/_interopDefaultLegacy(rimraf);
// Name of the scratch directory (inside the per-repo cache dir) used to
// stash the destination's files while directive actions run.
const tmpDirName = 'tmp';
// Config file a template repo may ship to declare post-clone directives;
// it is read and then removed from the cloned output.
const degitConfigName = 'degit.json';
// Thin wrapper over rimraf's synchronous recursive delete.
const rimrafSync = dir =>
	rimraf__default['default'].sync(dir);
/**
 * Error subclass used throughout degit so failures carry a machine-readable
 * `code` (e.g. 'DEST_NOT_EMPTY', 'MISSING_REF', 'BAD_SRC') alongside the
 * human-readable message.
 *
 * @param {string} message - description shown to the user
 * @param {object} [opts] - extra properties (code, ref, url, original, …)
 *   copied onto the error instance
 */
class DegitError extends Error {
	constructor(message, opts) {
		super(message);
		// Fix: identify the subclass in `err.name` and stack traces
		// (previously reported as plain "Error").
		this.name = 'DegitError';
		Object.assign(this, opts);
	}
}
/**
 * Requires a module, returning `null` instead of throwing when it cannot be
 * loaded (missing file, parse error, …).
 *
 * @param {string} file - path or module id passed to `require`
 * @param {object} [opts] - set `clearCache: true` to drop any cached copy
 *   first, forcing a fresh read from disk
 * @returns {*} the module's exports, or `null` on any failure
 */
function tryRequire(file, opts) {
	const shouldClearCache = Boolean(opts) && opts.clearCache === true;
	try {
		if (shouldClearCache) {
			delete require.cache[require.resolve(file)];
		}
		return require(file);
	} catch (_err) {
		// Deliberate best-effort: callers treat `null` as "nothing to load".
		return null;
	}
}
/**
 * Promisified `child_process.exec`.
 *
 * @param {string} command - shell command to run
 * @returns {Promise<{stdout: string, stderr: string}>} resolves with the
 *   captured output; rejects with the exec error on non-zero exit
 */
function exec(command) {
	return new Promise((resolve, reject) => {
		const onDone = (err, stdout, stderr) => {
			if (err) {
				reject(err);
			} else {
				resolve({ stdout, stderr });
			}
		};
		child_process__default['default'].exec(command, onDone);
	});
}
/**
 * Creates a directory and all missing ancestors (like `mkdir -p`).
 * Already-existing directories are left untouched.
 *
 * @param {string} dir - absolute or relative directory path
 */
function mkdirp(dir) {
	// Collect the chain of path segments, root-most first. The loop stops
	// when dirname() reaches a fixed point ('/' or '.'), which is never
	// created itself.
	const chain = [];
	let current = dir;
	while (path__default['default'].dirname(current) !== current) {
		chain.unshift(current);
		current = path__default['default'].dirname(current);
	}
	for (const segment of chain) {
		try {
			fs__default['default'].mkdirSync(segment);
		} catch (err) {
			// An existing segment is fine; anything else is fatal.
			if (err.code !== 'EEXIST') throw err;
		}
	}
}
/**
 * Downloads `url` to the file at `dest`, following HTTP redirects.
 *
 * @param {string} url - https URL to fetch
 * @param {string} dest - path the response body is written to
 * @param {string} [proxy] - optional proxy URL; when set, the request is
 *   tunnelled through an https-proxy-agent
 * @returns {Promise<void>}
 */
function fetch(url, dest, proxy) {
	return new Promise((fulfil, reject) => {
		let options = url;
		if (proxy) {
			// NOTE(review): parsedUrl.host may include a port, which is then
			// passed as `hostname`; fine for the usual host-only repo URLs.
			const parsedUrl = URL__default['default'].parse(url);
			options = {
				hostname: parsedUrl.host,
				path: parsedUrl.path,
				agent: new Agent__default['default'](proxy)
			};
		}
		https__default['default']
			.get(options, response => {
				const code = response.statusCode;
				if (code >= 400) {
					// Fix: reject with a real Error (was a plain object),
					// keeping the `code`/`message` properties callers read.
					const err = new Error(response.statusMessage);
					err.code = code;
					reject(err);
				} else if (code >= 300) {
					// Redirect: follow the Location header recursively.
					// Fix: guard against a 3xx response without Location,
					// which previously recursed on `undefined`.
					const location = response.headers.location;
					if (!location) {
						const err = new Error(`received ${code} response without Location header`);
						err.code = code;
						reject(err);
						return;
					}
					fetch(location, dest, proxy).then(fulfil, reject);
				} else {
					// Success: stream the body straight to disk.
					response
						.pipe(fs__default['default'].createWriteStream(dest))
						.on('finish', () => fulfil())
						.on('error', reject);
				}
			})
			.on('error', reject);
	});
}
/**
 * Moves everything currently in `dest` into a scratch directory
 * (`<dir>/tmp`) so a 'clone' directive can re-clone over `dest`;
 * `unstashFiles` later merges the stash back.
 *
 * @param {string} dir - per-repo cache directory that hosts the stash
 * @param {string} dest - destination whose contents are stashed
 */
function stashFiles(dir, dest) {
	const tmpDir = path__default['default'].join(dir, tmpDirName);
	try {
		// Drop any stale stash left over from a previous run.
		rimrafSync(tmpDir);
	} catch (e) {
		// A missing stash directory is fine; rethrow anything else.
		if (e.errno !== -2 && e.syscall !== "rmdir" && e.code !== "ENOENT") {
			throw e;
		}
	}
	mkdirp(tmpDir);
	fs__default['default'].readdirSync(dest).forEach(file => {
		const filePath = path__default['default'].join(dest, file);
		const targetPath = path__default['default'].join(tmpDir, file);
		const isDir = fs__default['default'].lstatSync(filePath).isDirectory();
		if (isDir) {
			// sander copies directories recursively; then remove the original.
			sander.copydirSync(filePath).to(targetPath);
			rimrafSync(filePath);
		} else {
			fs__default['default'].copyFileSync(filePath, targetPath);
			fs__default['default'].unlinkSync(filePath);
		}
	});
}
/**
 * Restores the files stashed by `stashFiles` from `<dir>/tmp` back into
 * `dest`, then deletes the stash directory. The degit config file itself is
 * discarded rather than restored.
 *
 * @param {string} dir - per-repo cache directory containing the `tmp` stash
 * @param {string} dest - destination directory to restore into
 */
function unstashFiles(dir, dest) {
	const tmpDir = path__default['default'].join(dir, tmpDirName);
	fs__default['default'].readdirSync(tmpDir).forEach(filename => {
		const tmpFile = path__default['default'].join(tmpDir, filename);
		const targetPath = path__default['default'].join(dest, filename);
		const isDir = fs__default['default'].lstatSync(tmpFile).isDirectory();
		if (isDir) {
			sander.copydirSync(tmpFile).to(targetPath);
			rimrafSync(tmpFile);
		} else {
			// Consistency fix: use the degitConfigName constant instead of the
			// hard-coded 'degit.json' literal (same value, single source of truth).
			if (filename !== degitConfigName) {
				fs__default['default'].copyFileSync(tmpFile, targetPath);
			}
			fs__default['default'].unlinkSync(tmpFile);
		}
	});
	rimrafSync(tmpDir);
}
// Root of the on-disk cache: ~/.degit (falls back to a tmp dir when no
// home directory is available).
const base = path__default['default'].join(homeOrTmp__default['default'], '.degit');
// Supported clone strategies.
const validModes = new Set(['tar', 'git']);
// Factory mirroring the public degit API: degit(src, opts) -> Degit instance.
function degit(src, opts) {
	return new Degit(src, opts);
}
/**
 * Orchestrates cloning a remote template repository into a destination
 * directory, either by downloading a tarball ('tar' mode) or via
 * `git clone` ('git' mode). Emits 'info' and 'warn' events that the CLI
 * renders, and executes post-clone directives from degit.json.
 */
class Degit extends EventEmitter__default['default'] {
	constructor(src, opts = {}) {
		super();
		this.src = src;
		this.cache = opts.cache; // resolve refs from the local cache only
		this.force = opts.force; // allow cloning into a non-empty directory
		this.verbose = opts.verbose;
		this.proxy = process.env.https_proxy; // TODO allow setting via --proxy
		this.subgroup = opts.subgroup; // GitLab subgroup support
		this.subdir = opts["sub-directory"];
		this.repo = parse(src);
		if (this.subgroup) {
			// For GitLab subgroups, what parse() saw as a subdirectory is
			// actually part of the project path — fold it back into the
			// repo name/URLs and re-apply any explicit sub-directory option.
			this.repo.subgroup = true;
			this.repo.name = this.repo.subdir.slice(1);
			this.repo.url = this.repo.url + this.repo.subdir;
			this.repo.ssh = this.repo.ssh + this.repo.subdir + ".git";
			this.repo.subdir = null;
			if (this.subdir) {
				this.repo.subdir = this.subdir.startsWith('/') ? this.subdir : `/${this.subdir}`;
			}
		}
		this.mode = opts.mode || this.repo.mode;
		if (!validModes.has(this.mode)) {
			throw new Error(`Valid modes are ${Array.from(validModes).join(', ')}`);
		}
		this._hasStashed = false;
		// Handlers for degit.json directives, keyed by the directive's
		// `action` field.
		this.directiveActions = {
			// 'clone': clone another repo over the current destination,
			// stashing the existing output first so it can be merged back
			// afterwards by unstashFiles().
			clone: async (dir, dest, action) => {
				if (this._hasStashed === false) {
					stashFiles(dir, dest);
					this._hasStashed = true;
				}
				const opts = Object.assign(
					{ force: true },
					{ cache: action.cache, verbose: action.verbose }
				);
				const d = degit(action.src, opts);
				// Mirror the nested clone's events to stderr, rewording
				// 'options.x' as the CLI flag '--x'.
				d.on('info', event => {
					console.error(colorette.cyan(`> ${event.message.replace('options.', '--')}`));
				});
				d.on('warn', event => {
					console.error(
						colorette.magenta(`! ${event.message.replace('options.', '--')}`)
					);
				});
				await d.clone(dest).catch(err => {
					console.error(colorette.red(`! ${err.message}`));
					process.exit(1);
				});
			},
			// 'remove': delete listed files from the destination.
			remove: this.remove.bind(this)
		};
	}
	// Reads (and deletes) degit.json from the cloned output; returns the
	// parsed directives, or false when no config file is present.
	_getDirectives(dest) {
		const directivesPath = path__default['default'].resolve(dest, degitConfigName);
		const directives =
			tryRequire(directivesPath, { clearCache: true }) || false;
		if (directives) {
			fs__default['default'].unlinkSync(directivesPath);
		}
		return directives;
	}
	/**
	 * Clones the repo into `dest` using the configured mode, then runs any
	 * degit.json directives. Emits 'info' with code 'SUCCESS' on completion.
	 *
	 * @param {string} dest - destination directory
	 */
	async clone(dest) {
		this._checkDirIsEmpty(dest);
		const { repo } = this;
		// Per-repo cache directory: <base>/<site>/<user>/<name>.
		const dir = path__default['default'].join(base, repo.site, repo.user, repo.name);
		if (this.mode === 'tar') {
			await this._cloneWithTar(dir, dest);
		} else {
			await this._cloneWithGit(dir, dest);
		}
		this._info({
			code: 'SUCCESS',
			message: `cloned ${colorette.bold(repo.user + '/' + repo.name)}#${colorette.bold(repo.ref)}${
				dest !== '.' ? ` to ${dest}` : ''
			}`,
			repo,
			dest
		});
		const directives = this._getDirectives(dest);
		if (directives) {
			for (const d of directives) {
				// TODO, can this be a loop with an index to pass for better error messages?
				await this.directiveActions[d.action](dir, dest, d);
			}
			if (this._hasStashed === true) {
				unstashFiles(dir, dest);
			}
		}
	}
	/**
	 * Directive action: removes the files listed in `action.files` from
	 * `dest`. Warns ('FILE_DOES_NOT_EXIST') for missing entries and reports
	 * what was removed ('REMOVED').
	 */
	remove(dir, dest, action) {
		let files = action.files;
		// Accept a single filename as well as an array.
		if (!Array.isArray(files)) {
			files = [files];
		}
		const removedFiles = files
			.map(file => {
				const filePath = path__default['default'].resolve(dest, file);
				if (fs__default['default'].existsSync(filePath)) {
					const isDir = fs__default['default'].lstatSync(filePath).isDirectory();
					if (isDir) {
						rimrafSync(filePath);
						return file + '/';
					} else {
						fs__default['default'].unlinkSync(filePath);
						return file;
					}
				} else {
					this._warn({
						code: 'FILE_DOES_NOT_EXIST',
						message: `action wants to remove ${colorette.bold(
							file
						)} but it does not exist`
					});
					return null;
				}
			})
			.filter(d => d);
		if (removedFiles.length > 0) {
			this._info({
				code: 'REMOVED',
				message: `removed: ${colorette.bold(removedFiles.map(d => colorette.bold(d)).join(', '))}`
			});
		}
	}
	// Ensures `dir` is empty or absent. With opts.force the directory is
	// wiped instead; otherwise a DegitError('DEST_NOT_EMPTY') is thrown.
	_checkDirIsEmpty(dir) {
		try {
			const files = fs__default['default'].readdirSync(dir);
			if (files.length > 0) {
				if (this.force) {
					this._info({
						code: 'DEST_NOT_EMPTY',
						message: `destination directory is not empty. Using options.force, continuing`
					});
					rimrafSync(dir);
				} else {
					throw new DegitError(
						`destination directory is not empty, aborting. Use options.force to override`,
						{
							code: 'DEST_NOT_EMPTY'
						}
					);
				}
			} else {
				this._verbose({
					code: 'DEST_IS_EMPTY',
					message: `destination directory is empty`
				});
			}
		} catch (err) {
			// A missing destination is fine (it will be created); the
			// DEST_NOT_EMPTY error above falls through this check and is
			// rethrown here.
			if (err.code !== 'ENOENT') throw err;
		}
	}
	// Event helpers — the CLI listens for 'info' and 'warn'.
	_info(info) {
		this.emit('info', info);
	}
	_warn(info) {
		this.emit('warn', info);
	}
	// Forwarded to 'info' only when constructed with opts.verbose.
	_verbose(info) {
		if (this.verbose) this._info(info);
	}
	// Resolves repo.ref to a commit hash via the network, falling back to
	// the local cache (`cached` = map.json contents) when the remote is
	// unreachable.
	async _getHash(repo, cached) {
		try {
			const refs = await fetchRefs(repo);
			if (repo.ref === 'HEAD') {
				return refs.find(ref => ref.type === 'HEAD').hash;
			}
			return this._selectRef(refs, repo.ref);
		} catch (err) {
			this._warn(err);
			this._verbose(err.original);
			return this._getHashFromCache(repo, cached);
		}
	}
	// Looks up repo.ref in the cached ref->hash map; undefined when absent.
	_getHashFromCache(repo, cached) {
		if (repo.ref in cached) {
			const hash = cached[repo.ref];
			this._info({
				code: 'USING_CACHE',
				message: `using cached commit hash ${hash}`
			});
			return hash;
		}
	}
	// Picks the hash for `selector`: exact ref-name match first; otherwise,
	// for selectors of at least 8 characters, a commit-hash prefix match.
	_selectRef(refs, selector) {
		for (const ref of refs) {
			if (ref.name === selector) {
				this._verbose({
					code: 'FOUND_MATCH',
					message: `found matching commit hash: ${ref.hash}`
				});
				return ref.hash;
			}
		}
		if (selector.length < 8) return null;
		for (const ref of refs) {
			if (ref.hash.startsWith(selector)) return ref.hash;
		}
	}
	// 'tar' mode: download (or reuse) <hash>.tar.gz in the cache dir, then
	// extract it into dest, optionally only a subdirectory of the archive.
	async _cloneWithTar(dir, dest) {
		const { repo } = this;
		const cached = tryRequire(path__default['default'].join(dir, 'map.json')) || {};
		// With opts.cache, skip the network entirely and use the cached map.
		const hash = this.cache
			? this._getHashFromCache(repo, cached)
			: await this._getHash(repo, cached);
		// Archives unpack under '<name>-<hash>/', so a requested subdir maps
		// to '<name>-<hash><subdir>' inside the tarball.
		const subdir = repo.subdir ? `${repo.name}-${hash}${repo.subdir}` : null;
		if (!hash) {
			// TODO 'did you mean...?'
			throw new DegitError(`could not find commit hash for ${repo.ref}`, {
				code: 'MISSING_REF',
				ref: repo.ref
			});
		}
		const file = `${dir}/${hash}.tar.gz`;
		// Each host has its own archive URL scheme.
		const url =
			repo.site === 'gitlab'
				? `${repo.url}/-/archive/${hash}/${repo.name}-${hash}.tar.gz`
				: repo.site === 'bitbucket'
				? `${repo.url}/get/${hash}.tar.gz`
				: `${repo.url}/archive/${hash}.tar.gz`;
		try {
			if (!this.cache) {
				try {
					// statSync throws when the tarball is not cached yet.
					fs__default['default'].statSync(file);
					this._verbose({
						code: 'FILE_EXISTS',
						message: `${file} already exists locally`
					});
				} catch (err) {
					mkdirp(path__default['default'].dirname(file));
					if (this.proxy) {
						this._verbose({
							code: 'PROXY',
							message: `using proxy ${this.proxy}`
						});
					}
					this._verbose({
						code: 'DOWNLOADING',
						message: `downloading ${url} to ${file}`
					});
					await fetch(url, file, this.proxy);
				}
			}
		} catch (err) {
			throw new DegitError(`could not download ${url}`, {
				code: 'COULD_NOT_DOWNLOAD',
				url,
				original: err
			});
		}
		updateCache(dir, repo, hash, cached);
		this._verbose({
			code: 'EXTRACTING',
			message: `extracting ${
				subdir ? repo.subdir + ' from ' : ''
			}${file} to ${dest}`
		});
		mkdirp(dest);
		await untar(file, dest, subdir);
	}
	// 'git' mode: shallow-clone via SSH. For a subdirectory request, clone
	// into a temp dir inside dest, move the subdir's files up, and discard
	// the rest; otherwise clone directly and strip the .git directory.
	async _cloneWithGit(dir, dest) {
		if (this.repo.subdir) {
			fs__default['default'].mkdirSync(dest);
			const tempDir = fs__default['default'].mkdtempSync(`${dest}/.degit`);
			await exec(`git clone --depth 1 ${this.repo.ssh} ${tempDir}`);
			const files = fs__default['default'].readdirSync(`${tempDir}${this.repo.subdir}`);
			files.forEach(file => {
				fs__default['default'].renameSync(
					`${tempDir}${this.repo.subdir}/${file}`,
					`${dest}/${file}`
				);
			});
			rimrafSync(tempDir);
		} else {
			await exec(`git clone --depth 1 ${this.repo.ssh} ${dest}`);
			rimrafSync(path__default['default'].resolve(dest, '.git'));
		}
	}
}
// Hosts degit can fetch tarballs from, mapped to their TLDs.
const supported = {
	github: '.com',
	gitlab: '.com',
	bitbucket: '.com',
	'git.sr.ht': '.ht',
};
/**
 * Parses a degit source descriptor — e.g. 'user/repo#branch',
 * 'https://gitlab.com/user/repo', 'git@github.com:user/repo.git',
 * 'user/repo/sub/dir' — into its components.
 *
 * @param {string} src - source descriptor
 * @returns {{site: string, user: string, name: string, ref: string,
 *   url: string, ssh: string, subdir: (string|undefined), mode: string}}
 * @throws {DegitError} code 'BAD_SRC' when the string cannot be parsed
 */
function parse(src) {
	const pattern = /^(?:(?:https:\/\/)?([^:/]+\.[^:/]+)\/|git@([^:/]+)[:/]|([^/]+):)?([^/\s]+)\/([^/\s#]+)(?:((?:\/[^/\s#]+)+))?(?:\/)?(?:#(.+))?/;
	const m = pattern.exec(src);
	if (m === null) {
		throw new DegitError(`could not parse ${src}`, {
			code: 'BAD_SRC'
		});
	}
	// Capture groups: 1 = https host, 2 = ssh host, 3 = shorthand host,
	// 4 = user, 5 = repo name, 6 = subdir, 7 = ref after '#'.
	const [, httpsHost, sshHost, shorthandHost, user, rawName, subdir, rawRef] = m;
	const site = httpsHost || sshHost || shorthandHost || 'github.com';
	const tldMatch = /\.([a-z]{2,})$/.exec(site);
	const tld = tldMatch ? tldMatch[0] : null;
	const siteName = tld ? site.replace(tld, '') : site;
	const name = rawName.replace(/\.git$/, '');
	const ref = rawRef || 'HEAD';
	// Recover the full domain from the short site name when needed.
	const domain = `${siteName}${tld || supported[siteName] || supported[site] || ''}`;
	return {
		site: siteName,
		user,
		name,
		ref,
		url: `https://${domain}/${user}/${name}`,
		ssh: `git@${domain}:${user}/${name}`,
		subdir,
		// Known hosts serve tarballs; anything else falls back to git clone.
		mode: supported[siteName] || supported[site] ? 'tar' : 'git'
	};
}
/**
 * Extracts a .tar.gz archive into `dest`.
 *
 * @param {string} file - path to the tarball
 * @param {string} dest - directory to extract into (must already exist)
 * @param {string} [subdir] - when given (e.g. 'repo-<hash>/sub'), only that
 *   subtree is extracted and its leading path components are stripped
 * @returns {Promise<void>}
 */
async function untar(file, dest, subdir = null) {
	// Strip the archive's top-level '<name>-<hash>/' folder — or, when
	// extracting a subdirectory, every component of the subdirectory path.
	const strip = subdir ? subdir.split('/').length : 1;
	const paths = subdir ? [subdir] : [];
	return tar__default['default'].extract({ file, strip, C: dest }, paths);
}
/**
 * Lists the remote refs of a repository via `git ls-remote`.
 *
 * @param {object} repo - parsed repo (uses repo.url)
 * @returns {Promise<Array<{type: string, name?: string, hash: string}>>}
 * @throws {DegitError} code 'COULD_NOT_FETCH' when the remote is
 *   unreachable or its output cannot be parsed
 */
async function fetchRefs(repo) {
	// Parsing stays inside the try so that a malformed ref line is also
	// surfaced as COULD_NOT_FETCH (with the BAD_REF error as `original`).
	try {
		const { stdout } = await exec(`git ls-remote ${repo.url}`);
		const refs = [];
		for (const row of stdout.split('\n')) {
			if (!row) continue;
			// ls-remote rows are '<hash>\t<refname>'.
			const [hash, ref] = row.split('\t');
			if (ref === 'HEAD') {
				refs.push({ type: 'HEAD', hash });
				continue;
			}
			const match = /refs\/(\w+)\/(.+)/.exec(ref);
			if (!match) {
				throw new DegitError(`could not parse ${ref}`, {
					code: 'BAD_REF'
				});
			}
			// Normalize git's namespace names to degit's type labels.
			let type = match[1];
			if (type === 'heads') type = 'branch';
			else if (type === 'refs') type = 'ref';
			refs.push({ type, name: match[2], hash });
		}
		return refs;
	} catch (error) {
		throw new DegitError(`could not fetch remote ${repo.url}`, {
			code: 'COULD_NOT_FETCH',
			url: repo.url,
			original: error
		});
	}
}
/**
 * Records a cache update: logs the access time for `repo.ref` in
 * access.json and maps the ref to `hash` in map.json, deleting the
 * superseded tarball when it is no longer referenced.
 *
 * @param {string} dir - per-repo cache directory
 * @param {object} repo - parsed repo (uses repo.ref)
 * @param {string} hash - commit hash now associated with repo.ref
 * @param {object} cached - in-memory contents of map.json (mutated here)
 */
function updateCache(dir, repo, hash, cached) {
	// Update the access log regardless of whether the mapping changed.
	const accessPath = path__default['default'].join(dir, 'access.json');
	const logs = tryRequire(accessPath) || {};
	logs[repo.ref] = new Date().toISOString();
	fs__default['default'].writeFileSync(accessPath, JSON.stringify(logs, null, '  '));

	// Nothing further to do if the ref already points at this hash.
	if (cached[repo.ref] === hash) return;

	const oldHash = cached[repo.ref];
	if (oldHash) {
		// Delete the old tarball unless the new hash is still referenced
		// elsewhere in the map.
		const used = Object.keys(cached).some(key => cached[key] === hash);
		if (!used) {
			// we no longer need this tar file
			try {
				fs__default['default'].unlinkSync(path__default['default'].join(dir, `${oldHash}.tar.gz`));
			} catch (err) {
				// best-effort cleanup — the file may already be gone
			}
		}
	}

	cached[repo.ref] = hash;
	fs__default['default'].writeFileSync(
		path__default['default'].join(dir, 'map.json'),
		JSON.stringify(cached, null, '  ')
	);
}
// Public CommonJS surface shared by the CLI and programmatic entry points.
exports.base = base;
exports.degit = degit;
exports.tryRequire = tryRequire;
//# sourceMappingURL=index-e42359c2.js.map