/*
 * isomorphic-git
 * Version: 0.17.1
 * A pure JavaScript implementation of git for node and browsers!
 * (Bundled distribution; original listing: 1,703 lines (1,599 loc) • 122 kB, JavaScript)
 */
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var path = _interopDefault(require('path'));
var debug = _interopDefault(require('debug'));
var pify = _interopDefault(require('pify'));
var BufferCursor = _interopDefault(require('buffercursor'));
var sortby = _interopDefault(require('lodash.sortby'));
var shasum = _interopDefault(require('shasum'));
var crc32 = _interopDefault(require('crc/lib/crc32.js'));
var applyDelta = _interopDefault(require('git-apply-delta'));
var listpack = _interopDefault(require('git-list-pack'));
var marky = require('marky');
var pako = _interopDefault(require('pako'));
var stream = require('stream');
var pad = _interopDefault(require('pad'));
var streamSource = _interopDefault(require('stream-source/index.node.js'));
var ignore = _interopDefault(require('ignore'));
var AsyncLock = _interopDefault(require('async-lock'));
var concat = _interopDefault(require('simple-concat'));
var simpleGet = _interopDefault(require('simple-get'));
var cleanGitRef = require('clean-git-ref');
var split2 = _interopDefault(require('split2'));
var through2 = _interopDefault(require('through2'));
var createHash = _interopDefault(require('sha.js'));
/**
 * Use with push and fetch to set Basic Authentication headers.
 *
 * @link https://isomorphic-git.github.io/docs/utils_auth.html
 */
function auth (username, password) {
  // Allow specifying it as one argument (mostly for CLI inputability)
  if (password === undefined) {
    const separator = username.indexOf(':');
    if (separator === -1) {
      // Enables the .auth(GITHUB_TOKEN) no-username shorthand
      password = '';
    } else {
      password = username.slice(separator + 1);
      username = username.slice(0, separator);
    }
  }
  return { username, password }
}
/*::
type Node = {
  type: string,
  fullpath: string,
  basename: string,
  metadata: Object, // mode, oid
  parent?: Node,
  children: Array<Node>
}
*/
// Build a tree of 'tree'/'blob' nodes rooted at '.' from a flat list of
// { path, ... } entries, creating any missing intermediate directories.
function flatFileListToDirectoryStructure (files) {
  const nodesByPath = new Map();
  const ensureDir = name => {
    if (!nodesByPath.has(name)) {
      const node = {
        type: 'tree',
        fullpath: name,
        basename: path.basename(name),
        metadata: {},
        children: []
      };
      // Register the node BEFORE recursing so that the root '.' (whose
      // dirname is itself) doesn't recurse forever.
      nodesByPath.set(name, node);
      node.parent = ensureDir(path.dirname(name));
      if (node.parent && node.parent !== node) node.parent.children.push(node);
    }
    return nodesByPath.get(name)
  };
  const addFile = (name, metadata) => {
    if (!nodesByPath.has(name)) {
      const node = {
        type: 'blob',
        fullpath: name,
        basename: path.basename(name),
        metadata,
        // This recursively generates any missing parent folders.
        parent: ensureDir(path.dirname(name)),
        children: []
      };
      if (node.parent) node.parent.children.push(node);
      nodesByPath.set(name, node);
    }
    return nodesByPath.get(name)
  };
  ensureDir('.');
  for (const file of files) {
    addFile(file.path, file);
  }
  return nodesByPath.get('.')
}
// Namespaced logger via the `debug` package (enabled by its DEBUG env convention).
const log = debug('isomorphic-git');
// Route output through console.log instead of debug's default stderr writer.
log.log = console.log.bind(console);
/**
 * Use with push and fetch to set Basic Authentication headers.
 *
 * @link https://isomorphic-git.github.io/docs/utils_oauth2.html
 */
function oauth2 (company, token) {
  // Each provider expects the OAuth2 token smuggled into a different half
  // of the Basic Auth username/password pair.
  const formats = {
    github: { username: token, password: 'x-oauth-basic' },
    bitbucket: { username: 'x-token-auth', password: token },
    gitlab: { username: 'oauth2', password: token }
  };
  if (!Object.prototype.hasOwnProperty.call(formats, company)) {
    throw new Error(
      `I don't know how ${company} expects its Basic Auth headers to be formatted for OAuth2 usage. If you do, you can use the regular '.auth(username, password)' to set the basic auth header yourself.`
    )
  }
  return formats[company]
}
// Embedded package metadata (name and version of this bundled build).
const pkg = {
  name: 'isomorphic-git',
  version: '0.17.1'
};
/**
 * Resolve after `ms` milliseconds.
 *
 * @param {number} ms - delay in milliseconds
 * @returns {Promise<void>}
 */
function sleep (ms) {
  // The executor's `reject` parameter was unused (a timer cannot fail), and
  // the `async` wrapper was redundant since we already return a Promise.
  return new Promise(resolve => setTimeout(resolve, ms))
}
// Map of lockfile name -> pending setTimeout handle for delayed lock release
// (used by FileSystem's lock()/unlock() below).
const delayedReleases = new Map();
/**
 * This is just a collection of helper functions really. At least that's how it started.
 *
 * Wraps a Node-style callback `fs` implementation with promisified methods and
 * forgiving helpers: recursive mkdir, quiet rm/readdir, directory-based
 * lockfiles, and recursive file listing.
 */
class FileSystem {
  constructor (fs) {
    // If `fs` is already a FileSystem wrapper, reuse it rather than re-wrapping.
    if (typeof fs._readFile !== 'undefined') return fs
    this._readFile = pify(fs.readFile.bind(fs));
    this._writeFile = pify(fs.writeFile.bind(fs));
    this._mkdir = pify(fs.mkdir.bind(fs));
    this._rmdir = pify(fs.rmdir.bind(fs));
    this._unlink = pify(fs.unlink.bind(fs));
    this._stat = pify(fs.stat.bind(fs));
    this._lstat = pify(fs.lstat.bind(fs));
    this._readdir = pify(fs.readdir.bind(fs));
  }
  /**
   * Return true if a file exists, false if it doesn't exist.
   * Rethrows errors that aren't related to file existence.
   */
  async exists (filepath, options = {}) {
    try {
      await this._stat(filepath);
      return true
    } catch (err) {
      if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
        return false
      } else {
        console.log('Unhandled error in "FileSystem.exists()" function', err);
        throw err
      }
    }
  }
  /**
   * Return the contents of a file if it exists, otherwise returns null.
   */
  async read (filepath, options = {}) {
    try {
      let buffer = await this._readFile(filepath, options);
      return buffer
    } catch (err) {
      // Deliberate best-effort read: any failure is reported as "no file".
      return null
    }
  }
  /**
   * Write a file (creating missing directories if need be) without throwing errors.
   */
  async write (filepath, contents, options = {}) {
    try {
      await this._writeFile(filepath, contents, options);
      return
    } catch (err) {
      // Hmm. Let's try mkdirp and try again.
      await this.mkdir(path.dirname(filepath));
      await this._writeFile(filepath, contents, options);
    }
  }
  /**
   * Make a directory (or series of nested directories) without throwing an error if it already exists.
   */
  async mkdir (filepath) {
    try {
      await this._mkdir(filepath);
      return
    } catch (err) {
      // If err is null then operation succeeded!
      if (err === null) return
      // If the directory already exists, that's OK!
      if (err.code === 'EEXIST') return
      // If we got a "no such file or directory error" backup and try again.
      if (err.code === 'ENOENT') {
        let parent = path.dirname(filepath);
        // Check to see if we've gone too far
        if (parent === '.' || parent === '/' || parent === filepath) throw err
        // Infinite recursion, what could go wrong?
        await this.mkdir(parent);
        await this._mkdir(filepath);
      }
    }
  }
  /**
   * Delete a file without throwing an error if it is already deleted.
   */
  async rm (filepath) {
    try {
      await this._unlink(filepath);
    } catch (err) {
      if (err.code !== 'ENOENT') throw err
    }
  }
  /**
   * Read a directory without throwing an error if the directory doesn't exist
   */
  async readdir (filepath) {
    try {
      return await this._readdir(filepath)
    } catch (err) {
      return []
    }
  }
  /**
   * Return a flat list of all the files nested inside a directory
   *
   * Based on an elegant concurrent recursive solution from SO
   * https://stackoverflow.com/a/45130990/2168416
   */
  async readdirDeep (dir) {
    const subdirs = await this._readdir(dir);
    const files = await Promise.all(
      subdirs.map(async subdir => {
        const res = dir + '/' + subdir;
        return (await this._stat(res)).isDirectory()
          ? this.readdirDeep(res)
          : res
      })
    );
    return files.reduce((a, f) => a.concat(f), [])
  }
  /**
   * Acquire `<filename>.lock` by creating it as a directory (presumably
   * relying on mkdir's atomicity). Retries up to `triesLeft` times, sleeping
   * 100ms between attempts.
   */
  async lock (filename, triesLeft = 3) {
    // check to see if we still have it
    if (delayedReleases.has(filename)) {
      clearTimeout(delayedReleases.get(filename));
      delayedReleases.delete(filename);
      return
    }
    if (triesLeft === 0) {
      // BUGFIX: the template placeholders had been corrupted to the literal
      // text "$(unknown)"; restore interpolation of the actual filename.
      throw new Error(
        `Unable to acquire lockfile '${filename}'. Exhausted tries.`
      )
    }
    try {
      // BUGFIX: was a literal `$(unknown).lock` path.
      await this._mkdir(`${filename}.lock`);
    } catch (err) {
      if (err.code === 'EEXIST') {
        await sleep(100);
        await this.lock(filename, triesLeft - 1);
      }
    }
  }
  /**
   * Schedule release of `<filename>.lock`: the directory is actually removed
   * after `delayRelease` ms so that an immediate re-acquire can reuse it.
   */
  async unlock (filename, delayRelease = 50) {
    if (delayedReleases.has(filename)) {
      throw new Error('Cannot double-release lockfile')
    }
    // Basically, we lie and say it was deleted ASAP.
    // But really we wait a bit to see if you want to acquire it again.
    delayedReleases.set(
      filename,
      setTimeout(async () => {
        delayedReleases.delete(filename);
        // BUGFIX: was a literal `$(unknown).lock` path.
        await this._rmdir(`${filename}.lock`);
      }, delayRelease)
    );
  }
}
// The amount of work that went into crafting these cases to handle
// -0 (just so we don't lose that information when parsing and reconstructing)
// but can also default to +0 was extraordinary.
function simpleSign (n) {
  // Like Math.sign, but never returns 0: the sign of a zero comes from its
  // sign bit (+0 -> 1, -0 -> -1). Non-numbers (NaN) default to 1.
  const sign = Math.sign(n);
  if (sign === 1 || sign === -1) return sign
  return Object.is(n, -0) ? -1 : 1
}
// Negate n, except that ±0 is passed through untouched (so -0 stays -0).
function negateExceptForZero (n) {
  if (n === 0) return n
  return -n
}
// Render a timezone offset in minutes (JavaScript Date convention) as git's
// "+HHMM"/"-HHMM" string. Git's sign convention is the negation of
// JavaScript's, so flip the sign first — keeping -0 distinct from +0.
function formatTimezoneOffset (minutes) {
  const flipped = minutes === 0 ? minutes : -minutes;
  const raw = Math.sign(flipped);
  // A zero's sign character comes from its sign bit; default is '+'.
  const sign = raw === 1 || raw === -1 ? raw : Object.is(flipped, -0) ? -1 : 1;
  let remaining = Math.abs(minutes);
  const hours = Math.floor(remaining / 60);
  remaining -= hours * 60;
  const hh = String(hours).padStart(2, '0');
  const mm = String(remaining).padStart(2, '0');
  return (sign === -1 ? '-' : '+') + hh + mm
}
// Parse git's "+HHMM"/"-HHMM" timezone string into minutes using the
// JavaScript Date sign convention.
function parseTimezoneOffset (offset) {
  const [, sign, hours, minutes] = offset.match(/(\+|-)(\d\d)(\d\d)/);
  const eastward =
    (sign === '+' ? 1 : -1) * (Number(hours) * 60 + Number(minutes));
  // Negate to match the Date convention — except zero, which is passed
  // through so "+0000" stays +0 and "-0000" stays -0.
  return eastward === 0 ? eastward : -eastward
}
// Parse a git author/committer line such as
//   "A U Thor <author@example.com> 1234567890 +0530"
// into { name, email, timestamp, timezoneOffset }.
function parseAuthor (author) {
  let [, name, email, timestamp, offset] = author.match(
    /^(.*) <(.*)> (.*) (.*)$/
  );
  return {
    name: name,
    email: email,
    timestamp: Number(timestamp), // numeric timestamp field of the author line
    timezoneOffset: parseTimezoneOffset(offset)
  }
}
// Canonicalize a commit/tag message: strip all CRs, drop leading blank
// lines, and end with exactly one trailing newline.
function normalize (str) {
  const noCarriageReturns = str.replace(/\r/g, '');
  const noLeadingNewlines = noCarriageReturns.replace(/^\n+/, '');
  return noLeadingNewlines.replace(/\n+$/, '') + '\n'
}
// Prefix every line of str with one space (the gpgsig continuation-line
// format), trimming surrounding whitespace and ending with a newline.
function indent (str) {
  const lines = str.trim().split('\n');
  const shifted = lines.map(line => ' ' + line);
  return shifted.join('\n') + '\n'
}
// Remove one leading space from each line (inverse of `indent`).
function outdent (str) {
  const lines = str.split('\n');
  const unshifted = lines.map(line => line.replace(/^ /, ''));
  return unshifted.join('\n')
}
// TODO: Make all functions have static async signature?
/**
 * Wrapper around the raw text of a git commit object. Can be constructed from
 * a string, a Buffer, or a parsed-commit-shaped object (which is rendered).
 */
class GitCommit {
  constructor (commit) {
    if (typeof commit === 'string') {
      this._commit = commit;
    } else if (Buffer.isBuffer(commit)) {
      this._commit = commit.toString('utf8');
    } else if (typeof commit === 'object') {
      this._commit = GitCommit.render(commit);
    } else {
      throw new Error('invalid type passed to GitCommit constructor')
    }
  }
  // Reassemble a commit from a signing payload plus a detached PGP signature,
  // inserting the signature as an indented 'gpgsig' header.
  static fromPayloadSignature ({ payload, signature }) {
    let headers = GitCommit.justHeaders(payload);
    let message = GitCommit.justMessage(payload);
    let commit = normalize(
      headers + '\ngpgsig' + indent(signature) + '\n' + message
    );
    return new GitCommit(commit)
  }
  static from (commit) {
    return new GitCommit(commit)
  }
  // The raw commit text as a Buffer (loose-object content).
  toObject () {
    return Buffer.from(this._commit, 'utf8')
  }
  // Todo: allow setting the headers and message
  headers () {
    return this.parseHeaders()
  }
  // Todo: allow setting the headers and message
  message () {
    return GitCommit.justMessage(this._commit)
  }
  // Parsed headers plus the normalized message, merged into one object.
  parse () {
    return Object.assign({ message: this.message() }, this.headers())
  }
  // Everything after the first blank line, normalized.
  static justMessage (commit) {
    return normalize(commit.slice(commit.indexOf('\n\n') + 2))
  }
  // Everything before the first blank line.
  static justHeaders (commit) {
    return commit.slice(0, commit.indexOf('\n\n'))
  }
  parseHeaders () {
    let headers = GitCommit.justHeaders(this._commit).split('\n');
    let hs = [];
    for (let h of headers) {
      if (h[0] === ' ') {
        // combine with previous header (without space indent)
        hs[hs.length - 1] += '\n' + h.slice(1);
      } else {
        hs.push(h);
      }
    }
    let obj = {};
    for (let h of hs) {
      let key = h.slice(0, h.indexOf(' '));
      let value = h.slice(h.indexOf(' ') + 1);
      // NOTE(review): duplicate header keys overwrite earlier ones here.
      // Canonical git emits one 'parent' header per parent of a merge commit,
      // so this would keep only the last parent line — confirm against
      // renderHeaders below, which writes all parents on a single line.
      obj[key] = value;
    }
    // The (single, possibly space-separated) parent header becomes an array.
    obj.parent = obj.parent ? obj.parent.split(' ') : [];
    if (obj.author) {
      obj.author = parseAuthor(obj.author);
    }
    if (obj.committer) {
      obj.committer = parseAuthor(obj.committer);
    }
    return obj
  }
  // Serialize a parsed-commit-shaped object back into header text.
  static renderHeaders (obj) {
    let headers = '';
    if (obj.tree) {
      headers += `tree ${obj.tree}\n`;
    } else {
      headers += `tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904\n`; // the null tree
    }
    if (obj.parent && obj.parent.length) {
      headers += 'parent';
      for (let p of obj.parent) {
        headers += ' ' + p;
      }
      headers += '\n';
    }
    let author = obj.author;
    headers += `author ${author.name} <${author.email}> ${
      author.timestamp
    } ${formatTimezoneOffset(author.timezoneOffset)}\n`;
    // The committer defaults to the author when absent.
    let committer = obj.committer || obj.author;
    headers += `committer ${committer.name} <${committer.email}> ${
      committer.timestamp
    } ${formatTimezoneOffset(committer.timezoneOffset)}\n`;
    if (obj.gpgsig) {
      headers += 'gpgsig' + indent(obj.gpgsig);
    }
    return headers
  }
  static render (obj) {
    return GitCommit.renderHeaders(obj) + '\n' + normalize(obj.message)
  }
  render () {
    return this._commit
  }
  // Commit text with the gpgsig header removed — i.e. the payload that was signed.
  withoutSignature () {
    let commit = normalize(this._commit);
    if (commit.indexOf('\ngpgsig') === -1) return commit
    let headers = commit.slice(0, commit.indexOf('\ngpgsig'));
    let message = commit.slice(
      commit.indexOf('-----END PGP SIGNATURE-----\n') +
      '-----END PGP SIGNATURE-----\n'.length
    );
    return normalize(headers + '\n' + message)
  }
  // Extract just the PGP signature block, with the gpgsig indent removed.
  isolateSignature () {
    let signature = this._commit.slice(
      this._commit.indexOf('-----BEGIN PGP SIGNATURE-----'),
      this._commit.indexOf('-----END PGP SIGNATURE-----') +
      '-----END PGP SIGNATURE-----'.length
    );
    return outdent(signature)
  }
}
// This is straight from parse_unit_factor in config.c of canonical git
// Parse a config value with an optional k/m/g binary-unit suffix into a
// number, e.g. '1k' -> 1024. Case-insensitive.
const num = val => {
  const text = val.toLowerCase();
  // NOTE(review): radix deliberately unspecified, mirroring the original;
  // parseInt then accepts a leading "0x" hex prefix (like strtoimax with
  // base 0) — confirm before "fixing" this to radix 10.
  let value = parseInt(text);
  const unit = text.charAt(text.length - 1);
  if (unit === 'k') value *= 1024;
  else if (unit === 'm') value *= 1024 * 1024;
  else if (unit === 'g') value *= 1024 * 1024 * 1024;
  return value
};
// This is straight from git_parse_maybe_bool_text in config.c of canonical git
// Coerce a config string to a boolean; throws on anything unrecognized.
const bool = val => {
  const text = val.trim().toLowerCase();
  switch (text) {
    case 'true':
    case 'yes':
    case 'on':
      return true
    case 'false':
    case 'no':
    case 'off':
      return false
    default:
      throw Error(
        `Expected 'true', 'false', 'yes', 'no', 'on', or 'off', but got ${text}`
      )
  }
};
// Schema of recognized config sections/keys and how to coerce their values.
// `_named: true` marks sections that take a subsection name, e.g.
// [remote "origin"]. The value functions (String/bool/num) cast raw strings.
const schema = {
  core: {
    _named: false,
    repositoryformatversion: String,
    filemode: bool,
    bare: bool,
    logallrefupdates: bool,
    symlinks: bool,
    ignorecase: bool,
    bigFileThreshold: num
  },
  remote: {
    _named: true,
    url: String,
    fetch: String
  },
  branch: {
    _named: true,
    remote: String,
    merge: String
  }
};
// A line starts a section if its first non-whitespace character is '['.
const isSection = line => line.trim().startsWith('[');
// Pull the bare section name out of a header line: the text after '[' up to
// the first ']' or space (whichever comes first), trimmed.
const extractSection = line => {
  const closers = [line.indexOf(']'), line.indexOf(' ')].filter(i => i > -1);
  const start = line.indexOf('[') + 1;
  return line.slice(start, Math.min(...closers)).trim()
};
// Named sections (e.g. [remote "origin"]) are flagged in the schema.
const isNamedSection = section => schema[section] && schema[section]._named;
// Any line containing '=' is treated as a key/value pair.
const isKeyValuePair = line => line.includes('=');
// The subsection name is the double-quoted part of the header line.
const extractSectionName = line =>
  line.slice(line.indexOf('"') + 1, line.lastIndexOf('"'));
// Note: there are a LOT of edge cases that aren't covered (e.g. keys in sections that also
// have subsections, [include] directives, etc.)
/**
 * In-memory model of a git config file (.git/config): a list of text lines
 * supporting lookup and in-place mutation of `section[.name].key` paths.
 */
class GitConfig {
  constructor (text) {
    this.lines = text.split('\n');
  }
  static from (text) {
    return new GitConfig(text)
  }
  /**
   * Look up `path` (e.g. 'remote.origin.url'). Returns the LAST matching
   * value (git semantics), or every match when `getall` is true. Values are
   * cast via the schema's coercion function when one is registered.
   */
  async get (path$$1, getall = false) {
    const parts = path$$1.split('.');
    const section = parts.shift();
    const sectionName = isNamedSection(section) ? parts.shift() : null;
    const key = parts.shift();
    let currentSection = '';
    let currentSectionName = null;
    let allValues = [];
    for (const line of this.lines) {
      // zero in on section
      if (isSection(line)) {
        currentSection = extractSection(line);
        if (isNamedSection(currentSection)) {
          currentSectionName = extractSectionName(line);
        }
      } else if (
        currentSection === section &&
        (sectionName === null || currentSectionName === sectionName)
      ) {
        if (isKeyValuePair(line)) {
          let [_key, _value] = line.split('=', 2);
          if (_key.trim() === key) {
            allValues.push(_value.trim());
          }
        }
      }
    }
    // Cast value to correct type
    let fn = schema[section] && schema[section][key];
    if (fn) {
      allValues = allValues.map(fn);
    }
    return getall ? allValues : allValues.pop()
  }
  async getall (path$$1) {
    return this.get(path$$1, true)
  }
  async append (path$$1, value) {
    return this.set(path$$1, value, true)
  }
  /**
   * Set (or, with append=true, add) `path` to `value`; deleting the entry
   * when `value` is undefined. Creates the section header if missing.
   */
  async set (path$$1, value, append = false) {
    const parts = path$$1.split('.');
    const section = parts.shift();
    const sectionName = isNamedSection(section) ? parts.shift() : null;
    const key = parts.shift();
    let currentSection = '';
    let currentSectionName = null;
    let lastSectionMatch = null;
    let lastMatch = null;
    for (let i = 0; i < this.lines.length; i++) {
      const line = this.lines[i];
      if (isSection(line)) {
        currentSection = extractSection(line);
        if (currentSection === section) {
          if (sectionName) {
            currentSectionName = extractSectionName(line);
          }
          if (currentSectionName === sectionName) {
            lastSectionMatch = i;
          }
        } else {
          currentSectionName = null;
        }
      } else if (
        currentSection === section &&
        (sectionName === null || currentSectionName === sectionName)
      ) {
        if (isKeyValuePair(line)) {
          let [_key] = line.split('=', 1);
          if (_key.trim() === key) {
            lastMatch = i;
          }
        }
      }
    }
    // BUGFIX: inserted lines were wrapped in one-element arrays
    // (e.g. splice(..., [`\t${key} = ${value}`])), which broke subsequent
    // get() calls — arrays have no .trim() and .includes('=') tests
    // membership, not substrings. Insert plain strings instead.
    if (lastMatch !== null) {
      if (value === undefined) {
        this.lines.splice(lastMatch, 1);
      } else if (append) {
        this.lines.splice(lastMatch + 1, 0, `\t${key} = ${value}`);
      } else {
        this.lines[lastMatch] = `\t${key} = ${value}`;
      }
    } else if (lastSectionMatch !== null) {
      if (value !== undefined) {
        this.lines.splice(lastSectionMatch + 1, 0, `\t${key} = ${value}`);
      }
    } else if (value !== undefined) {
      if (sectionName) {
        this.lines.push(`[${section} "${sectionName}"]`);
      } else {
        this.lines.push(`[${section}]`);
      }
      this.lines.push(`\t${key} = ${value}`);
    }
  }
  toString () {
    return this.lines.join('\n') + '\n'
  }
}
// Modulus used below to clamp stat fields to 32 bits for the index binary format.
const MAX_UINT32 = 2 ** 32;
/*::
import type {Stats} from 'fs'
type CacheEntryFlags = {
assumeValid: boolean,
extended: boolean,
stage: number,
nameLength: number
}
type CacheEntry = {
ctime: Date,
ctimeNanoseconds?: number,
mtime: Date,
mtimeNanoseconds?: number,
dev: number,
ino: number,
mode: number,
uid: number,
gid: number,
size: number,
oid: string,
flags: CacheEntryFlags,
path: string
}
*/
// Extract 1-bit assume-valid, 1-bit extended flag, 2-bit merge state flag, 12-bit path length flag
function parseCacheEntryFlags (bits) {
  return {
    assumeValid: (bits & 0x8000) !== 0,
    extended: (bits & 0x4000) !== 0,
    stage: (bits >> 12) & 0b11,
    nameLength: bits & 0xfff
  }
}
// Inverse of parseCacheEntryFlags: pack the flags object back into 16 bits.
function renderCacheEntryFlags (flags) {
  let bits = flags.nameLength & 0xfff;
  bits += (flags.stage & 0b11) << 12;
  if (flags.extended) bits += 0x4000;
  if (flags.assumeValid) bits += 0x8000;
  return bits
}
/**
 * Parse a v2 git index ("dircache") buffer into a Map of path -> entry.
 * Verifies the trailing SHA-1 checksum and the 'DIRC' magic before decoding.
 */
function parseBuffer (buffer) {
  // Verify shasum
  let shaComputed = shasum(buffer.slice(0, -20));
  let shaClaimed = buffer.slice(-20).toString('hex');
  if (shaClaimed !== shaComputed) {
    throw new Error(
      `Invalid checksum in GitIndex buffer: expected ${shaClaimed} but saw ${shaComputed}`
    )
  }
  let reader = new BufferCursor(buffer);
  let _entries = new Map();
  let magic = reader.toString('utf8', 4);
  if (magic !== 'DIRC') {
    // BUGFIX: corrected the "Inavlid" typo in this error message.
    throw new Error(`Invalid dircache magic file number: ${magic}`)
  }
  let version = reader.readUInt32BE();
  if (version !== 2) throw new Error(`Unsupported dircache version: ${version}`)
  let numEntries = reader.readUInt32BE();
  let i = 0;
  while (!reader.eof() && i < numEntries) {
    let entry = {};
    // ctime/mtime are stored as separate second + nanosecond fields.
    let ctimeSeconds = reader.readUInt32BE();
    let ctimeNanoseconds = reader.readUInt32BE();
    entry.ctime = new Date(ctimeSeconds * 1000 + ctimeNanoseconds / 1000000);
    entry.ctimeNanoseconds = ctimeNanoseconds;
    let mtimeSeconds = reader.readUInt32BE();
    let mtimeNanoseconds = reader.readUInt32BE();
    entry.mtime = new Date(mtimeSeconds * 1000 + mtimeNanoseconds / 1000000);
    entry.mtimeNanoseconds = mtimeNanoseconds;
    entry.dev = reader.readUInt32BE();
    entry.ino = reader.readUInt32BE();
    entry.mode = reader.readUInt32BE();
    entry.uid = reader.readUInt32BE();
    entry.gid = reader.readUInt32BE();
    entry.size = reader.readUInt32BE();
    entry.oid = reader.slice(20).toString('hex');
    let flags = reader.readUInt16BE();
    entry.flags = parseCacheEntryFlags(flags);
    // TODO: handle if (version === 3 && entry.flags.extended)
    // Find the path length by scanning for the NUL terminator instead of
    // trusting flags.nameLength (which saturates at 0xfff — see insert()).
    let pathlength = buffer.indexOf(0, reader.tell() + 1) - reader.tell();
    if (pathlength < 1) throw new Error(`Got a path length of: ${pathlength}`)
    entry.path = reader.toString('utf8', pathlength);
    // The next bit is awkward. We expect 1 to 8 null characters
    // (entries are NUL-padded so each record length is a multiple of 8).
    let tmp = reader.readUInt8();
    if (tmp !== 0) {
      throw new Error(`Expected 1-8 null characters but got '${tmp}'`)
    }
    let numnull = 1;
    while (!reader.eof() && reader.readUInt8() === 0 && numnull < 9) numnull++;
    // Step back over the first non-NUL byte we just consumed.
    reader.seek(reader.tell() - 1);
    // end of awkward part
    _entries.set(entry.path, entry);
    i++;
  }
  return _entries
}
/**
 * In-memory model of the git index (staging area): entries keyed by path in
 * a Map, (de)serialized to/from the v2 dircache binary format.
 */
class GitIndex {
  /*::
   _entries: Map<string, CacheEntry>
   _dirty: boolean // Used to determine if index needs to be saved to filesystem
   */
  // Accepts a Buffer (parsed via parseBuffer) or null (start with an empty index).
  constructor (index) {
    this._dirty = false;
    if (Buffer.isBuffer(index)) {
      this._entries = parseBuffer(index);
    } else if (index === null) {
      this._entries = new Map();
    } else {
      throw new Error('invalid type passed to GitIndex constructor')
    }
  }
  static from (buffer) {
    return new GitIndex(buffer)
  }
  // Entries sorted by path (the serialized index is path-ordered).
  get entries () {
    return sortby([...this._entries.values()], 'path')
  }
  // Iterating a GitIndex yields its entries in sorted path order.
  * [Symbol.iterator] () {
    for (let entry of this.entries) {
      yield entry;
    }
  }
  // Add (or replace) the entry for `filepath` from its fs stats and blob oid.
  insert ({ filepath, stats, oid }) {
    let entry = {
      ctime: stats.ctime,
      mtime: stats.mtime,
      // Stat fields are written as 32-bit values on disk, so clamp them here.
      dev: stats.dev % MAX_UINT32,
      ino: stats.ino % MAX_UINT32,
      mode: stats.mode % MAX_UINT32,
      uid: stats.uid % MAX_UINT32,
      gid: stats.gid % MAX_UINT32,
      size: stats.size % MAX_UINT32,
      path: filepath,
      oid: oid,
      flags: {
        assumeValid: false,
        extended: false,
        stage: 0,
        // The flags word has only 12 bits for the name length; longer names saturate.
        nameLength: filepath.length < 0xfff ? filepath.length : 0xfff
      }
    };
    this._entries.set(entry.path, entry);
    this._dirty = true;
  }
  // Remove the entry at `filepath`, or — when there is no exact match —
  // every entry underneath `filepath` treated as a directory prefix.
  delete ({ filepath }) {
    if (this._entries.has(filepath)) {
      this._entries.delete(filepath);
    } else {
      for (let key of this._entries.keys()) {
        if (key.startsWith(filepath + '/')) {
          this._entries.delete(key);
        }
      }
    }
    this._dirty = true;
  }
  clear () {
    this._entries.clear();
    this._dirty = true;
  }
  // Human-readable "mode oid path" listing (NOT the binary format — see toObject).
  render () {
    return this.entries
      .map(entry => `${entry.mode.toString(8)} ${entry.oid} ${entry.path}`)
      .join('\n')
  }
  // Serialize to the v2 dircache binary format, including the trailing SHA-1.
  toObject () {
    let header = Buffer.alloc(12);
    let writer = new BufferCursor(header);
    writer.write('DIRC', 4, 'utf8');
    writer.writeUInt32BE(2);
    writer.writeUInt32BE(this.entries.length);
    let body = Buffer.concat(
      this.entries.map(entry => {
        // the fixed length + the filename + at least one null char => align by 8
        let length = Math.ceil((62 + entry.path.length + 1) / 8) * 8;
        let written = Buffer.alloc(length);
        let writer = new BufferCursor(written);
        let ctimeMilliseconds = entry.ctime.valueOf();
        let ctimeSeconds = Math.floor(ctimeMilliseconds / 1000);
        // Prefer the exact nanoseconds captured at parse time; otherwise
        // derive the sub-second remainder from the millisecond timestamp.
        let ctimeNanoseconds =
          entry.ctimeNanoseconds ||
          ctimeMilliseconds * 1000000 - ctimeSeconds * 1000000 * 1000;
        let mtimeMilliseconds = entry.mtime.valueOf();
        let mtimeSeconds = Math.floor(mtimeMilliseconds / 1000);
        let mtimeNanoseconds =
          entry.mtimeNanoseconds ||
          mtimeMilliseconds * 1000000 - mtimeSeconds * 1000000 * 1000;
        writer.writeUInt32BE(ctimeSeconds % MAX_UINT32);
        writer.writeUInt32BE(ctimeNanoseconds % MAX_UINT32);
        writer.writeUInt32BE(mtimeSeconds % MAX_UINT32);
        writer.writeUInt32BE(mtimeNanoseconds % MAX_UINT32);
        writer.writeUInt32BE(entry.dev % MAX_UINT32);
        writer.writeUInt32BE(entry.ino % MAX_UINT32);
        writer.writeUInt32BE(entry.mode % MAX_UINT32);
        writer.writeUInt32BE(entry.uid % MAX_UINT32);
        writer.writeUInt32BE(entry.gid % MAX_UINT32);
        writer.writeUInt32BE(entry.size % MAX_UINT32);
        writer.write(entry.oid, 20, 'hex');
        writer.writeUInt16BE(renderCacheEntryFlags(entry.flags));
        // NOTE(review): uses entry.path.length (UTF-16 code units) as the
        // byte count — non-ASCII paths could be truncated; confirm upstream.
        writer.write(entry.path, entry.path.length, 'utf8');
        return written
      })
    );
    let main = Buffer.concat([header, body]);
    let sum = shasum(main);
    return Buffer.concat([main, Buffer.from(sum, 'hex')])
  }
}
/**
 * Helpers for git loose-object framing: "<type> <length>\0<content>".
 */
class GitObject {
  /**
   * Compute the SHA-1 oid of an object (hash of the framed content).
   */
  static hash ({ type, object }) {
    let buffer = Buffer.concat([
      Buffer.from(`${type} ${object.byteLength.toString()}\0`),
      Buffer.from(object)
    ]);
    let oid = shasum(buffer);
    return oid
  }
  /**
   * Prepend the loose-object header, returning both the oid and the full
   * wrapped buffer.
   */
  static wrap ({ type, object }) {
    let buffer = Buffer.concat([
      Buffer.from(`${type} ${object.byteLength.toString()}\0`),
      object
    ]);
    let oid = shasum(buffer);
    return {
      oid,
      buffer
    }
  }
  /**
   * Split a wrapped buffer back into { type, object }, verifying the oid
   * (when provided) and the declared length.
   */
  static unwrap ({ oid, buffer }) {
    if (oid) {
      let sha = shasum(buffer);
      if (sha !== oid) {
        throw new Error(`SHA check failed! Expected ${oid}, computed ${sha}`)
      }
    }
    let s = buffer.indexOf(32); // first space
    let i = buffer.indexOf(0); // first null value
    let type = buffer.slice(0, s).toString('utf8'); // get type of object
    let length = buffer.slice(s + 1, i).toString('utf8'); // get declared length (FIX: comment was a copy-paste of the type line)
    let actualLength = buffer.length - (i + 1);
    // verify length (FIX: pass an explicit radix — the header length is decimal)
    if (parseInt(length, 10) !== actualLength) {
      throw new Error(
        `Length mismatch: expected ${length} bytes but got ${actualLength} instead.`
      )
    }
    return {
      type,
      object: Buffer.from(buffer.slice(i + 1))
    }
  }
}
// Wrap a Buffer in a readable stream that emits it as a single chunk.
function buffer2stream (buffer) {
  const passthrough = new stream.PassThrough();
  passthrough.end(buffer);
  return passthrough
}
// Decode git's big-endian variable-length offset encoding (as used for
// ofs-delta base offsets): 7 data bits per byte, the high bit flags "more
// bytes follow", with an implicit +1 folded in per continuation byte.
function decodeVarInt (reader) {
  const groups = [];
  let current;
  do {
    current = reader.readUInt8();
    // Keep the low 7 data bits; bit 7 is the continuation flag.
    groups.push(current & 0b01111111);
  } while ((current & 0b10000000) !== 0)
  // Fold the big-endian 7-bit groups together, applying the offset-encoding
  // increment between groups (hence the -1 seed and the (a + 1) step).
  let value = -1;
  for (const group of groups) {
    value = ((value + 1) << 7) | group;
  }
  return value
}
// I'm pretty much copying this one from the git C source code,
// because it makes no sense.
// Little-endian varint continuation: the caller already consumed the low 4
// bits (`startWith`); each subsequent byte contributes 7 more bits until a
// byte with the high bit clear ends the sequence.
function otherVarIntDecode (reader, startWith) {
  let result = startWith;
  for (let shift = 4; ; shift += 7) {
    const byte = reader.readUInt8();
    result |= (byte & 0b01111111) << shift;
    if ((byte & 0b10000000) === 0) break
  }
  return result
}
class GitPackIndex {
constructor (stuff) {
Object.assign(this, stuff);
this.offsetCache = {};
}
static async fromIdx ({ idx, getExternalRefDelta }) {
let reader = new BufferCursor(idx);
let magic = reader.slice(4).toString('hex');
// Check for IDX v2 magic number
if (magic !== 'ff744f63') {
return // undefined
}
let version = reader.readUInt32BE();
if (version !== 2) {
throw new Error(
`Unable to read version ${version} packfile IDX. (Only version 2 supported)`
)
}
// Verify checksums
let shaComputed = shasum(idx.slice(0, -20));
let shaClaimed = idx.slice(-20).toString('hex');
if (shaClaimed !== shaComputed) {
throw new Error(
`Invalid checksum in IDX buffer: expected ${shaClaimed} but saw ${shaComputed}`
)
}
if (idx.byteLength > 2048 * 1024 * 1024) {
throw new Error(
`To keep implementation simple, I haven't implemented the layer 5 feature needed to support packfiles > 2GB in size.`
)
}
let fanout = [];
for (let i = 0; i < 256; i++) {
fanout.push(reader.readUInt32BE());
}
let size = fanout[255];
// For now we'll parse the whole thing. We can optimize later if we need to.
let hashes = [];
for (let i = 0; i < size; i++) {
hashes.push(reader.slice(20).toString('hex'));
}
let crcs = {};
for (let i = 0; i < size; i++) {
crcs[hashes[i]] = reader.readUInt32BE();
}
let offsets = {};
for (let i = 0; i < size; i++) {
offsets[hashes[i]] = reader.readUInt32BE();
}
let packfileSha = reader.slice(20).toString('hex');
return new GitPackIndex({
hashes,
crcs,
offsets,
packfileSha,
getExternalRefDelta
})
}
static async fromPack ({ pack, getExternalRefDelta }) {
const listpackTypes = {
1: 'commit',
2: 'tree',
3: 'blob',
4: 'tag',
6: 'ofs-delta',
7: 'ref-delta'
};
let offsetToObject = {};
// Older packfiles do NOT use the shasum of the pack itself,
// so it is recommended to just use whatever bytes are in the trailer.
// Source: https://github.com/git/git/commit/1190a1acf800acdcfd7569f87ac1560e2d077414
// let packfileSha = shasum(pack.slice(0, -20))
let packfileSha = pack.slice(-20).toString('hex');
let hashes = [];
let crcs = {};
let offsets = {};
let totalObjectCount = null;
let lastPercent = null;
let times = {
hash: 0,
readSlice: 0,
offsets: 0,
crcs: 0,
sort: 0
};
let histogram = {
commit: 0,
tree: 0,
blob: 0,
tag: 0,
'ofs-delta': 0,
'ref-delta': 0
};
let bytesProcessed = 0;
log('Indexing objects');
log(
`percent\tmilliseconds\tbytesProcessed\tcommits\ttrees\tblobs\ttags\tofs-deltas\tref-deltas`
);
marky.mark('total');
marky.mark('offsets');
marky.mark('percent');
await new Promise((resolve, reject) => {
buffer2stream(pack)
.pipe(listpack())
.on('data', async ({ data, type, reference, offset, num }) => {
if (totalObjectCount === null) totalObjectCount = num;
let percent = Math.floor(
(totalObjectCount - num) * 100 / totalObjectCount
);
if (percent !== lastPercent) {
log(
`${percent}%\t${Math.floor(
marky.stop('percent').duration
)}\t${bytesProcessed}\t${histogram.commit}\t${histogram.tree}\t${
histogram.blob
}\t${histogram.tag}\t${histogram['ofs-delta']}\t${
histogram['ref-delta']
}`
);
histogram = {
commit: 0,
tree: 0,
blob: 0,
tag: 0,
'ofs-delta': 0,
'ref-delta': 0
};
bytesProcessed = 0;
marky.mark('percent');
}
lastPercent = percent;
// Change type from a number to a meaningful string
type = listpackTypes[type];
histogram[type]++;
bytesProcessed += data.byteLength;
if (['commit', 'tree', 'blob', 'tag'].includes(type)) {
offsetToObject[offset] = {
type,
offset
};
} else if (type === 'ofs-delta') {
offsetToObject[offset] = {
type,
offset
};
} else if (type === 'ref-delta') {
offsetToObject[offset] = {
type,
offset
};
}
if (num === 0) resolve();
});
});
times['offsets'] = Math.floor(marky.stop('offsets').duration);
log('Computing CRCs');
marky.mark('crcs');
// We need to know the lengths of the slices to compute the CRCs.
let offsetArray = Object.keys(offsetToObject).map(Number);
for (let [i, start] of offsetArray.entries()) {
let end =
i + 1 === offsetArray.length ? pack.byteLength - 20 : offsetArray[i + 1];
let o = offsetToObject[start];
let crc = crc32(pack.slice(start, end));
o.end = end;
o.crc = crc;
}
times['crcs'] = Math.floor(marky.stop('crcs').duration);
// We don't have the hashes yet. But we can generate them using the .readSlice function!
const p = new GitPackIndex({
pack,
packfileSha,
crcs,
hashes,
offsets,
getExternalRefDelta
});
// Resolve deltas and compute the oids
log('Resolving deltas');
log(`percent2\tmilliseconds2\tcallsToReadSlice\tcallsToGetExternal`);
marky.mark('percent');
lastPercent = null;
let count = 0;
let callsToReadSlice = 0;
let callsToGetExternal = 0;
let timeByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
let objectsByDepth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
for (let offset in offsetToObject) {
offset = Number(offset);
let percent = Math.floor(count++ * 100 / totalObjectCount);
if (percent !== lastPercent) {
log(
`${percent}%\t${Math.floor(
marky.stop('percent').duration
)}\t${callsToReadSlice}\t${callsToGetExternal}`
);
marky.mark('percent');
callsToReadSlice = 0;
callsToGetExternal = 0;
}
lastPercent = percent;
let o = offsetToObject[offset];
if (o.oid) continue
try {
p.readDepth = 0;
p.externalReadDepth = 0;
marky.mark('readSlice');
let { type, object } = await p.readSlice({ start: offset });
let time = marky.stop('readSlice').duration;
times.readSlice += time;
callsToReadSlice += p.readDepth;
callsToGetExternal += p.externalReadDepth;
timeByDepth[p.readDepth] += time;
objectsByDepth[p.readDepth] += 1;
marky.mark('hash');
let oid = GitObject.hash({ type, object });
times.hash += marky.stop('hash').duration;
o.oid = oid;
hashes.push(oid);
offsets[oid] = offset;
crcs[oid] = o.crc;
} catch (err) {
log('ERROR', err);
continue
}
}
marky.mark('sort');
hashes.sort();
times['sort'] = Math.floor(marky.stop('sort').duration);
let totalElapsedTime = marky.stop('total').duration;
times.hash = Math.floor(times.hash);
times.readSlice = Math.floor(times.readSlice);
times.misc = Math.floor(
Object.values(times).reduce((a, b) => a - b, totalElapsedTime)
);
log(Object.keys(times).join('\t'));
log(Object.values(times).join('\t'));
log('by depth:');
log([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11].join('\t'));
log(objectsByDepth.slice(0, 12).join('\t'));
log(
timeByDepth
.map(Math.floor)
.slice(0, 12)
.join('\t')
);
return p
}
toBuffer () {
let buffers = [];
let write = (str, encoding) => {
buffers.push(Buffer.from(str, encoding));
};
// Write out IDX v2 magic number
write('ff744f63', 'hex');
// Write out version number 2
write('00000002', 'hex');
// Write fanout table
let fanoutBuffer = new BufferCursor(Buffer.alloc(256 * 4));
for (let i = 0; i < 256; i++) {
let count = 0;
for (let hash of this.hashes) {
if (parseInt(hash.slice(0, 2), 16) <= i) count++;
}
fanoutBuffer.writeUInt32BE(count);
}
buffers.push(fanoutBuffer.buffer);
// Write out hashes
for (let hash of this.hashes) {
write(hash, 'hex');
}
// Write out crcs
let crcsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4));
for (let hash of this.hashes) {
crcsBuffer.writeUInt32BE(this.crcs[hash]);
}
buffers.push(crcsBuffer.buffer);
// Write out offsets
let offsetsBuffer = new BufferCursor(Buffer.alloc(this.hashes.length * 4));
for (let hash of this.hashes) {
offsetsBuffer.writeUInt32BE(this.offsets[hash]);
}
buffers.push(offsetsBuffer.buffer);
// Write out packfile checksum
write(this.packfileSha, 'hex');
// Write out shasum
let totalBuffer = Buffer.concat(buffers);
let sha = shasum(totalBuffer);
let shaBuffer = Buffer.alloc(20);
shaBuffer.write(sha, 'hex');
return Buffer.concat([totalBuffer, shaBuffer])
}
async load ({ pack }) {
this.pack = pack;
}
async unload () {
this.pack = null;
}
async read ({ oid }) {
if (!this.offsets[oid]) {
if (this.getExternalRefDelta) {
this.externalReadDepth++;
return this.getExternalRefDelta(oid)
} else {
throw new Error(`Could not read object ${oid} from packfile`)
}
}
let start = this.offsets[oid];
return this.readSlice({ start })
}
/**
 * Read and inflate the object stored at byte offset `start` in the packfile.
 * Deltified objects are resolved recursively: an 'ofs_delta' base lives
 * earlier in this same pack, a 'ref_delta' base is looked up by oid via
 * read() (which may consult getExternalRefDelta for thin packs).
 * Returns { type, format: 'content', object }.
 */
async readSlice ({ start }) {
  if (this.offsetCache[start]) return this.offsetCache[start]
  this.readDepth++;
  // Object type codes as they appear in bits 6-4 of the header byte.
  const types = {
    0b0010000: 'commit',
    0b0100000: 'tree',
    0b0110000: 'blob',
    0b1000000: 'tag',
    0b1100000: 'ofs_delta',
    0b1110000: 'ref_delta'
  };
  if (!this.pack) {
    throw new Error(
      'Tried to read from a GitPackIndex with no packfile loaded into memory'
    )
  }
  let raw = this.pack.slice(start);
  let reader = new BufferCursor(raw);
  let byte = reader.readUInt8();
  // Object type is encoded in bits 654
  let btype = byte & 0b1110000;
  let type = types[btype];
  if (type === undefined) {
    throw new Error('Unrecognized type: 0b' + btype.toString(2))
  }
  // The length encoding gets complicated.
  // Last four bits of length is encoded in bits 3210
  let lastFour = byte & 0b1111;
  let length = lastFour;
  // Whether the next byte is part of the variable-length encoded number
  // is encoded in bit 7
  let multibyte = byte & 0b10000000;
  if (multibyte) {
    length = otherVarIntDecode(reader, lastFour);
  }
  let base = null;
  let object = null;
  // Handle deltified objects
  if (type === 'ofs_delta') {
    // The base object's position is stored as a distance back from `start`.
    let offset = decodeVarInt(reader);
    let baseOffset = start - offset
    ;({ object: base, type } = await this.readSlice({ start: baseOffset }));
  }
  if (type === 'ref_delta') {
    // The base object is identified by a raw 20-byte oid after the header.
    let oid = reader.slice(20).toString('hex')
    ;({ object: base, type } = await this.read({ oid }));
  }
  // Handle undeltified objects
  let buffer = raw.slice(reader.tell());
  object = Buffer.from(pako.inflate(buffer));
  // Assert that the object length is as expected.
  // For deltified objects `length` describes the delta itself, which is why
  // this check runs before the delta is applied.
  if (object.byteLength !== length) {
    throw new Error(
      `Packfile told us object would have length ${length} but it had length ${
        object.byteLength
      }`
    )
  }
  if (base) {
    object = Buffer.from(applyDelta(object, base));
  }
  // Cache the result based on depth.
  if (this.readDepth > 3) {
    // hand tuned for speed / memory usage tradeoff
    this.offsetCache[start] = { type, object };
  }
  return { type, format: 'content', object }
}
}
/**
pkt-line Format
---------------
Much (but not all) of the payload is described around pkt-lines.
A pkt-line is a variable length binary string. The first four bytes
of the line, the pkt-len, indicates the total length of the line,
in hexadecimal. The pkt-len includes the 4 bytes used to contain
the length's hexadecimal representation.
A pkt-line MAY contain binary data, so implementors MUST ensure
pkt-line parsing/formatting routines are 8-bit clean.
A non-binary line SHOULD BE terminated by an LF, which if present
MUST be included in the total length. Receivers MUST treat pkt-lines
with non-binary data the same whether or not they contain the trailing
LF (stripping the LF if present, and not complaining when it is
missing).
The maximum length of a pkt-line's data component is 65516 bytes.
Implementations MUST NOT send pkt-line whose length exceeds 65520
(65516 bytes of payload + 4 bytes of length data).
Implementations SHOULD NOT send an empty pkt-line ("0004").
A pkt-line with a length field of 0 ("0000"), called a flush-pkt,
is a special case and MUST be handled differently than an empty
pkt-line ("0004").
----
pkt-line = data-pkt / flush-pkt
data-pkt = pkt-len pkt-payload
pkt-len = 4*(HEXDIG)
pkt-payload = (pkt-len - 4)*(OCTET)
flush-pkt = "0000"
----
Examples (as C-style strings):
----
pkt-line actual value
---------------------------------
"0006a\n" "a\n"
"0005a" "a"
"000bfoobar\n" "foobar\n"
"0004" ""
----
*/
// I'm really using this more as a namespace.
// There's not a lot of "state" in a pkt-line
/**
 * Encoder/reader helpers for the git pkt-line wire format (see the format
 * description above). Used as a namespace: all members are static.
 */
class GitPktLine {
  /** The special flush-pkt '0000', which delimits protocol sections. */
  static flush () {
    return Buffer.from('0000', 'utf8')
  }
  /**
   * Encode a payload as a pkt-line: a 4-hex-digit total length (payload
   * plus the 4 length bytes themselves) followed by the payload.
   */
  static encode (line) {
    if (typeof line === 'string') {
      line = Buffer.from(line);
    }
    let length = line.length + 4;
    // String.prototype.padStart gives the same zero-padded 4-digit hex as
    // the previous `pad` dependency did.
    let hexlength = length.toString(16).padStart(4, '0');
    return Buffer.concat([Buffer.from(hexlength, 'utf8'), line])
  }
  /**
   * Return an async read() over an in-memory buffer of pkt-lines.
   * read() yields a payload Buffer, null for a flush-pkt, or true at EOF.
   */
  static reader (buffer) {
    let buffercursor = new BufferCursor(buffer);
    return async function read () {
      if (buffercursor.eof()) return true
      let length = parseInt(buffercursor.slice(4).toString('utf8'), 16);
      if (length === 0) return null
      return buffercursor.slice(length - 4).buffer
    }
  }
  /**
   * Same contract as reader(), but over a stream. Stream errors are logged
   * and reported as EOF (true) — deliberately best-effort.
   */
  static streamReader (stream$$1) {
    const bufferstream = streamSource(stream$$1);
    return async function read () {
      try {
        let length = await bufferstream.slice(4);
        if (length === null) return true
        length = parseInt(length.toString('utf8'), 16);
        if (length === 0) return null
        let buffer = await bufferstream.slice(length - 4);
        if (buffer === null) return true
        return buffer
      } catch (err) {
        console.log('error', err);
        return true
      }
    }
  }
}
/**
 * A single parsed git refspec, e.g. '+refs/heads/*:refs/remotes/origin/*'.
 * `force` is the leading '+', `matchPrefix` marks a glob ('*') refspec.
 */
class GitRefSpec {
  constructor ({ remotePath, localPath, force, matchPrefix }) {
    Object.assign(this, {
      remotePath,
      localPath,
      force,
      matchPrefix
    });
  }
  /**
   * Parse a refspec string into a GitRefSpec.
   * @throws {Error} 'Invalid refspec' for malformed input (no colon, or a
   * glob on only one side).
   */
  static from (refspec) {
    const match = refspec.match(/^(\+?)(.*?)(\*?):(.*?)(\*?)$/);
    // A string the pattern cannot match (e.g. no colon at all) is not a
    // refspec; fail with a clear error instead of a TypeError on null.
    if (match === null) throw new Error('Invalid refspec')
    const [
      forceMatch,
      remotePath,
      remoteGlobMatch,
      localPath,
      localGlobMatch
    ] = match.slice(1);
    const force = forceMatch === '+';
    const remoteIsGlob = remoteGlobMatch === '*';
    const localIsGlob = localGlobMatch === '*';
    // validate
    // TODO: Make this check more nuanced, and depend on whether this is a fetch refspec or a push refspec
    if (remoteIsGlob !== localIsGlob) throw new Error('Invalid refspec')
    return new GitRefSpec({
      remotePath,
      localPath,
      force,
      matchPrefix: remoteIsGlob
    })
    // TODO: We need to run resolveRef on both paths to expand them to their full name.
  }
  /**
   * Map a remote ref name to its local name under this rule,
   * or null if the rule does not apply.
   */
  translate (remoteBranch) {
    if (this.matchPrefix) {
      if (remoteBranch.startsWith(this.remotePath)) {
        return this.localPath + remoteBranch.replace(this.remotePath, '')
      }
    } else {
      if (remoteBranch === this.remotePath) return this.localPath
    }
    return null
  }
}
/**
 * An ordered collection of GitRefSpec rules, applied together to map
 * remote ref names onto local ref names.
 */
class GitRefSpecSet {
  constructor (rules = []) {
    this.rules = rules;
  }
  /** Build a set from refspec strings. */
  static from (refspecs) {
    const parsed = refspecs.map(refspec => GitRefSpec.from(refspec)); // might throw
    return new GitRefSpecSet(parsed)
  }
  /** Append one more refspec rule. */
  add (refspec) {
    this.rules.push(GitRefSpec.from(refspec)); // might throw
  }
  /**
   * Translate many remote refs at once.
   * Returns [remoteRef, localRef] pairs, grouped by rule order.
   */
  translate (remoteRefs) {
    const pairs = [];
    for (const rule of this.rules) {
      for (const remoteRef of remoteRefs) {
        const localRef = rule.translate(remoteRef);
        if (localRef) pairs.push([remoteRef, localRef]);
      }
    }
    return pairs
  }
  /**
   * Translate a single remote ref. The LAST matching rule wins,
   * so later rules can override earlier ones.
   */
  translateOne (remoteRef) {
    let winner = null;
    for (const rule of this.rules) {
      winner = rule.translate(remoteRef) || winner;
    }
    return winner
  }
}
/*::
type TreeEntry = {
mode: string,
path: string,
oid: string,
type?: string
}
*/
/**
 * Parse a raw git tree object buffer into an array of entries.
 * Each entry on the wire is: `<mode> <path>\0<20-byte oid>`.
 */
function parseBuffer$1 (buffer) {
  const entries = [];
  let pos = 0;
  while (pos < buffer.length) {
    const spaceIdx = buffer.indexOf(32, pos);
    if (spaceIdx === -1) {
      throw new Error(
        `GitTree: Error parsing buffer at byte location ${pos}: Could not find the next space character.`
      )
    }
    const nullIdx = buffer.indexOf(0, pos);
    if (nullIdx === -1) {
      throw new Error(
        `GitTree: Error parsing buffer at byte location ${pos}: Could not find the next null character.`
      )
    }
    let mode = buffer.slice(pos, spaceIdx).toString('utf8');
    if (mode === '40000') mode = '040000'; // makes it line up neater in printed output
    const type = mode === '040000' ? 'tree' : 'blob';
    const filepath = buffer.slice(spaceIdx + 1, nullIdx).toString('utf8');
    const oid = buffer.slice(nullIdx + 1, nullIdx + 21).toString('hex');
    entries.push({ mode, path: filepath, oid, type });
    pos = nullIdx + 21;
  }
  return entries
}
/**
 * Clamp an arbitrary file mode (string or octal number) to one of the
 * canonical modes git allows in tree objects.
 * @throws {Error} when the mode matches no recognized prefix.
 */
function limitModeToAllowed (mode) {
  if (typeof mode === 'number') {
    mode = mode.toString(8);
  }
  // Recognized prefixes mapped onto canonical git tree modes.
  const canonical = [
    [/^0?4.*/, '40000'], // Directory
    [/^1006.*/, '100644'], // Regular non-executable file
    [/^1007.*/, '100755'], // Regular executable file
    [/^120.*/, '120000'], // Symbolic link
    [/^160.*/, '160000'] // Commit (git submodule reference)
  ];
  for (const [pattern, result] of canonical) {
    if (pattern.test(mode)) return result
  }
  throw new Error(`Could not understand file mode: ${mode}`)
}
/**
 * Normalize an entry-like object (from the index or the GitHub API) into
 * the { mode, path, oid, type } shape GitTree expects. Mutates `entry`.
 */
function nudgeIntoShape (entry) {
  // GitHub's API says 'sha' where git says 'oid'.
  if (!entry.oid && entry.sha) entry.oid = entry.sha;
  // Index entries carry numeric modes; clamp to modes git trees allow.
  entry.mode = limitModeToAllowed(entry.mode);
  // Index entries may omit the type; default to blob.
  if (!entry.type) entry.type = 'blob';
  return entry
}
/**
 * A parsed git tree object: a list of { mode, path, oid, type } entries.
 * Accepts either the raw tree Buffer or an array of entry-like objects
 * (e.g. from the index or the GitHub API), which get normalized.
 */
class GitTree {
  /*::
  _entries: Array<TreeEntry>
  */
  constructor (entries) {
    if (Buffer.isBuffer(entries)) {
      this._entries = parseBuffer$1(entries);
    } else if (Array.isArray(entries)) {
      this._entries = entries.map(nudgeIntoShape);
    } else {
      throw new Error('invalid type passed to GitTree constructor')
    }
  }
  static from (tree) {
    return new GitTree(tree)
  }
  /** Human-readable listing, one entry per line (like `git ls-tree`). */
  render () {
    return this._entries
      .map(entry => `${entry.mode} ${entry.type} ${entry.oid} ${entry.path}`)
      .join('\n')
  }
  /** Serialize back to the canonical git tree wire format. */
  toObject () {
    return Buffer.concat(
      this._entries.map(entry => {
        // git stores the directory mode without the leading zero
        let mode = Buffer.from(entry.mode.replace(/^0/, ''));
        let space = Buffer.from(' ');
        // FIX: Buffer.from(string, encoding) takes the encoding as a string;
        // the previous `{ encoding: 'utf8' }` object only worked because
        // Node falls back to utf8 for non-string encodings.
        let path$$1 = Buffer.from(entry.path, 'utf8');
        let nullchar = Buffer.from([0]);
        // 'hex' decoding replaces the hand-rolled match/parseInt conversion.
        let oid = Buffer.from(entry.oid, 'hex');
        return Buffer.concat([mode, space, path$$1, nullchar, oid])
      })
    )
  }
  /** The underlying entry array (not a copy). */
  entries () {
    return this._entries
  }
  // Iterating a GitTree yields its entries in order.
  * [Symbol.iterator] () {
    for (let entry of this._entries) {
      yield entry;
    }
  }
}
/**
 * Normalize text to the "one true" form: no carriage returns, no leading
 * blank lines, exactly one trailing newline.
 */
function normalize$1 (str) {
  const noCarriageReturns = str.replace(/\r/g, '');
  const noLeadingNewlines = noCarriageReturns.replace(/^\n+/, '');
  return noLeadingNewlines.replace(/\n+$/, '') + '\n'
}
/**
 * Indent every line of `str` by a single space (the gpgsig continuation
 * format), trimming surrounding whitespace and ending with a newline.
 */
function indent$1 (str) {
  const lines = str.trim().split('\n');
  return lines.map(line => ` ${line}`).join('\n') + '\n'
}
class SignedGitCommit extends GitCommit {
static from (commit) {
return new SignedGitCommit(commit)
}
async sign (openpgp, privateKeys) {
let commit = this.withoutSignature();
let headers = GitCommit.justHeaders(this._commit);
let message = GitCommit.justMessage(this._commit);
let privKeyObj = openpgp.key.readArmored(privateKeys).keys;
let { signature } = await openpgp.sign({
data: openpgp.util.str2Uint8Array(commit),
privateKeys: privKeyObj,
detached: true,
armor: true
});
// renormalize the line endings to the one true line-ending
signature = normalize$1(signature);
let signedCommit =
headers + '\n' + 'gpgsig' + indent$1(signature) + '\n' + message;
// return a new commit object
return GitCommit.from(signedCommit)
}
async listSigningKeys (openpgp) {
let msg = openpgp.message.readSignedContent(
this.withoutSignature(),
this.isolateSignature()
);
return msg.getSigningKeyIds().map(keyid => key