isomorphic-git
Version:
A pure JavaScript reimplementation of git for node and browsers
1,711 lines (1,520 loc) • 501 kB
JavaScript
import AsyncLock from 'async-lock';
import Hash from 'sha.js/sha1.js';
import { join } from 'path-browserify';
import crc32 from 'crc-32';
import pako from 'pako';
import pify from 'pify';
import ignore from 'ignore';
import cleanGitRef from 'clean-git-ref';
import validRef from 'is-git-ref-name-valid';
import diff3Merge from 'diff3';
/**
* @typedef {Object} GitProgressEvent
* @property {string} phase
* @property {number} loaded
* @property {number} total
*/
/**
* @callback ProgressCallback
* @param {GitProgressEvent} progress
* @returns {void | Promise<void>}
*/
/**
* @typedef {Object} GitHttpRequest
* @property {string} url - The URL to request
* @property {string} [method='GET'] - The HTTP method to use
* @property {Object<string, string>} [headers={}] - Headers to include in the HTTP request
* @property {Object} [agent] - An HTTP or HTTPS agent that manages connections for the HTTP client (Node.js only)
* @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of POST requests
* @property {ProgressCallback} [onProgress] - Reserved for future use (emitting `GitProgressEvent`s)
* @property {object} [signal] - Reserved for future use (canceling a request)
*/
/**
* @typedef {Object} GitHttpResponse
* @property {string} url - The final URL that was fetched after any redirects
* @property {string} [method] - The HTTP method that was used
* @property {Object<string, string>} [headers] - HTTP response headers
* @property {AsyncIterableIterator<Uint8Array>} [body] - An async iterator of Uint8Arrays that make up the body of the response
* @property {number} statusCode - The HTTP status code
* @property {string} statusMessage - The HTTP status message
*/
/**
* @callback HttpFetch
* @param {GitHttpRequest} request
* @returns {Promise<GitHttpResponse>}
*/
/**
* @typedef {Object} HttpClient
* @property {HttpFetch} request
*/
/**
* A git commit object.
*
* @typedef {Object} CommitObject
* @property {string} message Commit message
* @property {string} tree SHA-1 object id of corresponding file tree
* @property {string[]} parent an array of zero or more SHA-1 object ids
* @property {Object} author
* @property {string} author.name The author's name
* @property {string} author.email The author's email
* @property {number} author.timestamp UTC Unix timestamp in seconds
* @property {number} author.timezoneOffset Timezone difference from UTC in minutes
* @property {Object} committer
* @property {string} committer.name The committer's name
* @property {string} committer.email The committer's email
* @property {number} committer.timestamp UTC Unix timestamp in seconds
* @property {number} committer.timezoneOffset Timezone difference from UTC in minutes
* @property {string} [gpgsig] PGP signature (if present)
*/
/**
* An entry from a git tree object. Files are called 'blobs' and directories are called 'trees'.
*
* @typedef {Object} TreeEntry
* @property {string} mode the 6 digit hexadecimal mode
* @property {string} path the name of the file or directory
* @property {string} oid the SHA-1 object id of the blob or tree
* @property {'commit'|'blob'|'tree'} type the type of object
*/
/**
* A git tree object. Trees represent a directory snapshot.
*
* @typedef {TreeEntry[]} TreeObject
*/
/**
* A git annotated tag object.
*
* @typedef {Object} TagObject
* @property {string} object SHA-1 object id of object being tagged
* @property {'blob' | 'tree' | 'commit' | 'tag'} type the type of the object being tagged
* @property {string} tag the tag name
* @property {Object} tagger
* @property {string} tagger.name the tagger's name
* @property {string} tagger.email the tagger's email
* @property {number} tagger.timestamp UTC Unix timestamp in seconds
* @property {number} tagger.timezoneOffset timezone difference from UTC in minutes
* @property {string} message tag message
* @property {string} [gpgsig] PGP signature (if present)
*/
/**
* @typedef {Object} ReadCommitResult
* @property {string} oid - SHA-1 object id of this commit
* @property {CommitObject} commit - the parsed commit object
* @property {string} payload - PGP signing payload
*/
/**
* @typedef {Object} ServerRef - This object has the following schema:
* @property {string} ref - The name of the ref
* @property {string} oid - The SHA-1 object id the ref points to
* @property {string} [target] - The target ref pointed to by a symbolic ref
* @property {string} [peeled] - If the oid is the SHA-1 object id of an annotated tag, this is the SHA-1 object id that the annotated tag points to
*/
/**
* @typedef Walker
* @property {Symbol} Symbol('GitWalkerSymbol')
*/
/**
* Normalized subset of filesystem `stat` data:
*
* @typedef {Object} Stat
* @property {number} ctimeSeconds
* @property {number} ctimeNanoseconds
* @property {number} mtimeSeconds
* @property {number} mtimeNanoseconds
* @property {number} dev
* @property {number} ino
* @property {number} mode
* @property {number} uid
* @property {number} gid
* @property {number} size
*/
/**
* The `WalkerEntry` is an interface that abstracts computing many common tree / blob stats.
*
* @typedef {Object} WalkerEntry
* @property {function(): Promise<'tree'|'blob'|'special'|'commit'>} type
* @property {function(): Promise<number>} mode
* @property {function(): Promise<string>} oid
* @property {function(): Promise<Uint8Array|void>} content
* @property {function(): Promise<Stat>} stat
*/
/**
* @typedef {Object} CallbackFsClient
* @property {function} readFile - https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback
* @property {function} writeFile - https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback
* @property {function} unlink - https://nodejs.org/api/fs.html#fs_fs_unlink_path_callback
* @property {function} readdir - https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback
* @property {function} mkdir - https://nodejs.org/api/fs.html#fs_fs_mkdir_path_mode_callback
* @property {function} rmdir - https://nodejs.org/api/fs.html#fs_fs_rmdir_path_callback
* @property {function} stat - https://nodejs.org/api/fs.html#fs_fs_stat_path_options_callback
* @property {function} lstat - https://nodejs.org/api/fs.html#fs_fs_lstat_path_options_callback
* @property {function} [readlink] - https://nodejs.org/api/fs.html#fs_fs_readlink_path_options_callback
* @property {function} [symlink] - https://nodejs.org/api/fs.html#fs_fs_symlink_target_path_type_callback
* @property {function} [chmod] - https://nodejs.org/api/fs.html#fs_fs_chmod_path_mode_callback
*/
/**
* @typedef {Object} PromiseFsClient
* @property {Object} promises
* @property {function} promises.readFile - https://nodejs.org/api/fs.html#fs_fspromises_readfile_path_options
* @property {function} promises.writeFile - https://nodejs.org/api/fs.html#fs_fspromises_writefile_file_data_options
* @property {function} promises.unlink - https://nodejs.org/api/fs.html#fs_fspromises_unlink_path
* @property {function} promises.readdir - https://nodejs.org/api/fs.html#fs_fspromises_readdir_path_options
* @property {function} promises.mkdir - https://nodejs.org/api/fs.html#fs_fspromises_mkdir_path_options
* @property {function} promises.rmdir - https://nodejs.org/api/fs.html#fs_fspromises_rmdir_path
* @property {function} promises.stat - https://nodejs.org/api/fs.html#fs_fspromises_stat_path_options
* @property {function} promises.lstat - https://nodejs.org/api/fs.html#fs_fspromises_lstat_path_options
* @property {function} [promises.readlink] - https://nodejs.org/api/fs.html#fs_fspromises_readlink_path_options
* @property {function} [promises.symlink] - https://nodejs.org/api/fs.html#fs_fspromises_symlink_target_path_type
* @property {function} [promises.chmod] - https://nodejs.org/api/fs.html#fs_fspromises_chmod_path_mode
*/
/**
* @typedef {CallbackFsClient | PromiseFsClient} FsClient
*/
/**
* @callback MessageCallback
* @param {string} message
* @returns {void | Promise<void>}
*/
/**
* @typedef {Object} GitAuth
* @property {string} [username]
* @property {string} [password]
* @property {Object<string, string>} [headers]
* @property {boolean} [cancel] Tells git to throw a `UserCanceledError` (instead of an `HttpError`).
*/
/**
* @callback AuthCallback
* @param {string} url
* @param {GitAuth} auth Might have some values if the URL itself originally contained a username or password.
* @returns {GitAuth | void | Promise<GitAuth | void>}
*/
/**
* @callback AuthFailureCallback
* @param {string} url
* @param {GitAuth} auth The credentials that failed
* @returns {GitAuth | void | Promise<GitAuth | void>}
*/
/**
* @callback AuthSuccessCallback
* @param {string} url
* @param {GitAuth} auth
* @returns {void | Promise<void>}
*/
/**
* @typedef {Object} SignParams
* @property {string} payload - a plaintext message
* @property {string} secretKey - an 'ASCII armor' encoded PGP key (technically can actually contain _multiple_ keys)
*/
/**
* @callback SignCallback
* @param {SignParams} args
* @return {{signature: string} | Promise<{signature: string}>} - an 'ASCII armor' encoded "detached" signature
*/
/**
* @typedef {Object} MergeDriverParams
* @property {Array<string>} branches
* @property {Array<string>} contents
* @property {string} path
*/
/**
* @callback MergeDriverCallback
* @param {MergeDriverParams} args
* @return {{cleanMerge: boolean, mergedText: string} | Promise<{cleanMerge: boolean, mergedText: string}>}
*/
/**
* @callback WalkerMap
* @param {string} filename
* @param {WalkerEntry[]} entries
* @returns {Promise<any>}
*/
/**
* @callback WalkerReduce
* @param {any} parent
* @param {any[]} children
* @returns {Promise<any>}
*/
/**
* @callback WalkerIterateCallback
* @param {WalkerEntry[]} entries
* @returns {Promise<any[]>}
*/
/**
* @callback WalkerIterate
* @param {WalkerIterateCallback} walk
* @param {IterableIterator<WalkerEntry[]>} children
* @returns {Promise<any[]>}
*/
/**
* @typedef {Object} RefUpdateStatus
* @property {boolean} ok
* @property {string} error
*/
/**
* @typedef {Object} PushResult
* @property {boolean} ok
* @property {?string} error
* @property {Object<string, RefUpdateStatus>} refs
* @property {Object<string, string>} [headers]
*/
/**
* @typedef {0|1} HeadStatus
*/
/**
* @typedef {0|1|2} WorkdirStatus
*/
/**
* @typedef {0|1|2|3} StageStatus
*/
/**
* @typedef {[string, HeadStatus, WorkdirStatus, StageStatus]} StatusRow
*/
/**
* @typedef {'push' | 'pop' | 'apply' | 'drop' | 'list' | 'clear'} StashOp the type of stash ops
*/
/**
* @typedef {'equal' | 'modify' | 'add' | 'remove' | 'unknown'} StashChangeType - when comparing WORKDIR to HEAD, 'remove' could mean 'untracked'
* @typedef {Object} ClientRef
* @property {string} ref The name of the ref
* @property {string} oid The SHA-1 object id the ref points to
*/
/**
* @typedef {Object} PrePushParams
* @property {string} remote The expanded name of target remote
* @property {string} url The URL address of target remote
* @property {ClientRef} localRef The ref which the client wants to push to the remote
* @property {ClientRef} remoteRef The ref which is known by the remote
*/
/**
* @callback PrePushCallback
* @param {PrePushParams} args
* @returns {boolean | Promise<boolean>} Returns false if push must be cancelled
*/
/**
* @typedef {Object} PostCheckoutParams
* @property {string} previousHead The SHA-1 object id of HEAD before checkout
* @property {string} newHead The SHA-1 object id of HEAD after checkout
* @property {'branch' | 'file'} type flag determining whether a branch or a set of files was checked out
*/
/**
* @callback PostCheckoutCallback
* @param {PostCheckoutParams} args
* @returns {void | Promise<void>}
*/
class BaseError extends Error {
  /**
   * Common base class for all isomorphic-git errors.
   * @param {string} message - human-readable error description
   */
  constructor(message) {
    super(message);
    // Setting this here allows TS to infer that all git errors have a `caller`
    // property and that its type is string.
    this.caller = '';
  }
  /**
   * Error objects aren't normally serializable, so expose a plain-object view.
   * @returns {{code: string, data: Object, caller: string, message: string, stack: string}}
   */
  toJSON() {
    const { code, data, caller, message, stack } = this;
    return { code, data, caller, message, stack }
  }
  /**
   * Rehydrate an error from the plain-object form produced by `toJSON`.
   * @param {Object} json
   * @returns {BaseError}
   */
  fromJSON(json) {
    const restored = new BaseError(json.message);
    restored.code = json.code;
    restored.data = json.data;
    restored.caller = json.caller;
    restored.stack = json.stack;
    return restored
  }
  /** Marker getter used to identify isomorphic-git errors. */
  get isIsomorphicGitError() {
    return true
  }
}
class UnmergedPathsError extends BaseError {
  /**
   * Thrown when an index operation is attempted while conflicted (unmerged)
   * paths are present and the caller did not opt in to allowing them.
   *
   * @param {Array<string>} filepaths - the paths that are in an unmerged state
   */
  constructor(filepaths) {
    super(
      // Bug fix: the previous template interpolated `${filepaths.toString}`
      // (the function object itself, rendering as "function toString() ...")
      // instead of the joined path list. It also left the quote around
      // 'git add/rm unbalanced.
      `Modifying the index is not possible because you have unmerged files: ${filepaths.join(
        ', '
      )}. Fix them up in the work tree, and then use 'git add/rm' as appropriate to mark resolution and make a commit.`
    );
    // `code`/`name` mirror the class name so errors survive serialization.
    this.code = this.name = UnmergedPathsError.code;
    this.data = { filepaths };
  }
}
/** @type {'UnmergedPathsError'} */
UnmergedPathsError.code = 'UnmergedPathsError';
class InternalError extends BaseError {
  /**
   * Thrown when isomorphic-git reaches a state that should be impossible;
   * the message asks the user to file a bug report.
   *
   * @param {string} message - details of the unexpected condition
   */
  constructor(message) {
    super(
      `An internal error caused this command to fail. Please file a bug report at https://github.com/isomorphic-git/isomorphic-git/issues with this error message: ${message}`
    );
    // `code`/`name` mirror the class name so errors survive serialization.
    this.code = this.name = InternalError.code;
    this.data = { message };
  }
}
/** @type {'InternalError'} */
InternalError.code = 'InternalError';
class UnsafeFilepathError extends BaseError {
  /**
   * Thrown when a filepath read from an index or tree contains path-traversal
   * sequences ("../" or "..\") that could escape the working directory.
   *
   * @param {string} filepath - the offending path
   */
  constructor(filepath) {
    super(`The filepath "${filepath}" contains unsafe character sequences`);
    // `code`/`name` mirror the class name so errors survive serialization.
    this.code = this.name = UnsafeFilepathError.code;
    this.data = { filepath };
  }
}
/** @type {'UnsafeFilepathError'} */
UnsafeFilepathError.code = 'UnsafeFilepathError';
// Modeled after https://github.com/tjfontaine/node-buffercursor
// but with the goal of being much lighter weight.
class BufferCursor {
  /**
   * A minimal sequential reader/writer over a Buffer. Every operation
   * advances a single internal position (`_start`).
   * @param {Buffer} buffer - the buffer to walk
   */
  constructor(buffer) {
    this.buffer = buffer;
    this._start = 0;
  }
  /** @returns {boolean} true once the position has reached the end of the buffer */
  eof() {
    return this._start >= this.buffer.length
  }
  /** @returns {number} the current position */
  tell() {
    return this._start
  }
  /** Move the position to absolute offset `n`. */
  seek(n) {
    this._start = n;
  }
  /** Consume the next `n` bytes and return them as a Buffer. */
  slice(n) {
    const end = this._start + n;
    const bytes = this.buffer.slice(this._start, end);
    this._start = end;
    return bytes
  }
  /** Consume `length` bytes and decode them with encoding `enc`. */
  toString(enc, length) {
    const end = this._start + length;
    const text = this.buffer.toString(enc, this._start, end);
    this._start = end;
    return text
  }
  /** Encode `value` at the current position, advancing by `length` bytes. */
  write(value, length, enc) {
    const bytesWritten = this.buffer.write(value, this._start, length, enc);
    this._start += length;
    return bytesWritten
  }
  /** Copy bytes [start, end) of `source` to the current position. */
  copy(source, start, end) {
    const bytesCopied = source.copy(this.buffer, this._start, start, end);
    this._start += bytesCopied;
    return bytesCopied
  }
  /** Read one unsigned byte and advance. */
  readUInt8() {
    const value = this.buffer.readUInt8(this._start);
    this._start += 1;
    return value
  }
  /** Write one unsigned byte and advance. */
  writeUInt8(value) {
    const offset = this.buffer.writeUInt8(value, this._start);
    this._start += 1;
    return offset
  }
  /** Read a big-endian uint16 and advance. */
  readUInt16BE() {
    const value = this.buffer.readUInt16BE(this._start);
    this._start += 2;
    return value
  }
  /** Write a big-endian uint16 and advance. */
  writeUInt16BE(value) {
    const offset = this.buffer.writeUInt16BE(value, this._start);
    this._start += 2;
    return offset
  }
  /** Read a big-endian uint32 and advance. */
  readUInt32BE() {
    const value = this.buffer.readUInt32BE(this._start);
    this._start += 4;
    return value
  }
  /** Write a big-endian uint32 and advance. */
  writeUInt32BE(value) {
    const offset = this.buffer.writeUInt32BE(value, this._start);
    this._start += 4;
    return offset
  }
}
/**
 * Three-way string comparator returning -1, 0, or 1.
 * (https://stackoverflow.com/a/40355107/2168416)
 */
function compareStrings(a, b) {
  if (a < b) return -1
  if (a > b) return 1
  return 0
}
/**
 * Comparator for entry-like objects: orders them by their `path` property.
 * (https://stackoverflow.com/a/40355107/2168416)
 */
function comparePath(a, b) {
  return compareStrings(a.path, b.path)
}
/**
 * From https://github.com/git/git/blob/master/Documentation/technical/index-format.txt
 *
 * 32-bit mode, split into (high to low bits)
 *
 * 4-bit object type
 * valid values in binary are 1000 (regular file), 1010 (symbolic link)
 * and 1110 (gitlink)
 *
 * 3-bit unused
 *
 * 9-bit unix permission. Only 0755 and 0644 are valid for regular files.
 * Symbolic links and gitlinks have value 0 in this field.
 *
 * @param {number} mode - raw filesystem mode
 * @returns {number} a canonical git mode (0o40000, 0o100644, 0o100755, etc.)
 */
function normalizeMode(mode) {
  // Note: BrowserFS will use -1 for "unknown"; clamp to zero so the
  // bitshift below behaves for non-positive modes.
  const rawType = mode > 0 ? mode >> 12 : 0;
  // 0100 = directory, 1000 = regular file, 1010 = symlink, 1110 = gitlink.
  // Anything else is coerced to "regular file".
  const validTypes = [0b0100, 0b1000, 0b1010, 0b1110];
  const type = validTypes.includes(rawType) ? rawType : 0b1000;
  // Only regular files carry permissions: any executable bit set => 0755,
  // otherwise 0644. Everything else gets its permission bits scrubbed.
  let permissions = 0;
  if (type === 0b1000) {
    permissions = mode & 0b001001001 ? 0o755 : 0o644;
  }
  return (type << 12) + permissions
}
const MAX_UINT32 = 2 ** 32;
/**
 * Resolve a (seconds, nanoseconds) timestamp pair from whichever inputs are
 * available, preferring an explicit pair, then milliseconds, then a Date.
 *
 * @param {number} [givenSeconds]
 * @param {number} [givenNanoseconds]
 * @param {number} [milliseconds]
 * @param {Date} [date] - only consulted when `milliseconds` is undefined
 * @returns {[number, number]} seconds and nanoseconds
 */
function SecondsNanoseconds(
  givenSeconds,
  givenNanoseconds,
  milliseconds,
  date
) {
  if (givenSeconds !== undefined && givenNanoseconds !== undefined) {
    return [givenSeconds, givenNanoseconds]
  }
  const ms = milliseconds === undefined ? date.valueOf() : milliseconds;
  const seconds = Math.floor(ms / 1000);
  const nanoseconds = (ms - seconds * 1000) * 1000000;
  return [seconds, nanoseconds]
}
/**
 * Normalize a stats-like object into the fixed set of 32-bit fields stored
 * in the Git index, deriving seconds/nanoseconds pairs when only `*Ms`
 * values or Date objects are available.
 */
function normalizeStats(e) {
  const [ctimeSeconds, ctimeNanoseconds] = SecondsNanoseconds(
    e.ctimeSeconds,
    e.ctimeNanoseconds,
    e.ctimeMs,
    e.ctime
  );
  const [mtimeSeconds, mtimeNanoseconds] = SecondsNanoseconds(
    e.mtimeSeconds,
    e.mtimeNanoseconds,
    e.mtimeMs,
    e.mtime
  );
  // Every field is truncated to 32 bits, matching the on-disk index format.
  const u32 = value => value % MAX_UINT32;
  return {
    ctimeSeconds: u32(ctimeSeconds),
    ctimeNanoseconds: u32(ctimeNanoseconds),
    mtimeSeconds: u32(mtimeSeconds),
    mtimeNanoseconds: u32(mtimeNanoseconds),
    dev: u32(e.dev),
    ino: u32(e.ino),
    mode: normalizeMode(u32(e.mode)),
    uid: u32(e.uid),
    gid: u32(e.gid),
    // size of -1 happens over a BrowserFS HTTP Backend that doesn't serve
    // Content-Length headers (like the Karma webserver) because BrowserFS
    // HTTP Backend uses HTTP HEAD requests to do fs.stat
    size: e.size > -1 ? u32(e.size) : 0,
  }
}
/**
 * Render an ArrayBuffer (or anything Uint8Array accepts) as a lowercase
 * hex string, zero-padding each byte to two digits.
 */
function toHex(buffer) {
  return Array.from(new Uint8Array(buffer), byte =>
    byte.toString(16).padStart(2, '0')
  ).join('')
}
/* eslint-env node, browser */
// Memoized result of the SubtleCrypto SHA-1 feature probe (null = not probed yet).
let supportsSubtleSHA1 = null;
/**
 * Compute the SHA-1 digest of `buffer` as a hex string, preferring the
 * native WebCrypto implementation when it's available and correct,
 * falling back to the pure-JS implementation otherwise.
 */
async function shasum(buffer) {
  if (supportsSubtleSHA1 === null) {
    supportsSubtleSHA1 = await testSubtleSHA1();
  }
  if (supportsSubtleSHA1) {
    return subtleSHA1(buffer)
  }
  return shasumSync(buffer)
}
// This is modeled after @dominictarr's "shasum" module,
// minus the 'json-stable-stringify' dependency and
// extra type-casting features.
/** Pure-JS SHA-1 of `buffer`, returned as a hex string. */
function shasumSync(buffer) {
  const hash = new Hash();
  hash.update(buffer);
  return hash.digest('hex')
}
/** SHA-1 of `buffer` via WebCrypto, returned as a hex string. */
async function subtleSHA1(buffer) {
  // `digest` yields an ArrayBuffer; convert it to lowercase hex.
  return toHex(await crypto.subtle.digest('SHA-1', buffer))
}
/**
 * Probe whether WebCrypto SHA-1 is usable. This is a rather crude method of
 * progressive enhancement: some browsers expose crypto.subtle.digest but
 * don't actually implement SHA-1, so check against the known digest of the
 * empty input.
 */
async function testSubtleSHA1() {
  try {
    const emptyDigest = await subtleSHA1(new Uint8Array([]));
    if (emptyDigest === 'da39a3ee5e6b4b0d3255bfef95601890afd80709') {
      return true
    }
  } catch (_) {
    // no bother — fall through to the pure-JS implementation
  }
  return false
}
// Unpack an index entry's 16-bit flags field:
// 1-bit assume-valid, 1-bit extended flag, 2-bit merge stage,
// 12-bit path length.
function parseCacheEntryFlags(bits) {
  return {
    assumeValid: (bits & 0b1000000000000000) !== 0,
    extended: (bits & 0b0100000000000000) !== 0,
    stage: (bits >> 12) & 0b11,
    nameLength: bits & 0b0000111111111111,
  }
}
/**
 * Pack an entry's flags into the 16-bit on-disk field.
 * NOTE: mutates `entry.flags` — `extended` is forced to false (must be zero
 * in index version 2) and `nameLength` is recomputed from the path's byte
 * length, capped at 0xFFF as the format requires.
 */
function renderCacheEntryFlags(entry) {
  const flags = entry.flags;
  flags.extended = false;
  flags.nameLength = Math.min(Buffer.from(entry.path).length, 0xfff);
  let bits = flags.nameLength & 0b111111111111;
  bits += (flags.stage & 0b11) << 12;
  if (flags.extended) bits += 0b0100000000000000;
  if (flags.assumeValid) bits += 0b1000000000000000;
  return bits
}
class GitIndex {
/*::
_entries: Map<string, CacheEntry>
_dirty: boolean // Used to determine if index needs to be saved to filesystem
*/
constructor(entries, unmergedPaths) {
this._dirty = false;
this._unmergedPaths = unmergedPaths || new Set();
this._entries = entries || new Map();
}
_addEntry(entry) {
if (entry.flags.stage === 0) {
entry.stages = [entry];
this._entries.set(entry.path, entry);
this._unmergedPaths.delete(entry.path);
} else {
let existingEntry = this._entries.get(entry.path);
if (!existingEntry) {
this._entries.set(entry.path, entry);
existingEntry = entry;
}
existingEntry.stages[entry.flags.stage] = entry;
this._unmergedPaths.add(entry.path);
}
}
static async from(buffer) {
if (Buffer.isBuffer(buffer)) {
return GitIndex.fromBuffer(buffer)
} else if (buffer === null) {
return new GitIndex(null)
} else {
throw new InternalError('invalid type passed to GitIndex.from')
}
}
static async fromBuffer(buffer) {
if (buffer.length === 0) {
throw new InternalError('Index file is empty (.git/index)')
}
const index = new GitIndex();
const reader = new BufferCursor(buffer);
const magic = reader.toString('utf8', 4);
if (magic !== 'DIRC') {
throw new InternalError(`Invalid dircache magic file number: ${magic}`)
}
// Verify shasum after we ensured that the file has a magic number
const shaComputed = await shasum(buffer.slice(0, -20));
const shaClaimed = buffer.slice(-20).toString('hex');
if (shaClaimed !== shaComputed) {
throw new InternalError(
`Invalid checksum in GitIndex buffer: expected ${shaClaimed} but saw ${shaComputed}`
)
}
const version = reader.readUInt32BE();
if (version !== 2) {
throw new InternalError(`Unsupported dircache version: ${version}`)
}
const numEntries = reader.readUInt32BE();
let i = 0;
while (!reader.eof() && i < numEntries) {
const entry = {};
entry.ctimeSeconds = reader.readUInt32BE();
entry.ctimeNanoseconds = reader.readUInt32BE();
entry.mtimeSeconds = reader.readUInt32BE();
entry.mtimeNanoseconds = reader.readUInt32BE();
entry.dev = reader.readUInt32BE();
entry.ino = reader.readUInt32BE();
entry.mode = reader.readUInt32BE();
entry.uid = reader.readUInt32BE();
entry.gid = reader.readUInt32BE();
entry.size = reader.readUInt32BE();
entry.oid = reader.slice(20).toString('hex');
const flags = reader.readUInt16BE();
entry.flags = parseCacheEntryFlags(flags);
// TODO: handle if (version === 3 && entry.flags.extended)
const pathlength = buffer.indexOf(0, reader.tell() + 1) - reader.tell();
if (pathlength < 1) {
throw new InternalError(`Got a path length of: ${pathlength}`)
}
// TODO: handle pathnames larger than 12 bits
entry.path = reader.toString('utf8', pathlength);
// Prevent malicious paths like "..\foo"
if (entry.path.includes('..\\') || entry.path.includes('../')) {
throw new UnsafeFilepathError(entry.path)
}
// The next bit is awkward. We expect 1 to 8 null characters
// such that the total size of the entry is a multiple of 8 bits.
// (Hence subtract 12 bytes for the header.)
let padding = 8 - ((reader.tell() - 12) % 8);
if (padding === 0) padding = 8;
while (padding--) {
const tmp = reader.readUInt8();
if (tmp !== 0) {
throw new InternalError(
`Expected 1-8 null characters but got '${tmp}' after ${entry.path}`
)
} else if (reader.eof()) {
throw new InternalError('Unexpected end of file')
}
}
// end of awkward part
entry.stages = [];
index._addEntry(entry);
i++;
}
return index
}
get unmergedPaths() {
return [...this._unmergedPaths]
}
get entries() {
return [...this._entries.values()].sort(comparePath)
}
get entriesMap() {
return this._entries
}
get entriesFlat() {
return [...this.entries].flatMap(entry => {
return entry.stages.length > 1 ? entry.stages.filter(x => x) : entry
})
}
*[Symbol.iterator]() {
for (const entry of this.entries) {
yield entry;
}
}
insert({ filepath, stats, oid, stage = 0 }) {
if (!stats) {
stats = {
ctimeSeconds: 0,
ctimeNanoseconds: 0,
mtimeSeconds: 0,
mtimeNanoseconds: 0,
dev: 0,
ino: 0,
mode: 0,
uid: 0,
gid: 0,
size: 0,
};
}
stats = normalizeStats(stats);
const bfilepath = Buffer.from(filepath);
const entry = {
ctimeSeconds: stats.ctimeSeconds,
ctimeNanoseconds: stats.ctimeNanoseconds,
mtimeSeconds: stats.mtimeSeconds,
mtimeNanoseconds: stats.mtimeNanoseconds,
dev: stats.dev,
ino: stats.ino,
// We provide a fallback value for `mode` here because not all fs
// implementations assign it, but we use it in GitTree.
// '100644' is for a "regular non-executable file"
mode: stats.mode || 0o100644,
uid: stats.uid,
gid: stats.gid,
size: stats.size,
path: filepath,
oid: oid,
flags: {
assumeValid: false,
extended: false,
stage,
nameLength: bfilepath.length < 0xfff ? bfilepath.length : 0xfff,
},
stages: [],
};
this._addEntry(entry);
this._dirty = true;
}
delete({ filepath }) {
if (this._entries.has(filepath)) {
this._entries.delete(filepath);
} else {
for (const key of this._entries.keys()) {
if (key.startsWith(filepath + '/')) {
this._entries.delete(key);
}
}
}
if (this._unmergedPaths.has(filepath)) {
this._unmergedPaths.delete(filepath);
}
this._dirty = true;
}
clear() {
this._entries.clear();
this._dirty = true;
}
has({ filepath }) {
return this._entries.has(filepath)
}
render() {
return this.entries
.map(entry => `${entry.mode.toString(8)} ${entry.oid} ${entry.path}`)
.join('\n')
}
static async _entryToBuffer(entry) {
const bpath = Buffer.from(entry.path);
// the fixed length + the filename + at least one null char => align by 8
const length = Math.ceil((62 + bpath.length + 1) / 8) * 8;
const written = Buffer.alloc(length);
const writer = new BufferCursor(written);
const stat = normalizeStats(entry);
writer.writeUInt32BE(stat.ctimeSeconds);
writer.writeUInt32BE(stat.ctimeNanoseconds);
writer.writeUInt32BE(stat.mtimeSeconds);
writer.writeUInt32BE(stat.mtimeNanoseconds);
writer.writeUInt32BE(stat.dev);
writer.writeUInt32BE(stat.ino);
writer.writeUInt32BE(stat.mode);
writer.writeUInt32BE(stat.uid);
writer.writeUInt32BE(stat.gid);
writer.writeUInt32BE(stat.size);
writer.write(entry.oid, 20, 'hex');
writer.writeUInt16BE(renderCacheEntryFlags(entry));
writer.write(entry.path, bpath.length, 'utf8');
return written
}
async toObject() {
const header = Buffer.alloc(12);
const writer = new BufferCursor(header);
writer.write('DIRC', 4, 'utf8');
writer.writeUInt32BE(2);
writer.writeUInt32BE(this.entriesFlat.length);
let entryBuffers = [];
for (const entry of this.entries) {
entryBuffers.push(GitIndex._entryToBuffer(entry));
if (entry.stages.length > 1) {
for (const stage of entry.stages) {
if (stage && stage !== entry) {
entryBuffers.push(GitIndex._entryToBuffer(stage));
}
}
}
}
entryBuffers = await Promise.all(entryBuffers);
const body = Buffer.concat(entryBuffers);
const main = Buffer.concat([header, body]);
const sum = await shasum(main);
return Buffer.concat([main, Buffer.from(sum, 'hex')])
}
}
/**
 * Decide whether an index entry's cached stat data is stale relative to a
 * fresh filesystem stat. Comparison based on the description in Paragraph 4
 * of https://www.kernel.org/pub/software/scm/git/docs/technical/racy-git.txt
 *
 * @param {object} entry - stats recorded in the index
 * @param {object} stats - stats freshly read from the filesystem
 * @param {boolean} [filemode=true] - whether to compare the mode field
 * @param {boolean} [trustino=true] - whether to compare the inode field
 * @returns {boolean} true when any compared field differs
 */
function compareStats(entry, stats, filemode = true, trustino = true) {
  const a = normalizeStats(entry);
  const b = normalizeStats(stats);
  if (filemode && a.mode !== b.mode) return true
  if (a.mtimeSeconds !== b.mtimeSeconds) return true
  if (a.ctimeSeconds !== b.ctimeSeconds) return true
  if (a.uid !== b.uid || a.gid !== b.gid) return true
  if (trustino && a.ino !== b.ino) return true
  return a.size !== b.size
}
// import Lock from '../utils.js'
// const lm = new LockManager()
// Module-level AsyncLock; created lazily by GitIndexManager.acquire.
let lock = null;
// Symbol key under which the index cache is stored on the user-provided
// `cache` object, so it can't collide with user-defined keys.
const IndexCache = Symbol('IndexCache');
/**
 * Creates a cache object to store GitIndex and file stats.
 * @returns {{map: Map, stats: Map}} `map` holds filepath -> GitIndex,
 * `stats` holds filepath -> stat data.
 */
function createCache() {
  const map = new Map();
  const stats = new Map();
  return { map, stats }
}
/**
 * Updates the cached index file by reading the file system and parsing the Git index.
 * @param {FSClient} fs - A file system implementation.
 * @param {string} filepath - The path to the Git index file.
 * @param {object} cache - The cache object to update.
 * @returns {Promise<void>}
 */
async function updateCachedIndexFile(fs, filepath, cache) {
  // Stat and read concurrently; the stat is captured alongside the content
  // so staleness checks compare against the bytes actually parsed.
  const [stat, rawIndexFile] = await Promise.all([
    fs.lstat(filepath),
    fs.read(filepath),
  ]);
  const index = await GitIndex.from(rawIndexFile);
  // cache the GitIndex object so we don't need to re-read it every time.
  cache.map.set(filepath, index);
  // Save the stat data for the index so we know whether the cached file is stale (modified by an outside process).
  cache.stats.set(filepath, stat);
}
/**
 * Determines whether the cached index file is stale by comparing file stats.
 * @param {FSClient} fs - A file system implementation.
 * @param {string} filepath - The path to the Git index file.
 * @param {object} cache - The cache object containing file stats.
 * @returns {Promise<boolean>} `true` if the index file is stale, otherwise `false`.
 */
async function isIndexStale(fs, filepath, cache) {
  const savedStats = cache.stats.get(filepath);
  // Nothing cached yet: treat as stale so the caller (re)reads from disk.
  if (savedStats === undefined) return true
  // NOTE(review): a cached value of `null` is treated as "never stale" —
  // presumably stored when the file couldn't be stat'ed; confirm against callers.
  if (savedStats === null) return false
  const currStats = await fs.lstat(filepath);
  // If the file can't be stat'ed now, assume the cached copy is still good.
  if (currStats === null) return false
  return compareStats(savedStats, currStats)
}
class GitIndexManager {
  /**
   * Manages access to the Git index file, ensuring thread-safe operations and caching.
   *
   * @param {object} opts - Options for acquiring the Git index.
   * @param {FSClient} opts.fs - A file system implementation.
   * @param {string} opts.gitdir - The path to the `.git` directory.
   * @param {object} opts.cache - A shared cache object for storing index data.
   * @param {boolean} [opts.allowUnmerged=true] - Whether to allow unmerged paths in the index.
   * @param {function(GitIndex): any} closure - A function to execute with the Git index.
   * @returns {Promise<any>} The result of the closure function.
   * @throws {UnmergedPathsError} If unmerged paths exist and `allowUnmerged` is `false`.
   */
  static async acquire({ fs, gitdir, cache, allowUnmerged = true }, closure) {
    // Lazily attach the index cache to the caller's cache object, keyed by
    // a Symbol so it cannot clash with user keys.
    if (!cache[IndexCache]) {
      cache[IndexCache] = createCache();
    }
    const filepath = `${gitdir}/index`;
    // The module-level AsyncLock serializes access per index filepath.
    if (lock === null) lock = new AsyncLock({ maxPending: Infinity });
    let result;
    let unmergedPaths = [];
    await lock.acquire(filepath, async () => {
      // Acquire a file lock while we're reading the index
      // to make sure other processes aren't writing to it
      // simultaneously, which could result in a corrupted index.
      // const fileLock = await Lock(filepath)
      const theIndexCache = cache[IndexCache];
      // Re-read the index from disk if an outside process has modified it.
      if (await isIndexStale(fs, filepath, theIndexCache)) {
        await updateCachedIndexFile(fs, filepath, theIndexCache);
      }
      const index = theIndexCache.map.get(filepath);
      unmergedPaths = index.unmergedPaths;
      if (unmergedPaths.length && !allowUnmerged)
        throw new UnmergedPathsError(unmergedPaths)
      result = await closure(index);
      // Only write back if the closure actually changed the index.
      if (index._dirty) {
        // Acquire a file lock while we're writing the index file
        // let fileLock = await Lock(filepath)
        const buffer = await index.toObject();
        await fs.write(filepath, buffer);
        // Update cached stat value
        theIndexCache.stats.set(filepath, await fs.lstat(filepath));
        index._dirty = false;
      }
    });
    return result
  }
}
/**
 * Return the final path segment, treating both '/' and '\' as separators.
 * A path with no separator is returned unchanged.
 */
function basename(path) {
  const separatorIndex = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\'));
  return separatorIndex > -1 ? path.slice(separatorIndex + 1) : path
}
/**
 * Return everything before the final path segment, treating both '/' and
 * '\' as separators. Yields '.' for bare names and '/' for root children.
 */
function dirname(path) {
  const separatorIndex = Math.max(path.lastIndexOf('/'), path.lastIndexOf('\\'));
  if (separatorIndex === -1) return '.'
  return separatorIndex === 0 ? '/' : path.slice(0, separatorIndex)
}
/*::
type Node = {
  type: string,
  fullpath: string,
  basename: string,
  metadata: Object, // mode, oid
  parent?: Node,
  children: Array<Node>
}
*/
/**
 * Convert a flat list of `{ path, ... }` records into a tree of inodes,
 * returned as a Map from fullpath -> node. The root is the '.' entry,
 * which is its own parent.
 */
function flatFileListToDirectoryStructure(files) {
  const inodes = new Map();
  function mkdir(name) {
    let dir = inodes.get(name);
    if (dir === undefined) {
      dir = {
        type: 'tree',
        fullpath: name,
        basename: basename(name),
        metadata: {},
        children: [],
      };
      // Register the node BEFORE recursing into the parent so that the
      // root '.' (whose dirname is itself) terminates instead of looping.
      inodes.set(name, dir);
      dir.parent = mkdir(dirname(name));
      // The root is its own parent; don't add it to its own children.
      if (dir.parent && dir.parent !== dir) dir.parent.children.push(dir);
    }
    return dir
  }
  function mkfile(name, metadata) {
    let file = inodes.get(name);
    if (file === undefined) {
      file = {
        type: 'blob',
        fullpath: name,
        basename: basename(name),
        metadata: metadata,
        // Creating the file implicitly creates any missing parent folders.
        parent: mkdir(dirname(name)),
        children: [],
      };
      if (file.parent) file.parent.children.push(file);
      inodes.set(name, file);
    }
    return file
  }
  mkdir('.');
  for (const file of files) {
    mkfile(file.path, file);
  }
  return inodes
}
/**
 * Map a canonical git mode to its object type.
 *
 * @param {number} mode
 * @returns {'tree'|'blob'|'commit'}
 * @throws {InternalError} for any unrecognized mode
 */
function mode2type(mode) {
  const types = {
    [0o040000]: 'tree',
    [0o100644]: 'blob',
    [0o100755]: 'blob',
    [0o120000]: 'blob', // symlinks are stored as blobs
    [0o160000]: 'commit', // gitlinks (submodules)
  };
  const type = types[mode];
  if (type === undefined) {
    throw new InternalError(`Unexpected GitTree entry mode: ${mode.toString(8)}`)
  }
  return type
}
// Walker backend that exposes the git index (staging area) as a read-only
// tree of entries; handed to walk() via the STAGE() factory below.
class GitWalkerIndex {
constructor({ fs, gitdir, cache }) {
// Snapshot the index once: convert its flat entry list into a nested
// directory structure; every read below resolves against this promise.
this.treePromise = GitIndexManager.acquire(
{ fs, gitdir, cache },
async function(index) {
return flatFileListToDirectoryStructure(index.entries)
}
);
const walker = this;
// Per-entry handle given out to walk() callbacks. Lazy fields use `false`
// as the "not computed yet" sentinel — NOT `undefined`, because a computed
// stat for a tree entry is legitimately `undefined` (see stat() below).
this.ConstructEntry = class StageEntry {
constructor(fullpath) {
this._fullpath = fullpath;
this._type = false;
this._mode = false;
this._stat = false;
this._oid = false;
}
async type() {
return walker.type(this)
}
async mode() {
return walker.mode(this)
}
async stat() {
return walker.stat(this)
}
async content() {
return walker.content(this)
}
async oid() {
return walker.oid(this)
}
};
}
// Lists the sorted child paths of a tree entry; returns null for blobs and
// for paths not present in the index snapshot (nothing to recurse into).
async readdir(entry) {
const filepath = entry._fullpath;
const tree = await this.treePromise;
const inode = tree.get(filepath);
if (!inode) return null
if (inode.type === 'blob') return null
if (inode.type !== 'tree') {
throw new Error(`ENOTDIR: not a directory, scandir '${filepath}'`)
}
const names = inode.children.map(inode => inode.fullpath);
names.sort(compareStrings);
return names
}
// Lazily computed: stat() fills entry._type as a side effect.
async type(entry) {
if (entry._type === false) {
await entry.stat();
}
return entry._type
}
// Lazily computed: stat() fills entry._mode as a side effect.
async mode(entry) {
if (entry._mode === false) {
await entry.stat();
}
return entry._mode
}
// Computes and caches type/mode/stat for the entry in one pass.
async stat(entry) {
if (entry._stat === false) {
const tree = await this.treePromise;
const inode = tree.get(entry._fullpath);
if (!inode) {
throw new Error(
`ENOENT: no such file or directory, lstat '${entry._fullpath}'`
)
}
// Synthesized tree inodes carry empty metadata; only file entries have
// real stats recorded in the index.
const stats = inode.type === 'tree' ? {} : normalizeStats(inode.metadata);
entry._type = inode.type === 'tree' ? 'tree' : mode2type(stats.mode);
entry._mode = stats.mode;
if (inode.type === 'tree') {
// Trees have no stats; `undefined` here is a final value, which is why
// the "not computed" sentinel above must be `false`.
entry._stat = undefined;
} else {
entry._stat = stats;
}
}
return entry._stat
}
async content(_entry) {
// Cannot get content for an index entry
}
// Returns the oid recorded in the index for this entry (trees have no
// oid in their synthesized metadata, so this yields undefined for them).
async oid(entry) {
if (entry._oid === false) {
const tree = await this.treePromise;
const inode = tree.get(entry._fullpath);
entry._oid = inode.metadata.oid;
}
return entry._oid
}
}
// Part of the code-splitting / tree-shaking scheme: commands/walk.js depends
// only on this symbol, and the exported Walker objects are opaque — their
// single (non-enumerable) property is keyed by it, so a Walker can be passed
// to walk() while remaining unusable anywhere else.
const GitWalkSymbol = Symbol('GitWalkSymbol');
// @ts-check
/**
 * Returns an opaque Walker for the staging area (the git index).
 *
 * @returns {Walker}
 */
function STAGE() {
  const walker = Object.create(null);
  Object.defineProperty(walker, GitWalkSymbol, {
    value({ fs, gitdir, cache }) {
      return new GitWalkerIndex({ fs, gitdir, cache });
    },
  });
  return Object.freeze(walker);
}
// @ts-check
/** Thrown when a requested ref, oid, file, or other object cannot be located. */
class NotFoundError extends BaseError {
  /**
   * @param {string} what - description of the thing that could not be found
   */
  constructor(what) {
    super(`Could not find ${what}.`);
    this.name = NotFoundError.code;
    this.code = NotFoundError.code;
    this.data = { what };
  }
}
/** @type {'NotFoundError'} */
NotFoundError.code = 'NotFoundError';
/** Thrown when a git object turns out to be a different type than expected. */
class ObjectTypeError extends BaseError {
  /**
   * @param {string} oid
   * @param {'blob'|'commit'|'tag'|'tree'} actual
   * @param {'blob'|'commit'|'tag'|'tree'} expected
   * @param {string} [filepath]
   */
  constructor(oid, actual, expected, filepath) {
    // Bug fix: the optional "at <filepath>" clause previously ran straight
    // into the next word ("...at some/pathwas anticipated..."); the trailing
    // space now lives inside the conditional segment so the no-filepath
    // message is unchanged.
    super(
      `Object ${oid} ${
        filepath ? `at ${filepath} ` : ''
      }was anticipated to be a ${expected} but it is a ${actual}.`
    );
    this.name = ObjectTypeError.code;
    this.code = ObjectTypeError.code;
    this.data = { oid, actual, expected, filepath };
  }
}
/** @type {'ObjectTypeError'} */
ObjectTypeError.code = 'ObjectTypeError';
/** Thrown when a value that should be a 40-character hex object id is not. */
class InvalidOidError extends BaseError {
  /**
   * @param {string} value - the malformed oid string
   */
  constructor(value) {
    super(`Expected a 40-char hex object id but saw "${value}".`);
    this.name = InvalidOidError.code;
    this.code = InvalidOidError.code;
    this.data = { value };
  }
}
/** @type {'InvalidOidError'} */
InvalidOidError.code = 'InvalidOidError';
/** Thrown when a remote's config section lacks a fetch refspec. */
class NoRefspecError extends BaseError {
  /**
   * @param {string} remote - name of the remote missing a fetch refspec
   */
  constructor(remote) {
    // Continuation lines of this template are intentionally unindented:
    // they are part of the user-facing message.
    const message = `Could not find a fetch refspec for remote "${remote}". Make sure the config file has an entry like the following:
[remote "${remote}"]
\tfetch = +refs/heads/*:refs/remotes/origin/*
`;
    super(message);
    this.name = NoRefspecError.code;
    this.code = NoRefspecError.code;
    this.data = { remote };
  }
}
/** @type {'NoRefspecError'} */
NoRefspecError.code = 'NoRefspecError';
/**
 * Parser/serializer for the `.git/packed-refs` file format: one `<oid> <ref>`
 * per line, optional `#` comment lines, and `^<oid>` lines giving the peeled
 * commit for the annotated tag on the preceding line.
 */
class GitPackedRefs {
  constructor(text) {
    this.refs = new Map();
    this.parsedConfig = [];
    if (text) {
      // Tracks the ref named on the most recent oid line, so a following
      // '^' (peeled) line can be attached to it.
      let currentRef = null;
      this.parsedConfig = text
        .trim()
        .split('\n')
        .map(line => {
          if (/^\s*#/.test(line)) {
            return { line, comment: true };
          }
          if (line.startsWith('^')) {
            // Peeled oid for the annotated tag on the previous line; the
            // `<ref>^{}` key mirrors `git show-ref --tags -d` output.
            const peeledOid = line.slice(1);
            this.refs.set(currentRef + '^{}', peeledOid);
            return { line, ref: currentRef, peeled: peeledOid };
          }
          // Ordinary `<oid> <ref>` line.
          const sep = line.indexOf(' ');
          const oid = line.slice(0, sep);
          currentRef = line.slice(sep + 1);
          this.refs.set(currentRef, oid);
          return { line, ref: currentRef, oid };
        });
    }
  }
  static from(text) {
    return new GitPackedRefs(text);
  }
  // Removes a ref from both the line-oriented model and the lookup map.
  delete(ref) {
    this.parsedConfig = this.parsedConfig.filter(entry => entry.ref !== ref);
    this.refs.delete(ref);
  }
  toString() {
    const lines = this.parsedConfig.map(({ line }) => line);
    return lines.join('\n') + '\n';
  }
}
/**
 * A single parsed refspec rule, e.g. `+refs/heads/*:refs/remotes/origin/*`.
 * `matchPrefix` is true for glob rules, where remotePath/localPath are the
 * prefixes on either side of the '*'.
 */
class GitRefSpec {
  constructor({ remotePath, localPath, force, matchPrefix }) {
    this.remotePath = remotePath;
    this.localPath = localPath;
    this.force = force;
    this.matchPrefix = matchPrefix;
  }
  /**
   * Parses a refspec string.
   * @param {string} refspec
   * @throws {InternalError} when only one side of the refspec is a glob
   */
  static from(refspec) {
    const parts = refspec.match(/^(\+?)(.*?)(\*?):(.*?)(\*?)$/).slice(1);
    const [forceMatch, remotePath, remoteGlobMatch, localPath, localGlobMatch] = parts;
    const remoteIsGlob = remoteGlobMatch === '*';
    const localIsGlob = localGlobMatch === '*';
    // validate
    // TODO: Make this check more nuanced, and depend on whether this is a fetch refspec or a push refspec
    if (remoteIsGlob !== localIsGlob) {
      throw new InternalError('Invalid refspec');
    }
    // TODO: We need to run resolveRef on both paths to expand them to their full name.
    return new GitRefSpec({
      remotePath,
      localPath,
      force: forceMatch === '+',
      matchPrefix: remoteIsGlob,
    });
  }
  // Maps a remote ref name to its local name, or null if this rule doesn't apply.
  translate(remoteBranch) {
    if (!this.matchPrefix) {
      return remoteBranch === this.remotePath ? this.localPath : null;
    }
    if (remoteBranch.startsWith(this.remotePath)) {
      return this.localPath + remoteBranch.replace(this.remotePath, '');
    }
    return null;
  }
  // Maps a local ref name back to its remote name, or null if no match.
  reverseTranslate(localBranch) {
    if (!this.matchPrefix) {
      return localBranch === this.localPath ? this.remotePath : null;
    }
    if (localBranch.startsWith(this.localPath)) {
      return this.remotePath + localBranch.replace(this.localPath, '');
    }
    return null;
  }
}
/** An ordered collection of GitRefSpec rules, applied in sequence. */
class GitRefSpecSet {
  constructor(rules = []) {
    this.rules = rules;
  }
  static from(refspecs) {
    // GitRefSpec.from might throw on an invalid refspec.
    return new GitRefSpecSet(refspecs.map(refspec => GitRefSpec.from(refspec)));
  }
  add(refspec) {
    this.rules.push(GitRefSpec.from(refspec)); // might throw
  }
  // Returns [remoteRef, localRef] pairs for every remote ref matched by any
  // rule (a ref matched by several rules appears once per matching rule).
  translate(remoteRefs) {
    const pairs = [];
    for (const rule of this.rules) {
      for (const remoteRef of remoteRefs) {
        const localRef = rule.translate(remoteRef);
        if (localRef) {
          pairs.push([remoteRef, localRef]);
        }
      }
    }
    return pairs;
  }
  // Returns the local name for one remote ref; the LAST matching rule wins.
  translateOne(remoteRef) {
    let match = null;
    for (const rule of this.rules) {
      const localRef = rule.translate(remoteRef);
      if (localRef) {
        match = localRef;
      }
    }
    return match;
  }
  // Local prefixes (without trailing slash) claimed by the glob rules.
  localNamespaces() {
    return this.rules
      .filter(rule => rule.matchPrefix)
      .map(rule => rule.localPath.replace(/\/$/, ''));
  }
}
/**
 * Comparator for ref names that sorts a peeled-tag entry (`name^{}`)
 * immediately after its base name. Note it never returns 0: when the
 * stripped names are equal the `^{}` variant is ordered last.
 * https://stackoverflow.com/a/40355107/2168416
 */
function compareRefNames(a, b) {
  const strippedA = a.replace(/\^\{\}$/, '');
  const strippedB = b.replace(/\^\{\}$/, '');
  if (strippedA < strippedB) return -1;
  if (strippedA > strippedB) return 1;
  return a.endsWith('^{}') ? 1 : -1;
}
// This is straight from parse_unit_factor in config.c of canonical git.
// Parses numeric config values, honoring the k/m/g unit suffixes.
const num = val => {
  if (typeof val === 'number') return val;
  const text = val.toLowerCase();
  // No radix on purpose: mirrors git's strtol-style parsing (incl. 0x hex).
  let n = parseInt(text);
  if (text.endsWith('k')) n *= 1024;
  if (text.endsWith('m')) n *= 1024 * 1024;
  if (text.endsWith('g')) n *= 1024 * 1024 * 1024;
  return n;
};
// This is straight from git_parse_maybe_bool_text in config.c of canonical git.
// Parses boolean config values; throws on anything unrecognized.
const bool = val => {
  if (typeof val === 'boolean') return val;
  val = val.trim().toLowerCase();
  switch (val) {
    case 'true':
    case 'yes':
    case 'on':
      return true;
    case 'false':
    case 'no':
    case 'off':
      return false;
    default:
      throw Error(
        `Expected 'true', 'false', 'yes', 'no', 'on', or 'off', but got ${val}`
      );
  }
};
// Coercion schema for well-known config variables: section → variable name →
// parser (the `bool`/`num` helpers above). Variables not listed here are
// returned as raw strings by GitConfig#get.
const schema = {
core: {
filemode: bool,
bare: bool,
logallrefupdates: bool,
symlinks: bool,
ignorecase: bool,
bigFileThreshold: num,
},
};
// https://git-scm.com/docs/git-config#_syntax
// section starts with [ and ends with ]
// section is alphanumeric (ASCII) with - and .
// section is case insensitive
// subsection is optional
// subsection is specified after section and one or more spaces
// subsection is specified between double quotes
// Captures: [1] section name, [2] subsection (without the quotes).
const SECTION_LINE_REGEX = /^\[([A-Za-z0-9-.]+)(?: "(.*)")?\]$/;
const SECTION_REGEX = /^[A-Za-z0-9-.]+$/;
// variable lines contain a name, and equal sign and then a value
// variable lines can also only contain a name (the implicit value is a boolean true)
// variable name is alphanumeric (ASCII) with -
// variable name starts with an alphabetic character
// variable name is case insensitive
// Captures: [1] variable name, [2] raw value (absent for bare names).
const VARIABLE_LINE_REGEX = /^([A-Za-z][A-Za-z-]*)(?: *= *(.*))?$/;
const VARIABLE_NAME_REGEX = /^[A-Za-z][A-Za-z-]*$/;
// Comments start with either # or ; and extend to the end of line
// Captures: [1] value before the comment, [2] the comment (incl. leading spaces).
const VARIABLE_VALUE_COMMENT_REGEX = /^(.*?)( *[#;].*)$/;
// Parses a `[section "subsection"]` header line. Returns [section, subsection]
// (subsection may be undefined) or null when the line is not a section header.
const extractSectionLine = line => {
  const matches = SECTION_LINE_REGEX.exec(line);
  if (matches == null) return null;
  // Keep just the two capture groups: section and (optional) subsection.
  return matches.slice(1, 3);
};
// Parses a `name = value` (or bare `name`) line. Returns [name, value] with
// trailing comments and quoting removed, or null when the line is not a
// variable assignment.
const extractVariableLine = line => {
  const matches = VARIABLE_LINE_REGEX.exec(line);
  if (matches == null) return null;
  // A bare name carries an implicit boolean value of 'true'.
  const [name, rawValue = 'true'] = matches.slice(1);
  const value = removeQuotes(removeComments(rawValue));
  return [name, value];
};
// Strips a trailing `#`/`;` comment from a raw config value, unless the
// comment marker is inside a quoted string.
const removeComments = rawValue => {
  const commentMatches = VARIABLE_VALUE_COMMENT_REGEX.exec(rawValue);
  if (commentMatches == null) return rawValue;
  const [valueWithoutComment, comment] = commentMatches.slice(1);
  // An odd number of quotes on each side means the marker sits inside an
  // open quoted string, so the "comment" is actually part of the value.
  const markerIsQuoted =
    hasOddNumberOfQuotes(valueWithoutComment) && hasOddNumberOfQuotes(comment);
  return markerIsQuoted ? `${valueWithoutComment}${comment}` : valueWithoutComment;
};
// True when `text` contains an odd number of unescaped double quotes
// (i.e. a quoted string is left open).
const hasOddNumberOfQuotes = text => {
  const unescapedQuotes = text.match(/(?:^|[^\\])"/g);
  const count = unescapedQuotes ? unescapedQuotes.length : 0;
  return count % 2 === 1;
};
// Removes unescaped double quotes and unescapes `\"` sequences, leaving all
// other characters intact.
const removeQuotes = text => {
  let result = '';
  for (let idx = 0; idx < text.length; idx++) {
    const c = text[idx];
    const isUnescapedQuote = c === '"' && text[idx - 1] !== '\\';
    const escapesAQuote = c === '\\' && text[idx + 1] === '"';
    if (!isUnescapedQuote && !escapesAQuote) {
      result += c;
    }
  }
  return result;
};
// Lowercases a string, passing null/undefined through as null.
const lower = text => (text == null ? null : text.toLowerCase());
// Builds the canonical dotted lookup key for a config variable. Section and
// name are case-insensitive (lowercased); the subsection keeps its case.
// Null/undefined parts are omitted.
const getPath = (section, subsection, name) => {
  const parts = [lower(section), subsection, lower(name)];
  return parts.filter(part => part != null).join('.');
};
// Splits a dotted config path into its parts: the first segment is the
// section, the last is the variable name, and anything in between is the
// subsection. Also precomputes the canonical full path and section path.
const normalizePath = path => {
  const segments = path.split('.');
  const section = segments.shift();
  const name = segments.pop();
  const subsection = segments.length > 0 ? segments.join('.') : undefined;
  return {
    section,
    subsection,
    name,
    path: getPath(section, subsection, name),
    sectionPath: getPath(section, subsection, null),
    isSection: Boolean(section),
  };
};
// Index of the last element satisfying `callback`, or -1. Deliberately visits
// every element front-to-back (matching the original reduce-based version)
// rather than short-circuiting from the end.
const findLastIndex = (array, callback) => {
  let lastIndex = -1;
  array.forEach((item, index) => {
    if (callback(item)) {
      lastIndex = index;
    }
  });
  return lastIndex;
};
// Note: there are a LOT of edge cases that aren't covered (e.g. keys in sections that also
// have subsections, [include] directives, etc.
class GitConfig {
constructor(text) {
let section = null;
let subsection = null;
this.parsedConfig = text
? text.split('\n').map(line => {
let name = null;
let value = null;
const trimmedLine = line.trim();
const extractedSection = extractSectionLine(trimmedLine);
const isSection = extractedSection != null;
if (isSection) {
;[section, subsection] = extractedSection;
} else {
const extractedVariable = extractVariableLine(trimmedLine);
const isVariable = extractedVariable != null;
if (isVariable) {
;[name, value] = extractedVariable;
}
}
const path = getPath(section, subsection, name);
return { line, isSection, section, subsection, name, value, path }
})
: [];
}
static from(text) {
return new GitConfig(text)
}
async get(path, getall = false) {
const normalizedPath = normalizePath(path).path;
const allValues = this.parsedConfig
.filter(config => config.path === normalizedPath)
.map(({ section, name, value }) => {
const fn = schema[section] && schema[section][name];
return fn ? fn(value) : value
});
return getall ? allValues : allValues.pop()
}
async getall(path) {
return this.get(path, true)
}
async getSubsections(section) {
return this.parsedConfig
.filter(config => config.isSection && config.section === section)
.map(config => config.subsection)
}
async deleteSection(section, subsection) {
this.parsedConfig = this.parsedCon