// tiny-commit-walker — pack.js (compiled JavaScript output, ~348 lines / 12.4 kB)
"use strict";
// TypeScript-emitted async/await helper: drives a generator compiled from an
// `async` function body. Each yielded value is adopted as a promise; the outer
// promise resolves with the generator's return value and rejects if the
// generator (or a yielded promise) throws.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const LRU = require("lru-cache");
const fs = require("fs");
// Standalone polyfill package for util.promisify (pre-Node-8 support).
const promisify = require("util.promisify");
const path = require("path");
const zlib = require("zlib");
// Sibling module providing readBaseOffset/patchDelta for delta-encoded objects.
const delta_1 = require("./delta");
// Promisified fs/zlib primitives used by the async code paths below.
const statAsync = promisify(fs.stat);
const readFileAsync = promisify(fs.readFile);
const readDirAsync = promisify(fs.readdir);
const openFileAsync = promisify(fs.open);
const closeFileAsync = promisify(fs.close);
const readAsync = promisify(fs.read);
const inflateAsync = promisify(zlib.inflate);
// Git packfile object type codes (TypeScript enum emit: each entry gets both a
// forward name->code and a reverse code->name mapping). Code 5 is reserved in
// the pack format and intentionally absent.
var ObjectTypeEnum;
(function (ObjectTypeEnum) {
    const codes = {
        BAD: -1,
        NONE: 0,
        COMMIT: 1,
        TREE: 2,
        BLOB: 3,
        TAG: 4,
        OFS_DELTA: 6,
        REF_DELTA: 7,
        ANY: 8,
        MAX: 9,
    };
    for (const name of Object.keys(codes)) {
        const code = codes[name];
        ObjectTypeEnum[name] = code;
        ObjectTypeEnum[code] = name; // reverse mapping, as tsc emits
    }
})(ObjectTypeEnum || (ObjectTypeEnum = {}));
// Cache of initialized Packs instances keyed by git dir, capped at 4 entries.
// NOTE(review): `LRU(max)` factory-call form is the pre-v7 lru-cache API —
// confirm the pinned dependency version.
const packsCache = LRU(4);
/**
 * Decodes a packed-object entry header from `buff` (bytes read at the
 * object's offset in a *.pack file).
 *
 * The first byte carries the 3-bit type in bits 4-6 and the low 4 bits of
 * the inflated size; while the continuation bit (0x80) is set, each
 * following byte contributes 7 more size bits (little-endian groups).
 * Sets `type`, `size` (inflated size) and `offset` (header length in bytes).
 */
class PackedObject {
    constructor(idx, buff) {
        let byte = buff[0];
        const type = (byte >> 4) & 7;
        let size = byte & 15;
        let pos = 1;
        let factor = 16; // 2^4, then 2^11, 2^18, ... per continuation byte
        while (byte & 0x80) {
            byte = buff[pos++];
            size += (byte & 0x7f) * factor;
            factor *= 128;
        }
        this.type = type;
        this.size = size;
        this.offset = pos;
    }
}
/**
 * Parses a git pack index (*.idx) buffer and records, for every listed
 * object hash, the byte offset of that object inside the companion *.pack
 * file identified by `fileIndex`.
 *
 * Supports idx version 1 (fanout + interleaved offset/hash entries) and
 * version 2 (magic 0xff744f63; fanout, then hash table, CRC table, 31-bit
 * offset table and an optional 64-bit offset table for packs > 2 GiB).
 *
 * Bug fix: the v2 large-offset branch previously read from position
 * `off64 * 4294967296` (multiplier misplaced inside the read call) and used
 * a 16-bit read for the low word. Per the idx-v2 spec, when the MSB of a
 * 31-bit table entry is set, its low 31 bits index into a table of 8-byte
 * big-endian offsets; both 32-bit halves must be read from there.
 *
 * @param {Buffer} idxFileBuffer - full contents of the .idx file.
 * @param {number} fileIndex - index of the pack file this .idx belongs to.
 * @param {Map<string, {offset: number, fileIndex: number}>} map - out-param.
 */
function setupPackedIndexMap(idxFileBuffer, fileIndex, map) {
    const readHashAt = (pos) => idxFileBuffer.slice(pos, pos + 20).toString('hex');
    let idxVersion;
    // The fanout table has 256 uint32 entries; its last entry (index 255)
    // is the total object count. v2 files prepend an 8-byte header.
    let index = 255 * 4;
    if (idxFileBuffer.readUInt32BE(0) === 0xff744f63) {
        idxVersion = idxFileBuffer.readUInt32BE(4);
        index += 8;
    }
    else {
        idxVersion = 1;
    }
    const n = idxFileBuffer.readUInt32BE(index);
    index += 4;
    if (idxVersion > 1) {
        // After the fanout: n * 20-byte hashes, n * 4-byte CRCs,
        // n * 4-byte offsets, then the optional 64-bit offset table.
        let off32 = index + n * 24;
        const off64Base = off32 + n * 4;
        for (let i = 0; i < n; i++) {
            const hash = readHashAt(index);
            index += 20;
            let offset = idxFileBuffer.readUInt32BE(off32);
            off32 += 4;
            if (offset & 0x80000000) {
                // MSB set: low 31 bits index into the 64-bit offset table.
                const p = off64Base + (offset & 0x7fffffff) * 8;
                offset = idxFileBuffer.readUInt32BE(p) * 4294967296
                    + idxFileBuffer.readUInt32BE(p + 4);
            }
            map.set(hash, { offset, fileIndex });
        }
    }
    else {
        // v1 layout: n entries of (4-byte offset, 20-byte hash).
        for (let i = 0; i < n; i++) {
            const offset = idxFileBuffer.readUInt32BE(index);
            const hash = readHashAt(index += 4);
            index += 20;
            map.set(hash, { offset, fileIndex });
        }
    }
}
// In-flight Packs.initialize() promises keyed by git dir, so concurrent
// callers for the same repository share a single initialization.
const processings = {};
/**
 * Read access to a repository's packed git objects (*.pack/*.idx pairs in
 * <gitDir>/objects/pack). Built from every .idx file found there;
 * `unpackGitObject`/`unpackGitObjectSync` inflate objects by hash on demand,
 * resolving OFS_DELTA and REF_DELTA chains recursively.
 */
class Packs {
    /**
     * @param {string} packDir - directory holding the *.pack/*.idx files.
     *   NOTE(review): the "no packs" fallback paths pass the git dir itself
     *   here; harmless since packDir is only used when pack files exist —
     *   confirm intended.
     * @param {string[]} packFileNames - pack file base names (no extension).
     * @param {Map<string, {offset: number, fileIndex: number}>} packedIndexMap
     *   - object hash (40-char hex) -> location in the pack files.
     */
    constructor(packDir, packFileNames = [], packedIndexMap = new Map()) {
        this.packDir = packDir;
        this.packFileNames = packFileNames;
        this.packedIndexMap = packedIndexMap;
        // Inflated object buffers keyed by "fileIndex:offset".
        this._packedObjectCache = new Map();
        this.hasPackFiles = !!this.packFileNames.length;
    }
    /**
     * Asynchronously creates (or returns a cached) Packs for `gitDir`.
     * Concurrent calls for the same dir share one in-flight promise via
     * the module-level `processings` table.
     */
    static initialize(gitDir) {
        return __awaiter(this, void 0, void 0, function* () {
            const packs = packsCache.get(gitDir);
            if (packs)
                return packs;
            if (processings[gitDir])
                return processings[gitDir];
            const promise = this._initialize(gitDir);
            processings[gitDir] = promise;
            return yield promise.then(packs => {
                delete processings[gitDir];
                // Only cache a truthy (successfully built) instance.
                packs && packsCache.set(gitDir, packs);
                return packs;
            });
        });
    }
    // Scans <gitDir>/objects/pack for *.idx files and builds the offset map.
    // Returns an empty Packs when the directory is missing or holds no packs.
    static _initialize(gitDir) {
        return __awaiter(this, void 0, void 0, function* () {
            const packDir = path.join(gitDir, 'objects', 'pack');
            let fileNames;
            try {
                fileNames = (yield readDirAsync(packDir))
                    .filter(name => /\.idx$/.test(name))
                    .map(name => name.split(".").shift());
            }
            catch (e) {
                // Pack dir absent/unreadable: repository uses loose objects only.
                return new Packs(gitDir);
            }
            if (!fileNames.length) {
                return new Packs(gitDir);
            }
            const packedIndexMap = new Map();
            for (let i = 0; i < fileNames.length; i++) {
                const buff = yield readFileAsync(path.join(packDir, fileNames[i] + ".idx"));
                setupPackedIndexMap(buff, i, packedIndexMap);
            }
            return new Packs(packDir, fileNames, packedIndexMap);
        });
    }
    // Synchronous variant of initialize(); caches every outcome, including
    // the empty-Packs fallbacks.
    static initializeSync(gitDir) {
        let packs = packsCache.get(gitDir);
        if (packs) {
            return packs;
        }
        const packDir = path.join(gitDir, 'objects', 'pack');
        let fileNames;
        try {
            fileNames = fs.readdirSync(packDir)
                .filter(name => /\.idx$/.test(name))
                .map(name => name.split(".").shift());
        }
        catch (e) {
            packs = new Packs(gitDir);
            packsCache.set(gitDir, packs);
            return packs;
        }
        if (!fileNames.length) {
            packs = new Packs(gitDir);
            packsCache.set(gitDir, packs);
            return packs;
        }
        const packedIndexMap = new Map();
        for (let i = 0; i < fileNames.length; i++) {
            const buff = fs.readFileSync(path.join(packDir, fileNames[i] + ".idx"));
            setupPackedIndexMap(buff, i, packedIndexMap);
        }
        packs = new Packs(packDir, fileNames, packedIndexMap);
        packsCache.set(gitDir, packs);
        return packs;
    }
    // Looks up a hash in the index map; throws if the object is not packed.
    _getPackedIndexFromCache(hash) {
        const idx = this.packedIndexMap.get(hash);
        if (!idx) {
            throw new Error(`${hash} is not found.`);
        }
        return idx;
    }
    // Returns a previously inflated buffer for this location, if cached.
    // (Method name typo "Cach" kept: part of the emitted interface.)
    _getPackedObjectBufferFromCach(idx) {
        return this._packedObjectCache.get(`${idx.fileIndex}:${idx.offset}`);
    }
    // Caches an inflated buffer for later reuse. (Typo "Buffr" kept.)
    _setPackedObjectBuffrToCache(idx, buff) {
        return this._packedObjectCache.set(`${idx.fileIndex}:${idx.offset}`, buff);
    }
    // Path of the *.pack file containing the object at `idx`.
    _getPackFilePath(idx) {
        return path.join(this.packDir, this.packFileNames[idx.fileIndex] + '.pack');
    }
    /**
     * Asynchronously inflates the object with the given hash. Opens the
     * owning pack file, delegates to _unpackGitObject, and always closes
     * the descriptor (even on error).
     * @returns {Promise<Buffer>} the inflated object contents.
     */
    unpackGitObject(hash) {
        return __awaiter(this, void 0, void 0, function* () {
            const idx = this._getPackedIndexFromCache(hash);
            let dst = this._getPackedObjectBufferFromCach(idx);
            if (dst) {
                return dst;
            }
            const fd = yield openFileAsync(this._getPackFilePath(idx), 'r');
            try {
                dst = yield this._unpackGitObject(fd, idx);
            }
            catch (e) {
                // Redundant rethrow; kept so the finally-close still runs.
                throw e;
            }
            finally {
                yield closeFileAsync(fd);
            }
            return dst;
        });
    }
    // Synchronous twin of unpackGitObject().
    unpackGitObjectSync(hash) {
        const idx = this._getPackedIndexFromCache(hash);
        let dst = this._getPackedObjectBufferFromCach(idx);
        if (dst) {
            return dst;
        }
        const fd = fs.openSync(this._getPackFilePath(idx), 'r');
        try {
            dst = this._unpackGitObjectSync(fd, idx);
        }
        catch (e) {
            throw e;
        }
        finally {
            fs.closeSync(fd);
        }
        return dst;
    }
    // Reads the entry header at idx.offset and inflates the object body.
    // Only COMMIT/TAG (and the two delta kinds) are handled; TREE/BLOB are
    // deliberately commented out — presumably the commit walker never needs
    // them (TODO confirm) — and fall through to the type error below.
    _unpackGitObject(fd, idx) {
        return __awaiter(this, void 0, void 0, function* () {
            let dst = this._getPackedObjectBufferFromCach(idx);
            if (dst) {
                return dst;
            }
            // 32 bytes cover the type/size varint plus any delta base info.
            const head = Buffer.alloc(32);
            yield readAsync(fd, head, 0, head.length, idx.offset);
            const po = new PackedObject(idx, head);
            switch (po.type) {
                case ObjectTypeEnum.COMMIT:
                // case ObjectTypeEnum.TREE:
                // case ObjectTypeEnum.BLOB:
                case ObjectTypeEnum.TAG: {
                    dst = yield inf(fd, idx, po);
                    break;
                }
                case ObjectTypeEnum.OFS_DELTA:
                case ObjectTypeEnum.REF_DELTA:
                    dst = yield this._unpackDeltaObject(fd, idx, po, head);
                    break;
            }
            if (!dst) {
                throw new Error(`${po.type} is a invalid object type.`);
            }
            this._setPackedObjectBuffrToCache(idx, dst);
            return dst;
        });
    }
    // Synchronous twin of _unpackGitObject().
    _unpackGitObjectSync(fd, idx) {
        let dst = this._getPackedObjectBufferFromCach(idx);
        if (dst) {
            return dst;
        }
        const head = Buffer.alloc(32);
        fs.readSync(fd, head, 0, head.length, idx.offset);
        const po = new PackedObject(idx, head);
        switch (po.type) {
            case ObjectTypeEnum.COMMIT:
            // case ObjectTypeEnum.TREE:
            // case ObjectTypeEnum.BLOB:
            case ObjectTypeEnum.TAG: {
                dst = infSync(fd, idx, po);
                break;
            }
            case ObjectTypeEnum.OFS_DELTA:
            case ObjectTypeEnum.REF_DELTA:
                dst = this._unpackDeltaObjectSync(fd, idx, po, head);
                break;
        }
        if (!dst) {
            throw new Error(`${po.type} is a invalid object type.`);
        }
        this._setPackedObjectBuffrToCache(idx, dst);
        return dst;
    }
    // Resolves a delta object: locates its base (by negative offset for
    // OFS_DELTA, by 20-byte hash for REF_DELTA), then patches the base
    // with the inflated delta payload.
    _unpackDeltaObject(fd, idx, po, head) {
        return __awaiter(this, void 0, void 0, function* () {
            let src;
            if (po.type === ObjectTypeEnum.OFS_DELTA) {
                // Base lives earlier in the same pack, at idx.offset - baseOffset.
                const [baseOffset, offset] = delta_1.readBaseOffset(head, po.offset);
                po.offset = offset;
                src = yield this._unpackGitObject(fd, {
                    offset: idx.offset - baseOffset,
                    fileIndex: idx.fileIndex
                });
            }
            else {
                // REF_DELTA: base referenced by hash; the public method opens
                // whichever pack file contains it. Skip the 20-byte hash.
                src = (yield this.unpackGitObject(readHash(head, po.offset)));
                po.offset += 20;
            }
            return delta_1.patchDelta(src, yield inf(fd, idx, po));
        });
    }
    // Synchronous twin of _unpackDeltaObject().
    _unpackDeltaObjectSync(fd, idx, po, head) {
        let src;
        if (po.type === ObjectTypeEnum.OFS_DELTA) {
            const [baseOffset, offset] = delta_1.readBaseOffset(head, po.offset);
            po.offset = offset;
            src = this._unpackGitObjectSync(fd, {
                offset: idx.offset - baseOffset,
                fileIndex: idx.fileIndex
            });
        }
        else {
            src = this.unpackGitObjectSync(readHash(head, po.offset));
            po.offset += 20;
        }
        return delta_1.patchDelta(src, infSync(fd, idx, po));
    }
}
exports.Packs = Packs;
/**
 * Reads the 20-byte SHA-1 stored in `buff` at `offset` and returns it as a
 * 40-character lowercase hex string.
 */
function readHash(buff, offset) {
    const end = offset + 20;
    return buff.toString('hex', offset, end);
}
/**
 * Synchronously inflates the zlib-deflated payload of a packed object.
 *
 * Reads `size` bytes from pack file `fd` starting at
 * `idx.offset + po.offset` and inflates them. `po.size` is the size field
 * from the entry header (the inflated size, per the pack format), so
 * `po.size * 2 + 32` is a generous first guess for the deflated stream
 * length. Whenever zlib reports Z_BUF_ERROR (errno -5, stream cut short)
 * the read is retried with 128 more bytes until the whole stream fits.
 * Trailing bytes past the end of the deflate stream are ignored by zlib.
 */
function infSync(fd, idx, po, size = po.size * 2 + 32) {
    for (let guess = size; ; guess += 128) {
        const deflated = Buffer.allocUnsafe(guess);
        fs.readSync(fd, deflated, 0, deflated.length, idx.offset + po.offset);
        try {
            return zlib.inflateSync(deflated);
        }
        catch (err) {
            if (err.errno !== -5) {
                throw err;
            }
            // Z_BUF_ERROR: buffer too small for the stream — grow and retry.
        }
    }
}
// Asynchronously inflates the zlib-deflated payload of a packed object:
// reads `size` bytes at `idx.offset + po.offset` from pack file `fd` and
// inflates them. `po.size` is the inflated size from the entry header, so
// `po.size * 2 + 32` is a generous first guess for the deflated length;
// on Z_BUF_ERROR (errno -5, i.e. the buffer cut the stream short) it
// recurses with a buffer 128 bytes larger. Trailing bytes beyond the end
// of the deflate stream are ignored by zlib.
function inf(fd, idx, po, size = po.size * 2 + 32) {
    return __awaiter(this, void 0, void 0, function* () {
        const buff = Buffer.allocUnsafe(size);
        yield readAsync(fd, buff, 0, buff.length, idx.offset + po.offset);
        try {
            return yield inflateAsync(buff);
        }
        catch (e) {
            // Anything other than Z_BUF_ERROR is a real failure.
            if (e.errno !== -5) {
                throw e;
            }
            return yield inf(fd, idx, po, size + 128);
        }
    });
}
//# sourceMappingURL=pack.js.map