/**
 * eths-git
 *
 * **eths-git-remote** is a decentralized Git solution designed to manage
 * repositories on-chain. It provides two main components:
 * on-chain ref/permission management (ContractDriver) and packfile
 * storage via an EthStorage FlatDirectory.
 */
import * as fs from 'fs';
import * as path from 'path';
import { ethers } from 'ethers';
import { UploadType } from "ethstorage-sdk";
import { NodeFile } from "ethstorage-sdk/file";
import { log } from "../utils/index.js";
const ZERO_ADDRESS_HEX = '0x0000000000000000000000000000000000000000';
const MAX_RPC_RETRIES = 3;
const RPC_RETRY_DELAY_MS = 1000;
export const stringToHex = (s) => ethers.hexlify(ethers.toUtf8Bytes(s));
async function withRetry(methodName, fn, isWrite = false) {
for (let attempt = 1; attempt <= MAX_RPC_RETRIES; attempt++) {
try {
return await fn();
}
catch (error) {
if (attempt === MAX_RPC_RETRIES) {
log(`[ERROR] RPC call ${methodName} failed after ${MAX_RPC_RETRIES} attempts.`);
throw error;
}
const errorType = isWrite ? 'Transaction' : 'Read';
log(`[WARN] ${errorType} call ${methodName} failed (Attempt ${attempt}/${MAX_RPC_RETRIES}). Retrying in ${RPC_RETRY_DELAY_MS}ms. Error: ${error.message}`);
await new Promise(resolve => setTimeout(resolve, RPC_RETRY_DELAY_MS));
}
}
// Should be unreachable
throw new Error(`Exceeded max retries for ${methodName}`);
}
export class ContractDriver {
provider;
signer;
contract;
flatDirectory;
constructor(rpcUrl, signer, contractAddr, abi, flatDirectory) {
this.provider = new ethers.JsonRpcProvider(rpcUrl);
this.signer = signer.connect(this.provider);
this.contract = new ethers.Contract(contractAddr, abi, this.signer);
this.flatDirectory = flatDirectory;
}
// --- Utility Methods ---
toHex(oid) {
if (!oid)
return ZERO_ADDRESS_HEX;
const cleanOid = oid.replace(/^0x/i, '');
// Ensures 40-byte hash padding for safety
return '0x' + cleanOid.padStart(40, '0');
}
fromHex(hash) {
return hash.startsWith('0x') ? hash.slice(2) : hash;
}
// --- Read Operations (with Retry) ---
async getDefaultBranch() {
const [refBytes, shaHex] = await withRetry("getDefaultBranch", () => this.contract.getDefaultBranch());
if (refBytes.length === 0) {
return {
ref: '',
sha: this.fromHex(shaHex)
};
}
return {
ref: ethers.toUtf8String(refBytes),
sha: this.fromHex(shaHex)
};
}
async listBranches(start = 0, limit = 50) {
const list = await withRetry("listBranches", () => this.contract.listBranches(start, limit));
return list.map((item) => ({
ref: ethers.toUtf8String(item.name),
sha: this.fromHex(item.hash)
}));
}
async hasPushPermission() {
return await withRetry("canPush", () => this.contract.canPush(this.signer.address));
}
async hasForcePushPermission(refName) {
const refNameBytes = ethers.toUtf8Bytes(refName);
return await withRetry("canForcePush", () => this.contract.canForcePush(this.signer.address, refNameBytes));
}
async getPushRecords(refName, start, limit) {
const refNameBytes = ethers.toUtf8Bytes(refName);
const list = await withRetry("getPushRecords", () => this.contract.getPushRecords(refNameBytes, start, limit));
return list.map((item) => ({
newOid: this.fromHex(item.newOid),
parentOid: this.fromHex(item.parentOid),
packfileKey: ethers.toUtf8String(item.packfileKey),
size: Number(item.size),
timestamp: Number(item.timestamp),
pusher: item.pusher
}));
}
async getPushRecordsCount(refName) {
const refNameBytes = ethers.toUtf8Bytes(refName);
const count = await withRetry("getPushRecordCount", () => this.contract.getPushRecordCount(refNameBytes));
return Number(count);
}
// --- Write Operations (with Retry) ---
async writeRef(update) {
const { refName, parentOid, newOid, size } = update;
const refNameBytes = ethers.toUtf8Bytes(refName);
const parentOidHex = this.toHex(parentOid);
const newOidHex = this.toHex(newOid);
const packfileKey = stringToHex(newOid);
const tx = await withRetry("push", async () => {
return await this.contract.push(refNameBytes, parentOidHex, newOidHex, packfileKey, size);
}, true);
log(`[INFO] ${refName}: Sending push transaction, hash: ${tx.hash}`);
const txRsp = await withRetry("push.wait", () => tx.wait(), true);
const success = txRsp?.status === 1;
if (!success) {
log(`[ERROR] ${refName}: Transaction failed on chain (status ${txRsp?.status}).`);
}
return success;
}
async writeForceRef(update) {
const { refName, parentOid, newOid, size } = update;
const parentIndex = update.parentIndex ?? 0;
const refNameBytes = ethers.toUtf8Bytes(refName);
const parentOidHex = this.toHex(parentOid);
const newOidHex = this.toHex(newOid);
const packfileKey = stringToHex(newOid);
const tx = await withRetry("forcePush", async () => {
return await this.contract.forcePush(refNameBytes, newOidHex, packfileKey, size, parentOidHex, parentIndex);
}, true);
log(`[INFO] ${refName}: Sending forcePush transaction, hash: ${tx.hash}`);
const txRsp = await withRetry("forcePush.wait", () => tx.wait(), true);
const success = txRsp?.status === 1;
if (!success) {
log(`[ERROR] ${refName}: Force push transaction failed on chain (status ${txRsp?.status}).`);
}
return success;
}
// --- FlatDirectory Operations (No RPC Retry - relies on FlatDirectory internal retry) ---
async uploadPack(dst, fileKey, packFilePath) {
let status = true;
let currentSuccessIndex = -1;
const uploadCallback = {
onTransactionSent: (txHash) => {
log(`[INFO] Upload tx sent: ${txHash}`);
},
onProgress: (progress, total, isChange) => {
const start = currentSuccessIndex + 1;
const end = progress;
if (start > end)
return;
currentSuccessIndex = progress;
const percent = ((progress / total) * 100).toFixed(1);
if (isChange) {
log(`[PROGRESS] packfile ${dst}: Uploaded chunks ${start}-${end} / ${total} (${percent}%)`);
}
else {
log(`[PROGRESS] packfile ${dst}: Chunks ${start}-${end} skipped (no change) / ${total} (${percent}%)`);
}
},
onFail: (err) => {
log(`[ERROR] pack file ${dst}: ${err.message}`);
status = false;
},
onFinish: (totalChunks, totalSize, totalCost) => {
log(`[INFO] Upload finished: ${totalChunks} chunks, ${totalSize} bytes`);
}
};
const hashesMap = await this.flatDirectory.fetchHashes([fileKey]);
const hashes = hashesMap[fileKey] || [];
const file = new NodeFile(packFilePath);
const request = {
key: fileKey,
content: file,
chunkHashes: hashes,
type: UploadType.Blob,
gasIncPct: 1,
isConfirmedNonce: true,
callback: uploadCallback
};
// Rely on flatDirectory.upload's internal retry mechanism
await this.flatDirectory.upload(request);
return status;
}
async downloadPackFile(fileName, filePath) {
await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
const handle = await fs.promises.open(filePath, "w");
const writePromises = [];
let totalSize = 0;
let currentOffset = 0;
let finishedChunks = 0;
let lastLogTime = Date.now();
let finished = false;
try {
await new Promise((resolve, reject) => {
this.flatDirectory.download(fileName, {
onProgress: (currentChunk, totalChunks, chunkData) => {
const writeOffset = currentOffset;
currentOffset += chunkData.length;
totalSize += chunkData.length;
const p = handle.write(chunkData, 0, chunkData.length, writeOffset)
.then(() => {
finishedChunks++;
const now = Date.now();
if (now - lastLogTime > 1000 || finishedChunks === totalChunks) {
const percent = ((finishedChunks / totalChunks) * 100).toFixed(2);
log(`[${new Date().toLocaleTimeString()}] Download ${fileName}: ${percent}% (${finishedChunks}/${totalChunks})`);
lastLogTime = now;
}
})
.catch(err => {
if (!finished) {
finished = true;
reject(err);
}
});
writePromises.push(p);
},
onFail: (err) => {
if (!finished) {
finished = true;
reject(err);
}
},
onFinish: async () => {
if (finished)
return;
finished = true;
await Promise.all(writePromises);
await handle.close();
log(`[INFO] Download finished for ${fileName}. Total size: ${totalSize} bytes.`);
resolve();
},
});
});
return filePath;
}
catch (err) {
try {
await handle.close();
}
catch { }
try {
await fs.promises.unlink(filePath);
}
catch { }
throw err;
}
}
async close() {
await this.flatDirectory.close();
}
}