alepm
Advanced and secure Node.js package manager with binary storage, intelligent caching, and comprehensive security features
const fs = require('fs-extra');
const os = require('os');
const path = require('path');
const tar = require('tar');
const zlib = require('zlib');
const crypto = require('crypto');
const { promisify } = require('util');
const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);
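// BinaryStorage packs gzip-compressed package tarballs into one append-only
// data file and tracks them in a fixed-width binary index keyed by SHA-256.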
class BinaryStorage {
constructor() {
this.storageDir = path.join(os.homedir(), '.alepm', 'storage');
this.indexFile = path.join(this.storageDir, 'index.bin');
this.dataFile = path.join(this.storageDir, 'data.bin');
this.compressionLevel = 9; // Maximum compression
this.init(); // Fire-and-forget: the storage files may not exist yet when the constructor returns
}
async init() {
await fs.ensureDir(this.storageDir);
if (!(await fs.pathExists(this.indexFile))) {
await this.createIndex();
}
if (!(await fs.pathExists(this.dataFile))) {
await this.createDataFile();
}
}
async createIndex() {
// Binary index format:
// Header: Magic 'ALEPMIDX' (8 bytes) + Version (4 bytes) + Entry Count (4 bytes)
// Entry: Hash (32 bytes) + Offset (8 bytes) + Size (8 bytes) + CompressedSize (8 bytes) + Timestamp (8 bytes)
const header = Buffer.alloc(16);
header.write('ALEPMIDX', 0, 'ascii'); // 8-byte magic; must not overrun the version field at offset 8
header.writeUInt32BE(1, 8); // Version
header.writeUInt32BE(0, 12); // Entry count
await fs.writeFile(this.indexFile, header);
}
async createDataFile() {
// Binary data file format:
// Header: Magic 'ALEPMDAT' (8 bytes) + Version (4 bytes) + Reserved (4 bytes)
const header = Buffer.alloc(16);
header.write('ALEPMDAT', 0, 'ascii'); // 8-byte magic, mirroring the index header
header.writeUInt32BE(1, 8);
header.writeUInt32BE(0, 12); // Reserved
await fs.writeFile(this.dataFile, header);
}
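// Store a tarball under its name@version key. Note that the key, not the
// content, is hashed; storing the same key twice appends a second index
// entry, and retrieve() keeps returning the first one.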
async store(packageName, version, tarballData) {
const key = `${packageName}@${version}`;
const hash = crypto.createHash('sha256').update(key).digest();
// Compress the tarball
const compressedData = await gzip(tarballData, { level: this.compressionLevel });
// Get current data file size for offset
const dataStats = await fs.stat(this.dataFile);
const offset = dataStats.size;
// Append compressed data to data file
const dataFd = await fs.open(this.dataFile, 'a');
try {
await fs.write(dataFd, compressedData);
} finally {
await fs.close(dataFd);
}
// Update index
await this.updateIndex(hash, offset, tarballData.length, compressedData.length);
return {
hash: hash.toString('hex'),
offset,
size: tarballData.length,
compressedSize: compressedData.length,
compressionRatio: compressedData.length / tarballData.length
};
}
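// Look up name@version in the index and return the decompressed tarball,
// or null if it was never stored or has since been deleted.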
async retrieve(packageName, version) {
const key = `${packageName}@${version}`;
const hash = crypto.createHash('sha256').update(key).digest();
// Find entry in index
const indexEntry = await this.findIndexEntry(hash);
if (!indexEntry) {
return null;
}
// Read compressed data
const dataFd = await fs.open(this.dataFile, 'r');
const compressedData = Buffer.alloc(indexEntry.compressedSize);
try {
await fs.read(dataFd, compressedData, 0, indexEntry.compressedSize, indexEntry.offset);
} finally {
await fs.close(dataFd);
}
// Decompress
const originalData = await gunzip(compressedData);
return originalData;
}
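// Unpack a tarball buffer into targetDir via a temporary file, filtering
// out entries that would escape the target directory.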
async extract(packageData, targetDir) {
if (!packageData) {
throw new Error('Package data is null or undefined');
}
await fs.ensureDir(targetDir);
// Create temporary tarball file
const tempTarball = path.join(os.tmpdir(), `alepm-extract-${Date.now()}.tgz`);
try {
await fs.writeFile(tempTarball, packageData);
// Extract tarball to target directory
await tar.extract({
file: tempTarball,
cwd: targetDir,
strip: 1, // Remove the package/ prefix
filter: (entryPath, _entry) => {
// Security: prevent path traversal by rejecting absolute paths and any '..' component
const normalizedPath = path.normalize(entryPath);
return !path.isAbsolute(normalizedPath) && !normalizedPath.split(path.sep).includes('..');
}
});
} finally {
// Clean up temporary file (fs.remove is a no-op if the file is already gone)
await fs.remove(tempTarball);
}
}
async updateIndex(hash, offset, size, compressedSize) {
const indexFd = await fs.open(this.indexFile, 'r+');
try {
// Read header to get entry count
const header = Buffer.alloc(16);
await fs.read(indexFd, header, 0, 16, 0);
const entryCount = header.readUInt32BE(12);
// Create new entry
const entry = Buffer.alloc(64); // 32 + 8 + 8 + 8 + 8 = 64 bytes
hash.copy(entry, 0); // Hash (32 bytes)
entry.writeBigUInt64BE(BigInt(offset), 32); // Offset (8 bytes)
entry.writeBigUInt64BE(BigInt(size), 40); // Size (8 bytes)
entry.writeBigUInt64BE(BigInt(compressedSize), 48); // Compressed size (8 bytes)
entry.writeBigUInt64BE(BigInt(Date.now()), 56); // Timestamp (8 bytes)
// Append entry to index
const entryOffset = 16 + (entryCount * 64);
await fs.write(indexFd, entry, 0, 64, entryOffset);
// Update entry count in header last, so an interrupted write leaves the new entry invisible rather than the index corrupt
header.writeUInt32BE(entryCount + 1, 12);
await fs.write(indexFd, header, 0, 16, 0);
} finally {
await fs.close(indexFd);
}
}
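// Linear scan over the fixed-width entries, O(n) per lookup. Deleted
// entries carry an all-zero hash, which in practice no SHA-256 key matches.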
async findIndexEntry(hash) {
const indexFd = await fs.open(this.indexFile, 'r');
try {
// Read header
const header = Buffer.alloc(16);
await fs.read(indexFd, header, 0, 16, 0);
const entryCount = header.readUInt32BE(12);
// Search for matching hash
for (let i = 0; i < entryCount; i++) {
const entryOffset = 16 + (i * 64);
const entry = Buffer.alloc(64);
await fs.read(indexFd, entry, 0, 64, entryOffset);
const entryHash = entry.slice(0, 32);
if (entryHash.equals(hash)) {
return {
hash: entryHash,
offset: Number(entry.readBigUInt64BE(32)),
size: Number(entry.readBigUInt64BE(40)),
compressedSize: Number(entry.readBigUInt64BE(48)),
timestamp: Number(entry.readBigUInt64BE(56))
};
}
}
return null;
} finally {
await fs.close(indexFd);
}
}
async remove(packageName, version) {
const key = `${packageName}@${version}`;
const hash = crypto.createHash('sha256').update(key).digest();
// Find entry
const entry = await this.findIndexEntry(hash);
if (!entry) {
return false;
}
// Mark the entry as deleted by zeroing its hash; the space is reclaimed
// later by compact()
await this.markEntryDeleted(hash);
return true;
}
async markEntryDeleted(hash) {
const indexFd = await fs.open(this.indexFile, 'r+');
try {
const header = Buffer.alloc(16);
await fs.read(indexFd, header, 0, 16, 0);
const entryCount = header.readUInt32BE(12);
for (let i = 0; i < entryCount; i++) {
const entryOffset = 16 + (i * 64);
const entryHash = Buffer.alloc(32);
await fs.read(indexFd, entryHash, 0, 32, entryOffset);
if (entryHash.equals(hash)) {
// Zero out the hash to mark as deleted
const zeroHash = Buffer.alloc(32);
await fs.write(indexFd, zeroHash, 0, 32, entryOffset);
break;
}
}
} finally {
await fs.close(indexFd);
}
}
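// Rewrite the index and data files, dropping tombstoned (zero-hash) entries
// and reclaiming their space on disk.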
async compact() {
// Move the live files aside first, then rebuild fresh ones containing only
// non-deleted entries. The renames must happen before createIndex() and
// createDataFile() run, or those calls would overwrite the live files.
const oldIndexFile = this.indexFile + '.tmp';
const oldDataFile = this.dataFile + '.tmp';
await fs.rename(this.indexFile, oldIndexFile);
await fs.rename(this.dataFile, oldDataFile);
await this.createIndex();
await this.createDataFile();
// Read the old index and copy non-deleted entries
const oldIndexFd = await fs.open(oldIndexFile, 'r');
const oldDataFd = await fs.open(oldDataFile, 'r');
const newDataFd = await fs.open(this.dataFile, 'a');
let newDataOffset = 16; // Skip header
let compactedEntries = 0;
let spaceFreed = 0;
try {
const header = Buffer.alloc(16);
await fs.read(oldIndexFd, header, 0, 16, 0);
const entryCount = header.readUInt32BE(12);
for (let i = 0; i < entryCount; i++) {
const entryOffset = 16 + (i * 64);
const entry = Buffer.alloc(64);
await fs.read(oldIndexFd, entry, 0, 64, entryOffset);
const entryHash = entry.slice(0, 32);
const isDeleted = entryHash.every(byte => byte === 0);
if (!isDeleted) {
const oldOffset = Number(entry.readBigUInt64BE(32));
const size = Number(entry.readBigUInt64BE(40));
const compressedSize = Number(entry.readBigUInt64BE(48));
// Copy data to the new file; the fd is in append mode, so writes land
// at the end, which newDataOffset tracks sequentially
const data = Buffer.alloc(compressedSize);
await fs.read(oldDataFd, data, 0, compressedSize, oldOffset);
await fs.write(newDataFd, data, 0, compressedSize);
// Record the entry in the new index with its new offset
await this.updateIndex(entryHash, newDataOffset, size, compressedSize);
newDataOffset += compressedSize;
compactedEntries++;
} else {
spaceFreed += Number(entry.readBigUInt64BE(48));
}
}
} finally {
await fs.close(oldIndexFd);
await fs.close(oldDataFd);
await fs.close(newDataFd);
}
// Remove the old files now that all live entries have been copied
await fs.remove(oldIndexFile);
await fs.remove(oldDataFile);
return {
entriesCompacted: compactedEntries,
spaceFreed
};
}
async getStats() {
const indexStats = await fs.stat(this.indexFile);
const dataStats = await fs.stat(this.dataFile);
// Calculate compression stats in a single pass over the index
let entryCount = 0;
let totalOriginalSize = 0;
let totalCompressedSize = 0;
let activeEntries = 0;
const indexFd = await fs.open(this.indexFile, 'r');
try {
const header = Buffer.alloc(16);
await fs.read(indexFd, header, 0, 16, 0);
entryCount = header.readUInt32BE(12);
for (let i = 0; i < entryCount; i++) {
const entryOffset = 16 + (i * 64);
const entry = Buffer.alloc(64);
await fs.read(indexFd, entry, 0, 64, entryOffset);
const entryHash = entry.slice(0, 32);
const isDeleted = entryHash.every(byte => byte === 0);
if (!isDeleted) {
totalOriginalSize += Number(entry.readBigUInt64BE(40));
totalCompressedSize += Number(entry.readBigUInt64BE(48));
activeEntries++;
}
}
} finally {
await fs.close(indexFd);
}
return {
indexSize: indexStats.size,
dataSize: dataStats.size,
totalSize: indexStats.size + dataStats.size,
totalEntries: entryCount,
activeEntries,
deletedEntries: entryCount - activeEntries,
totalOriginalSize,
totalCompressedSize,
compressionRatio: totalOriginalSize > 0 ? totalCompressedSize / totalOriginalSize : 0,
spaceEfficiency: totalOriginalSize > 0 ? (totalOriginalSize - totalCompressedSize) / totalOriginalSize : 0
};
}
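// Lightweight integrity check: validates both file headers only; it does
// not checksum individual entry payloads.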
async verify() {
const stats = await this.getStats();
const errors = [];
// Verify index file integrity
try {
const indexFd = await fs.open(this.indexFile, 'r');
try {
const header = Buffer.alloc(16);
await fs.read(indexFd, header, 0, 16, 0);
if (header.toString('ascii', 0, 8) !== 'ALEPMIDX') {
errors.push('Invalid index file magic header');
}
} finally {
await fs.close(indexFd);
}
} catch (error) {
errors.push(`Index file error: ${error.message}`);
}
// Verify data file integrity
try {
const dataFd = await fs.open(this.dataFile, 'r');
try {
const header = Buffer.alloc(16);
await fs.read(dataFd, header, 0, 16, 0);
if (header.toString('ascii', 0, 8) !== 'ALEPMDAT') {
errors.push('Invalid data file magic header');
}
} finally {
await fs.close(dataFd);
}
} catch (error) {
errors.push(`Data file error: ${error.message}`);
}
return {
isValid: errors.length === 0,
errors,
stats
};
}
// Utility methods for different storage formats
async storeTarball(tarballData) {
return await this.storeRaw(tarballData);
}
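// Content-addressed variant: unlike store(), the key is the SHA-256 of the
// data itself, so the payload can be fetched later via retrieveByHash().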
async storeRaw(data) {
const hash = crypto.createHash('sha256').update(data).digest();
const compressed = await gzip(data, { level: this.compressionLevel });
const stats = await fs.stat(this.dataFile);
const offset = stats.size;
const dataFd = await fs.open(this.dataFile, 'a');
try {
await fs.write(dataFd, compressed);
} finally {
await fs.close(dataFd);
}
await this.updateIndex(hash, offset, data.length, compressed.length);
return hash.toString('hex');
}
async retrieveByHash(hashString) {
const hash = Buffer.from(hashString, 'hex');
const entry = await this.findIndexEntry(hash);
if (!entry) {
return null;
}
const dataFd = await fs.open(this.dataFile, 'r');
const compressed = Buffer.alloc(entry.compressedSize);
try {
await fs.read(dataFd, compressed, 0, entry.compressedSize, entry.offset);
} finally {
await fs.close(dataFd);
}
return await gunzip(compressed);
}
}
module.exports = BinaryStorage;
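
// Usage sketch (illustrative only; assumes this file is saved as
// binary-storage.js and that tarballBuffer holds a package tarball
// fetched elsewhere):
//
//   const BinaryStorage = require('./binary-storage');
//   const storage = new BinaryStorage();
//   const meta = await storage.store('left-pad', '1.3.0', tarballBuffer);
//   console.log(`stored ${meta.size} B as ${meta.compressedSize} B`);
//   const data = await storage.retrieve('left-pad', '1.3.0');
//   await storage.extract(data, './node_modules/left-pad');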