/**
 * @shockpkg/core — shockpkg core utilities (compiled output, util.js).
 * Version: (unspecified)
 * 450 lines (374 loc) • 9.88 kB • JavaScript
 */
// NOTE(review): this bare ';' likely replaced a stripped `"use strict";`
// directive during extraction — confirm against the published package.
;
// Babel helper: wraps CommonJS modules so default imports interop cleanly.
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
// Mark this module as a transpiled ES module for interop consumers.
Object.defineProperty(exports, "__esModule", {
value: true
});
// Public API: async array helpers, stream/hash utilities, download helpers.
exports.arrayFilterAsync = arrayFilterAsync;
exports.arrayMapAsync = arrayMapAsync;
exports.streamEndError = streamEndError;
exports.promiseCatch = promiseCatch;
exports.lstatExists = lstatExists;
exports.readDir = readDir;
exports.hashFile = hashFile;
exports.hashNormalize = hashNormalize;
exports.fileHash = fileHash;
exports.fileHashVerify = fileHashVerify;
exports.fileSizeVerify = fileSizeVerify;
exports.dependSort = dependSort;
exports.streamVerify = streamVerify;
exports.streamRequest = streamRequest;
exports.streamRequestDownload = streamRequestDownload;
// Node crypto for hashing; fs-extra (fs superset) for filesystem access.
exports.zipEntryExtract = zipEntryExtract;
var _crypto = require("crypto");
var _fsExtra = _interopRequireDefault(require("fs-extra"));
/**
 * Asynchronous version of Array.prototype.filter.
 * Entries are tested sequentially, awaiting each predicate in turn.
 *
 * @param list The array to filter.
 * @param filter Async predicate; a truthy result keeps the entry.
 * @return Filtered array (new array; input is not modified).
 */
async function arrayFilterAsync(list, filter) {
  const kept = [];
  for (const item of list) {
    const keep = await filter(item);
    if (keep) {
      kept.push(item);
    }
  }
  return kept;
}
/**
 * Asynchronous version of Array.prototype.map.
 * Entries are mapped sequentially, awaiting each callback in turn.
 *
 * @param list The array to map.
 * @param map Async mapping function.
 * @return Mapped array (new array; input is not modified).
 */
async function arrayMapAsync(list, map) {
  const mapped = [];
  for (const item of list) {
    const value = await map(item);
    mapped.push(value);
  }
  return mapped;
}
/**
 * Promise for an event emitter to end successfully or with an error.
 * Resolves on the named end event, rejects on the first 'error' event.
 * (Once settled, any later events are ignored by the promise.)
 *
 * @param obj Event emitter.
 * @param end The end event name.
 */
async function streamEndError(obj, end) {
  await new Promise((resolve, reject) => {
    obj.on(end, () => {
      resolve();
    });
    obj.on('error', err => {
      reject(err);
    });
  });
}
/**
 * Await a promise, returning a default value if it rejects.
 *
 * @param p Promise object.
 * @param d Default value used on rejection.
 * @return Resolved value, or the default on error.
 */
async function promiseCatch(p, d) {
  try {
    return await p;
  } catch (err) {
    // Rejection is intentionally swallowed; fall back to the default.
    return d;
  }
}
/**
 * Promise for lstat-ing a path, null on any error (e.g. non-existent path).
 *
 * @param path File path.
 * @return Stat object or null.
 */
async function lstatExists(path) {
  const stat = _fsExtra.default.lstat(path);
  return promiseCatch(stat, null);
}
/**
 * Read a directory, optionally excluding dot files, in sorted order.
 *
 * @param path Path to the directory to list.
 * @param dotfile Include dot files in the list or not.
 * @return Directory listing, sorted.
 */
async function readDir(path, dotfile = true) {
  const entries = await _fsExtra.default.readdir(path);
  // Drop names starting with '.' unless dot files were requested.
  const listed = entries.filter(entry => dotfile || entry.charAt(0) !== '.');
  listed.sort();
  return listed;
}
/**
 * Hash a file using the specified algorithm.
 *
 * @param path File path.
 * @param algorithm Hash algorithm.
 * @param encoding Digest encoding.
 * @return Hash digest, normalized (hex digests lowercased).
 */
async function hashFile(path, algorithm, encoding) {
  const hash = (0, _crypto.createHash)(algorithm);
  const stream = _fsExtra.default.createReadStream(path);
  stream.on('data', chunk => {
    hash.update(chunk);
  });
  // Wait for the read stream to fully close (or error).
  await streamEndError(stream, 'close');
  return hashNormalize(hash.digest(encoding), encoding);
}
/**
 * Normalize a hash string based on its encoding.
 * Hex digests are case-insensitive, so they are lowercased; other
 * encodings (e.g. base64) are case-sensitive and returned unchanged.
 *
 * @param hash Hash value.
 * @param encoding Hash encoding.
 * @return Normalized hash.
 */
function hashNormalize(hash, encoding) {
  if (encoding === 'hex') {
    return hash.toLowerCase();
  }
  return hash;
}
/**
 * Hash a file with multiple algorithms in a single read pass.
 * Each entry's `digest` property is set in place on completion.
 *
 * @param path File path.
 * @param hashes Hash list ({algorithm, encoding} objects; digest written back).
 */
async function fileHash(path, hashes) {
  const states = hashes.map(hash => ({
    hash,
    hasher: (0, _crypto.createHash)(hash.algorithm)
  }));
  const stream = _fsExtra.default.createReadStream(path);
  stream.on('data', chunk => {
    // Feed every hasher from the same read pass.
    for (const state of states) {
      state.hasher.update(chunk);
    }
  });
  await streamEndError(stream, 'close');
  // Write each normalized digest back onto its hash object.
  for (const {hash, hasher} of states) {
    const {encoding} = hash;
    hash.digest = hashNormalize(hasher.digest(encoding), encoding);
  }
}
/**
 * Verify a file against a list of expected hashes, or throw.
 *
 * @param path File path.
 * @param hashes Hash list ({algorithm, encoding, digest}).
 */
async function fileHashVerify(path, hashes) {
  // Copy each expectation so fileHash writes digests into scratch objects.
  const pairs = hashes.map(hash => ({
    hash,
    hashed: {
      algorithm: hash.algorithm,
      encoding: hash.encoding,
      digest: hash.digest
    }
  }));
  await fileHash(path, pairs.map(pair => pair.hashed));
  for (const {hash, hashed} of pairs) {
    const {encoding, algorithm} = hash;
    const hashedV = hashNormalize(hashed.digest, encoding);
    const expectedV = hashNormalize(hash.digest, encoding);
    if (hashedV !== expectedV) {
      throw new Error(`Invalid ${algorithm} hash: ${hashedV} expected: ${expectedV}`);
    }
  }
}
/**
 * Verify a file's size (via lstat) or throw an error.
 *
 * @param path File path.
 * @param size Expected size in bytes.
 */
async function fileSizeVerify(path, size) {
  const {size: actual} = await _fsExtra.default.lstat(path);
  if (actual !== size) {
    throw new Error(`Invalid file size: ${actual} expected: ${size}`);
  }
}
/**
 * Sort entries so direct dependencies come before their dependents.
 * Sorts the array in place and returns it.
 * NOTE: the comparator only orders directly-dependent pairs; it does not
 * compute a full transitive topological order.
 *
 * @param list The array to sort.
 * @param deps Get the list of dependencies for each entry.
 * @return The same array, sorted.
 */
function dependSort(list, deps) {
  // Precompute a dependency Set per entry for O(1) lookups in the comparator.
  const depSets = new Map(list.map(entry => [entry, new Set(deps(entry))]));
  return list.sort((a, b) => {
    if (depSets.get(a).has(b)) {
      return 1;
    }
    if (depSets.get(b).has(a)) {
      return -1;
    }
    return 0;
  });
}
/**
 * Verify a stream's size and hashes as it is consumed.
 * Over-reads and data-handler errors are surfaced by emitting 'error' on
 * the source; under-reads and hash mismatches throw after the end event.
 *
 * @param source Request stream.
 * @param endEvent The end event name.
 * @param size Expected size in bytes, or null to skip size verification.
 * @param hashes Expected hashes ({algorithm, encoding, digest}), or null.
 * @param onData Data event handler, can throw to cancel download.
 */
async function streamVerify(source, endEvent, size = null, hashes = null, onData = null) {
  const hashers = (hashes || []).map(hash => ({
    hash,
    hasher: (0, _crypto.createHash)(hash.algorithm)
  }));
  let streamSize = 0;
  source.on('data', data => {
    // Update size, fail fast if more data than expected arrives.
    streamSize += data.length;
    if (size !== null && streamSize > size) {
      source.emit('error', new Error(`Read size too large: ${streamSize}`));
    }
    // Update hashers.
    hashers.forEach(entry => entry.hasher.update(data));
    if (!onData) {
      return;
    }
    try {
      onData(data);
    } catch (err) {
      // Route handler errors through the stream to reject the wait below.
      source.emit('error', err);
    }
  });
  // BUGFIX: previously hard-coded 'end' here, silently ignoring the
  // endEvent parameter; all in-file callers pass 'end', so behavior for
  // existing callers is unchanged.
  await streamEndError(source, endEvent);
  // Verify size is not too small (too large is checked on data).
  if (size !== null && streamSize < size) {
    throw new Error(`Read size too small: ${streamSize}`);
  }
  // Verify each expected digest against what was actually streamed.
  for (const {hash, hasher} of hashers) {
    const {algorithm, encoding, digest} = hash;
    const expectedV = hashNormalize(digest, encoding);
    const hashedV = hashNormalize(hasher.digest(encoding), encoding);
    if (hashedV !== expectedV) {
      throw new Error(`Invalid ${algorithm} hash: ${hashedV} expected: ${expectedV}`);
    }
  }
}
/**
 * Stream a request, verifying size and hashes as it downloads.
 * Response-handler errors are re-emitted on the source; any stream error
 * also aborts the request.
 *
 * @param source Request stream.
 * @param size Expected size, or null to skip.
 * @param hashes Expected hashes, or null to skip.
 * @param onResponse Response event handler, can throw to cancel download.
 * @param onData Data event handler, can throw to cancel download.
 */
async function streamRequest(source, size = null, hashes = null, onResponse = null, onData = null) {
  source.on('response', response => {
    if (!onResponse) {
      return;
    }
    try {
      onResponse(response);
    } catch (err) {
      source.emit('error', err);
    }
  });
  // Abort the underlying request on any stream error.
  source.on('error', () => {
    source.abort();
  });
  await streamVerify(source, 'end', size, hashes, onData);
}
/**
 * Download a request stream to a file, verifying size and hashes.
 *
 * @param source Request stream.
 * @param path File path to write.
 * @param size Expected size, or null to skip.
 * @param hashes Expected hashes, or null to skip.
 * @param onResponse Response event handler, can throw to cancel download.
 * @param onData Data event handler, can throw to cancel download.
 */
async function streamRequestDownload(source, path, size = null, hashes = null, onResponse = null, onData = null) {
  const writer = _fsExtra.default.createWriteStream(path, {
    encoding: 'binary'
  });
  // Start waiting for the writer to close before piping begins.
  const writeDone = streamEndError(writer, 'close');
  source.pipe(writer);
  await streamRequest(source, size, hashes, onResponse, onData);
  await writeDone;
}
/**
 * Extract a ZIP entry to a file, verifying size and hashes.
 * If no size is given, the entry's decompressed size is used; if one is
 * given, it must match the entry's decompressed size.
 *
 * @param entry ZIP entry (with `sizeD` and an async `stream()` method).
 * @param path File path to write.
 * @param size Expected size, or null to use the entry's size.
 * @param hashes Expected hashes, or null to skip.
 * @param onData Data event handler, can throw to cancel download.
 */
async function zipEntryExtract(entry, path, size = null, hashes = null, onData = null) {
  const {sizeD} = entry;
  let expected = size;
  if (expected === null) {
    expected = sizeD;
  } else if (sizeD !== expected) {
    throw new Error(`Unexpected extract size: ${sizeD}`);
  }
  const source = await entry.stream();
  const writer = _fsExtra.default.createWriteStream(path, {
    encoding: 'binary'
  });
  // Start waiting for the writer to close before piping begins.
  const writeDone = streamEndError(writer, 'close');
  source.pipe(writer);
  await streamVerify(source, 'end', expected, hashes, onData);
  await writeDone;
}
//# sourceMappingURL=util.js.map