@alinex/datastore
Version:
Read, work and write data structures from and to different locations and formats.
78 lines • 2.9 kB
JavaScript
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.compress = exports.uncompress = void 0;
const path_1 = require("path");
const debug_1 = require("debug");
const tar = require("tar-stream");
const streamBuffers = require("stream-buffers");
// @ts-ignore
const compressjs_1 = require("compressjs");
const core_1 = require("@alinex/core");
const debug = debug_1.default('datastore:compression:tar');
exports.uncompress = (async function (parsedUri, buffer, options) {
debug(`uncompressing ${path_1.basename(parsedUri.pathname)}`);
let archivePath = parsedUri.hash && parsedUri.hash.substr(1);
// uncompressing
buffer = Buffer.from(compressjs_1.Bzip2.decompressFile(buffer));
// buffer to stream
const writer = new streamBuffers.WritableStreamBuffer();
const reader = new streamBuffers.ReadableStreamBuffer();
reader.put(buffer);
reader.stop();
// stepping through archive
return new Promise(resolve => {
debug(`extracting ${archivePath}`);
var extract = tar.extract();
extract.on('finish', resolve);
extract.on('entry', function (header, stream, next) {
stream.on('end', next);
if (archivePath) {
if (header.name == archivePath) {
// reading file
stream.pipe(writer);
}
else {
stream.resume(); // just auto drain the stream
}
}
else {
archivePath = header.name;
parsedUri.hash = `#${archivePath}`;
stream.pipe(writer);
}
});
reader.pipe(extract);
}).then(() => {
// return result
const result = writer.getContents();
if (!result)
throw new Error('Found no content in archive.');
return result;
});
});
exports.compress = (async function (parsedUri, buffer, options) {
debug(`compressing ${path_1.basename(parsedUri.pathname)}`);
if (!parsedUri.hash)
throw new core_1.ExitError('No file specified in hash part of URL.');
let archivePath = parsedUri.hash && parsedUri.hash.substr(1);
// buffer to stream
const writer = new streamBuffers.WritableStreamBuffer();
// creating archive
buffer = await new Promise(resolve => {
debug(`including as ${archivePath}`);
writer.on('finish', resolve);
var pack = tar.pack();
pack.entry({ name: archivePath }, buffer);
pack.finalize();
pack.pipe(writer);
}).then(() => {
// return result
const result = writer.getContents();
if (!result)
throw new Error('Found no content in archive.');
return result;
});
// compressing
return Buffer.from(compressjs_1.Bzip2.compressFile(buffer));
});
//# sourceMappingURL=tbz2.js.map