@nraynaud/xo-vmdk-to-vhd
JS lib streaming a vmdk file to a vhd
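
A minimal usage sketch of the readRawContent export defined below, assuming this compiled file is loaded from Node as ./vmdk-read and fed a readable stream over a streamOptimized VMDK (the file names and the require path are assumptions, not part of the package):

var fs = require('fs');
var readRawContent = require('./vmdk-read').readRawContent;

readRawContent(fs.createReadStream('disk.vmdk')).then(function (result) {
  // result.descriptor holds the key/value pairs of the embedded descriptor,
  // result.extents its extent lines, and result.rawFile a Buffer containing
  // the whole flat disk image
  console.log(result.descriptor.createType, result.rawFile.length);
});
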
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.readRawContent = exports.VMDKDirectParser = undefined;
var _promise = require('babel-runtime/core-js/promise');
var _promise2 = _interopRequireDefault(_promise);
var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');
var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
var _createClass2 = require('babel-runtime/helpers/createClass');
var _createClass3 = _interopRequireDefault(_createClass2);
var _regenerator = require('babel-runtime/regenerator');
var _regenerator2 = _interopRequireDefault(_regenerator);
var _asyncToGenerator2 = require('babel-runtime/helpers/asyncToGenerator');
var _asyncToGenerator3 = _interopRequireDefault(_asyncToGenerator2);
var _getIterator2 = require('babel-runtime/core-js/get-iterator');
var _getIterator3 = _interopRequireDefault(_getIterator2);
var readGrain = function () {
var _ref = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee(offsetSectors, buffer, compressed) {
var offset, size, grainBuffer, grainContent, lba;
return _regenerator2.default.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
offset = offsetSectors * sectorSize;
size = buffer.readUInt32LE(offset + 8);
grainBuffer = buffer.slice(offset + 12, offset + 12 + size);
if (!compressed) {
_context.next = 9;
break;
}
_context.next = 6;
return _zlib2.default.inflateSync(grainBuffer);
case 6:
_context.t0 = _context.sent;
_context.next = 10;
break;
case 9:
_context.t0 = grainBuffer;
case 10:
grainContent = _context.t0;
lba = parseU64b(buffer, offset, 'l2Lba');
return _context.abrupt('return', {
offsetSectors: offsetSectors,
offset: offset,
lba: lba,
lbaBytes: lba * sectorSize,
size: size,
buffer: grainBuffer,
grain: grainContent,
grainSize: grainContent.byteLength
});
case 13:
case 'end':
return _context.stop();
}
}
}, _callee, this);
}));
return function readGrain(_x, _x2, _x3) {
return _ref.apply(this, arguments);
};
}();
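// Illustration (hypothetical helper, not used anywhere in this module):
// readGrain above expects, at offsetSectors * 512, a grain laid out as
//   bytes 0-7  : LBA of the grain in sectors (64-bit little endian)
//   bytes 8-11 : size of the compressed payload in bytes (32-bit little endian)
//   bytes 12.. : the deflate-compressed grain data
// A buffer in that shape could be assembled like this (_zlib2 is the zlib
// import declared further down in this file):
function exampleBuildCompressedGrain(grainData, lbaSectors) {
var compressed = _zlib2.default.deflateSync(grainData);
var markerHeader = Buffer.alloc(12);
markerHeader.writeUInt32LE(lbaSectors, 0); // low 32 bits of the LBA
markerHeader.writeUInt32LE(0, 4); // high 32 bits of the LBA
markerHeader.writeUInt32LE(compressed.length, 8);
return Buffer.concat([markerHeader, compressed]);
}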
var readRawContent = exports.readRawContent = function () {
var _ref5 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee5(readStream) {
var virtualBuffer, headerBuffer, header, descriptorLength, descriptorBuffer, descriptor, remainingBuffer, buffer, rawOutputBuffer, l1Size, l2Size, l1, i, l1Entry, l2, j, l2Entry, grain, vmdkType;
return _regenerator2.default.wrap(function _callee5$(_context5) {
while (1) {
switch (_context5.prev = _context5.next) {
case 0:
virtualBuffer = new _virtualBuffer.VirtualBuffer(readStream);
_context5.next = 3;
return virtualBuffer.readChunk(512, 'header');
case 3:
headerBuffer = _context5.sent;
header = parseHeader(headerBuffer);
// I think the multiplications are OK, because the descriptor is always at the beginning of the file
descriptorLength = header.descriptorSizeSectors * sectorSize;
_context5.next = 8;
return virtualBuffer.readChunk(descriptorLength, 'descriptor');
case 8:
descriptorBuffer = _context5.sent;
descriptor = parseDescriptor(descriptorBuffer);
// TODO: we concat them back for now so that the indices match, we'll have to introduce a bias later
_context5.next = 12;
return virtualBuffer.readChunk(-1, 'remainder');
case 12:
remainingBuffer = _context5.sent;
buffer = Buffer.concat([headerBuffer, descriptorBuffer, remainingBuffer]);
if (header.grainDirectoryOffsetSectors === -1) {
// in streamOptimized files the header may defer the grain directory offset;
// the effective header is then the footer copy located 1024 bytes before the end of the file
header = parseHeader(buffer.slice(-1024, -1024 + sectorSize));
}
rawOutputBuffer = Buffer.alloc(header.capacitySectors * sectorSize);
l1Size = Math.floor((header.capacitySectors + header.l1EntrySectors - 1) / header.l1EntrySectors);
l2Size = header.numGTEsPerGT;
l1 = [];
i = 0;
case 21:
if (!(i < l1Size)) {
_context5.next = 41;
break;
}
l1Entry = buffer.readUInt32LE(header.grainDirectoryOffsetSectors * sectorSize + 4 * i);
if (!(l1Entry !== 0)) {
_context5.next = 38;
break;
}
l1.push(l1Entry);
l2 = [];
j = 0;
case 27:
if (!(j < l2Size)) {
_context5.next = 38;
break;
}
l2Entry = buffer.readUInt32LE(l1Entry * sectorSize + 4 * j);
if (!(l2Entry !== 0 && l2Entry !== 1)) {
_context5.next = 35;
break;
}
_context5.next = 32;
return readGrain(l2Entry, buffer, header['flags']['compressedGrains']);
case 32:
grain = _context5.sent;
grain.grain.copy(rawOutputBuffer, grain.lba * sectorSize);
l2[j] = grain;
case 35:
j++;
_context5.next = 27;
break;
case 38:
i++;
_context5.next = 21;
break;
case 41:
vmdkType = descriptor['descriptor']['createType'];
if (!(!vmdkType || vmdkType.toLowerCase() !== 'streamOptimized'.toLowerCase())) {
_context5.next = 44;
break;
}
throw new Error('unsupported VMDK type "' + vmdkType + '", only streamOptimized is supported');
case 44:
return _context5.abrupt('return', { descriptor: descriptor.descriptor, extents: descriptor.extents, rawFile: rawOutputBuffer });
case 45:
case 'end':
return _context5.stop();
}
}
}, _callee5, this);
}));
return function readRawContent(_x4) {
return _ref5.apply(this, arguments);
};
}();
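// Worked example of the table sizing above (typical streamOptimized geometry,
// assumed rather than read from any particular file): with grainSizeSectors = 128
// (64 KiB grains) and numGTEsPerGT = 512, l1EntrySectors = 512 * 128 = 65536
// sectors, so a 2 GiB disk (capacitySectors = 4194304) needs
// l1Size = ceil(4194304 / 65536) = 64 grain directory entries, each pointing at
// a grain table of l2Size = 512 entries.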
var _zlib = require('zlib');
var _zlib2 = _interopRequireDefault(_zlib);
var _virtualBuffer = require('./virtual-buffer');
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var sectorSize = 512;
var compressionDeflate = 'COMPRESSION_DEFLATE';
var compressionNone = 'COMPRESSION_NONE';
var compressionMap = [compressionNone, compressionDeflate];
function parseS64b(buffer, offset, valueName) {
var low = buffer.readUInt32LE(offset);
var high = buffer.readInt32LE(offset + 4);
// the 64-bit field is assembled into a double float, whose 53-bit mantissa cannot
// represent every 64-bit integer, so values outside the safe range are rejected
// (note that the naive low | high << 32 does not work in JS: shift counts are taken modulo 32)
var value = high * Math.pow(2, 32) + low;
if (Math.abs(value) > Math.pow(2, 53) - 1) {
throw new Error('Unsupported VMDK, ' + valueName + ' is too big');
}
return value;
}
function parseU64b(buffer, offset, valueName) {
var low = buffer.readUInt32LE(offset);
var high = buffer.readUInt32LE(offset + 4);
// same 53-bit mantissa limitation as parseS64b, e.g. bytes 00 00 00 00 01 00 00 00 parse to 2^32 = 4294967296
var value = high * Math.pow(2, 32) + low;
if (value > Math.pow(2, 53) - 1) {
throw new Error('Unsupported VMDK, ' + valueName + ' is too big');
}
return value;
}
function parseDescriptor(descriptorSlice) {
var descriptorText = descriptorSlice.toString('ascii').replace(/\x00+$/, '');
var descriptorDict = {};
var extentList = [];
var lines = descriptorText.split(/\r?\n/).filter(function (line) {
return line.trim().length > 0 && line[0] !== '#';
});
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
try {
for (var _iterator = (0, _getIterator3.default)(lines), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var line = _step.value;
var defLine = line.split('=');
// the quote test avoids treating an extent line as a key=value pair when the extent name contains an equal sign
if (defLine.length === 2 && defLine[0].indexOf('"') === -1) {
descriptorDict[defLine[0]] = defLine[1].replace(/['"]+/g, '');
} else {
var items = line.split(' ');
extentList.push({
access: items[0],
sizeSectors: items[1],
type: items[2],
name: items[3],
offset: items.length > 4 ? items[4] : 0
});
}
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator.return) {
_iterator.return();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
return { descriptor: descriptorDict, extents: extentList };
}
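// Illustration (made-up descriptor text): a descriptor block such as
//   version=1
//   createType="streamOptimized"
//   RW 4194304 SPARSE "disk-flat.vmdk"
// parses to descriptor = { version: '1', createType: 'streamOptimized' } and
// extents = [{ access: 'RW', sizeSectors: '4194304', type: 'SPARSE',
//              name: '"disk-flat.vmdk"', offset: 0 }]
// (extent names keep their quotes; only key=value pairs are unquoted)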
function parseFlags(flagBuffer) {
var number = flagBuffer.readUInt32LE(0);
return {
newLineTest: !!(number & 1 << 0),
useSecondaryGrain: !!(number & 1 << 1),
useZeroedGrainTable: !!(number & 1 << 2),
compressedGrains: !!(number & 1 << 16),
hasMarkers: !!(number & 1 << 17)
};
}
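// Example: a flags value of 0x00030001 has bits 0, 16 and 17 set and parses to
// { newLineTest: true, useSecondaryGrain: false, useZeroedGrainTable: false,
//   compressedGrains: true, hasMarkers: true }, the combination typically seen
// in streamOptimized disks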
function parseHeader(buffer) {
var magicString = buffer.slice(0, 4).toString('ascii');
if (magicString !== 'KDMV') {
throw new Error('not a VMDK file');
}
var version = buffer.readUInt32LE(4);
if (version !== 1 && version !== 3) {
throw new Error('unsupported VMDK version ' + version + ', only version 1 and 3 are supported');
}
var flags = parseFlags(buffer.slice(8, 12));
var capacitySectors = parseU64b(buffer, 12, 'capacitySectors');
var grainSizeSectors = parseU64b(buffer, 20, 'grainSizeSectors');
var descriptorOffsetSectors = parseU64b(buffer, 28, 'descriptorOffsetSectors');
var descriptorSizeSectors = parseU64b(buffer, 36, 'descriptorSizeSectors');
var numGTEsPerGT = buffer.readUInt32LE(44);
var rGrainDirectoryOffsetSectors = parseS64b(buffer, 48, 'rGrainDirectoryOffsetSectors');
var grainDirectoryOffsetSectors = parseS64b(buffer, 56, 'grainDirectoryOffsetSectors');
var overheadSectors = parseS64b(buffer, 64, 'overheadSectors');
var compressionMethod = compressionMap[buffer.readUInt16LE(77)];
var l1EntrySectors = numGTEsPerGT * grainSizeSectors;
return {
flags: flags,
compressionMethod: compressionMethod,
grainSizeSectors: grainSizeSectors,
overheadSectors: overheadSectors,
capacitySectors: capacitySectors,
descriptorOffsetSectors: descriptorOffsetSectors,
descriptorSizeSectors: descriptorSizeSectors,
grainDirectoryOffsetSectors: grainDirectoryOffsetSectors,
rGrainDirectoryOffsetSectors: rGrainDirectoryOffsetSectors,
l1EntrySectors: l1EntrySectors,
numGTEsPerGT: numGTEsPerGT
};
}
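// For reference, the SparseExtentHeader byte offsets read above:
//   0-3   magic 'KDMV'             4-7   version
//   8-11  flags                    12-19 capacitySectors
//   20-27 grainSizeSectors         28-35 descriptorOffsetSectors
//   36-43 descriptorSizeSectors    44-47 numGTEsPerGT
//   48-55 rGrainDirectoryOffsetSectors
//   56-63 grainDirectoryOffsetSectors
//   64-71 overheadSectors          77-78 compressionMethod (uint16)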
function tryToParseMarker(buffer) {
var value = buffer.readUInt32LE(0);
var size = buffer.readUInt32LE(8);
var type = buffer.readUInt32LE(12);
return { value: value, size: size, type: type };
}
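// next() below uses these fields to walk a streamOptimized stream: when
// size === 0 the sector is a metadata marker and value tells how many sectors
// to skip, while size > 10 means the sector starts a compressed grain whose
// payload continues past this first sector (type is parsed but not used here)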
function alignSectors(number) {
return Math.ceil(number / sectorSize) * sectorSize;
}
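// e.g. alignSectors(512) === 512 and alignSectors(513) === 1024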
var VMDKDirectParser = exports.VMDKDirectParser = function () {
function VMDKDirectParser(readStream) {
(0, _classCallCheck3.default)(this, VMDKDirectParser);
this.virtualBuffer = new _virtualBuffer.VirtualBuffer(readStream);
this.header = null;
}
// I found a VMDK file whose L1 and L2 tables did not have a marker, but sat at the top of the file.
// I detect this case and eat those tables first, then let the normal loop go over the grains.
(0, _createClass3.default)(VMDKDirectParser, [{
key: '_readL1',
value: function () {
var _ref2 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee2() {
var position, l1entries, sectorAlignedL1Bytes, l1Buffer, l2Start, l2IsContiguous, i, l1Entry, previousL1Entry, l1L2FreeSpace, l2entries, l2ByteSize, l2Buffer, grainsAreInAscendingOrder, previousL2Entry, firstGrain, _i, l2Entry, freeSpace;
return _regenerator2.default.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
position = this.virtualBuffer.position;
l1entries = Math.floor((this.header.capacitySectors + this.header.l1EntrySectors - 1) / this.header.l1EntrySectors);
sectorAlignedL1Bytes = alignSectors(l1entries * 4);
_context2.next = 5;
return this.virtualBuffer.readChunk(sectorAlignedL1Bytes, 'L1 table ' + position);
case 5:
l1Buffer = _context2.sent;
l2Start = 0;
l2IsContiguous = true;
for (i = 0; i < l1entries; i++) {
l1Entry = l1Buffer.readUInt32LE(i * 4);
if (i > 0) {
previousL1Entry = l1Buffer.readUInt32LE((i - 1) * 4);
l2IsContiguous = l2IsContiguous && l1Entry - previousL1Entry === 4;
} else {
l2IsContiguous = l1Entry * sectorSize === this.virtualBuffer.position || l1Entry * sectorSize === this.virtualBuffer.position + 512;
l2Start = l1Entry * sectorSize;
}
}
if (l2IsContiguous) {
_context2.next = 11;
break;
}
return _context2.abrupt('return', null);
case 11:
l1L2FreeSpace = l2Start - this.virtualBuffer.position;
if (!(l1L2FreeSpace > 0)) {
_context2.next = 15;
break;
}
_context2.next = 15;
return this.virtualBuffer.readChunk(l1L2FreeSpace, 'freeSpace between L1 and L2');
case 15:
l2entries = Math.ceil(this.header.capacitySectors / this.header.grainSizeSectors);
l2ByteSize = alignSectors(l1entries * this.header.numGTEsPerGT * 4);
_context2.next = 19;
return this.virtualBuffer.readChunk(l2ByteSize, 'L2 table ' + position);
case 19:
l2Buffer = _context2.sent;
grainsAreInAscendingOrder = true;
previousL2Entry = 0;
firstGrain = null;
for (_i = 0; _i < l2entries; _i++) {
l2Entry = l2Buffer.readUInt32LE(_i * 4);
if (_i > 0 && previousL2Entry !== 0 && l2Entry !== 0) {
grainsAreInAscendingOrder = grainsAreInAscendingOrder && previousL2Entry < l2Entry;
}
previousL2Entry = l2Entry;
if (firstGrain === null) {
firstGrain = l2Entry;
}
}
if (grainsAreInAscendingOrder) {
_context2.next = 26;
break;
}
throw new Error('Unsupported file format');
case 26:
freeSpace = firstGrain * sectorSize - this.virtualBuffer.position;
if (!(freeSpace > 0)) {
_context2.next = 30;
break;
}
_context2.next = 30;
return this.virtualBuffer.readChunk(freeSpace, 'freeSpace after L2');
case 30:
case 'end':
return _context2.stop();
}
}
}, _callee2, this);
}));
function _readL1() {
return _ref2.apply(this, arguments);
}
return _readL1;
}()
}, {
key: 'readHeader',
value: function () {
var _ref3 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee3() {
var headerBuffer, magicString, version, descriptorLength, descriptorBuffer, l1PositionBytes, endOfDescriptor;
return _regenerator2.default.wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
_context3.next = 2;
return this.virtualBuffer.readChunk(512, 'readHeader');
case 2:
headerBuffer = _context3.sent;
magicString = headerBuffer.slice(0, 4).toString('ascii');
if (!(magicString !== 'KDMV')) {
_context3.next = 6;
break;
}
throw new Error('not a VMDK file');
case 6:
version = headerBuffer.readUInt32LE(4);
if (!(version !== 1 && version !== 3)) {
_context3.next = 9;
break;
}
throw new Error('unsupported VMDK version ' + version + ', only version 1 and 3 are supported');
case 9:
this.header = parseHeader(headerBuffer);
// I think the multiplications are OK, because the descriptor is always at the beginning of the file
descriptorLength = this.header.descriptorSizeSectors * sectorSize;
_context3.next = 13;
return this.virtualBuffer.readChunk(descriptorLength, 'descriptor');
case 13:
descriptorBuffer = _context3.sent;
this.descriptor = parseDescriptor(descriptorBuffer);
l1PositionBytes = null;
if (this.header.grainDirectoryOffsetSectors !== -1 && this.header.grainDirectoryOffsetSectors !== 0) {
l1PositionBytes = this.header.grainDirectoryOffsetSectors * sectorSize;
}
endOfDescriptor = this.virtualBuffer.position;
if (!(l1PositionBytes !== null && (l1PositionBytes === endOfDescriptor || l1PositionBytes === endOfDescriptor + sectorSize))) {
_context3.next = 24;
break;
}
if (!(l1PositionBytes === endOfDescriptor + sectorSize)) {
_context3.next = 22;
break;
}
_context3.next = 22;
return this.virtualBuffer.readChunk(sectorSize, 'skipping L1 marker');
case 22:
_context3.next = 24;
return this._readL1();
case 24:
return _context3.abrupt('return', this.header);
case 25:
case 'end':
return _context3.stop();
}
}
}, _callee3, this);
}));
function readHeader() {
return _ref3.apply(this, arguments);
}
return readHeader;
}()
}, {
key: 'next',
value: function () {
var _ref4 = (0, _asyncToGenerator3.default)(_regenerator2.default.mark(function _callee4() {
var position, sector, marker, grainDiskSize, alignedGrainDiskSize, remainOfBufferSize, remainderOfGrainBuffer, grainBuffer;
return _regenerator2.default.wrap(function _callee4$(_context4) {
while (1) {
switch (_context4.prev = _context4.next) {
case 0:
if (this.virtualBuffer.isDepleted) {
_context4.next = 25;
break;
}
position = this.virtualBuffer.position;
_context4.next = 4;
return this.virtualBuffer.readChunk(512, 'marker start ' + position);
case 4:
sector = _context4.sent;
if (!(sector.length === 0)) {
_context4.next = 7;
break;
}
return _context4.abrupt('break', 25);
case 7:
marker = tryToParseMarker(sector);
if (!(marker.size === 0)) {
_context4.next = 14;
break;
}
if (!(marker.value !== 0)) {
_context4.next = 12;
break;
}
_context4.next = 12;
return this.virtualBuffer.readChunk(marker.value * sectorSize, 'other marker value ' + this.virtualBuffer.position);
case 12:
_context4.next = 23;
break;
case 14:
if (!(marker.size > 10)) {
_context4.next = 23;
break;
}
grainDiskSize = marker.size + 12;
alignedGrainDiskSize = alignSectors(grainDiskSize);
remainOfBufferSize = alignedGrainDiskSize - sectorSize;
_context4.next = 20;
return this.virtualBuffer.readChunk(remainOfBufferSize, 'grain remainder ' + this.virtualBuffer.position);
case 20:
remainderOfGrainBuffer = _context4.sent;
grainBuffer = Buffer.concat([sector, remainderOfGrainBuffer]);
return _context4.abrupt('return', readGrain(0, grainBuffer, this.header.compressionMethod === compressionDeflate && this.header.flags.compressedGrains));
case 23:
_context4.next = 0;
break;
case 25:
return _context4.abrupt('return', new _promise2.default(function (resolve) {
return resolve(null);
}));
case 26:
case 'end':
return _context4.stop();
}
}
}, _callee4, this);
}));
function next() {
return _ref4.apply(this, arguments);
}
return next;
}()
}]);
return VMDKDirectParser;
}();
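// Streaming usage sketch (the vmdkReadStream variable is hypothetical; this
// mirrors how the class above is meant to be driven):
//
//   var parser = new VMDKDirectParser(vmdkReadStream);
//   parser.readHeader()
//     .then(function loop() {
//       return parser.next().then(function (grain) {
//         if (grain === null) {
//           return; // the underlying stream is depleted
//         }
//         // grain.lbaBytes is the byte offset in the virtual disk,
//         // grain.grain the inflated grain content
//         return loop();
//       });
//     });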
//# sourceMappingURL=vmdk-read.js.map