@etothepii/satisfactory-file-parser
A file parser for Satisfactory files, including save files and blueprint files.
JavaScript
var _a;
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadableStreamParser = void 0;
const web_1 = require("stream/web");
const SaveComponent_1 = require("../../satisfactory/objects/SaveComponent");
const SaveEntity_1 = require("../../satisfactory/objects/SaveEntity");
const level_class_1 = require("../../satisfactory/save/level.class");
const satisfactory_save_1 = require("../../satisfactory/save/satisfactory-save");
const save_reader_1 = require("../../satisfactory/save/save-reader");
const DEFAULT_BYTE_HIGHWATERMARK = 1024 * 1024 * 200;
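// Queuing strategy for the JSON output stream: every enqueued chunk is a string,
// so its cost is measured in characters (chunk.length). With the default
// highWaterMark of DEFAULT_BYTE_HIGHWATERMARK / 4 (~52 million characters), the
// stream signals backpressure once roughly that many characters are buffered.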
const createStringLengthQueuingStrategy = (highWaterMark = DEFAULT_BYTE_HIGHWATERMARK / 4) => ({
highWaterMark,
size: (chunk) => {
if (chunk === undefined) {
return 0;
}
return chunk.length;
}
});
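// Minimal producer/consumer handshake: the producer calls lock(resolve) and waits
// until the stream's pull handler calls unlock(). If unlock() arrives before the
// producer has locked, that single early unlock is remembered and released on the
// next lock() call.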
class SimpleWaitForConsumerLock {
constructor() {
this.locked = false;
this.unlockWasCalledInTheMeantime = false;
this.executionFn = undefined;
}
lock(executionFn) {
this.executionFn = executionFn;
this.locked = true;
if (this.unlockWasCalledInTheMeantime) {
// consume the early unlock so it releases only this one waiter
this.unlockWasCalledInTheMeantime = false;
this.unlock();
}
}
unlock() {
this.locked = false;
if (this.executionFn) {
this.executionFn();
this.executionFn = undefined;
}
else {
this.unlockWasCalledInTheMeantime = true;
}
}
}
;
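// Streams a save file to JSON without materializing the whole document in memory.
// WriteLevels serializes the level list: for every level it alternates between the
// object-header block and the object-body block of the binary save (seeking via
// skipBytes), reads objects in batches of 1000, writes each batch as JSON text and,
// roughly every 3000 written objects, waits for the consumer to honor backpressure.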
class ReadableStreamParser {
static async WriteLevels(write, reader, mapName, buildVersion) {
const batchingSizeOfObjects = 1000;
const thresholdOfWrittenObjectsUntilWaitingForConsumerAgain = 3 * batchingSizeOfObjects;
const levelCount = reader.readInt32();
reader.onProgressCallback(reader.getBufferProgress(), `reading pack of ${levelCount + 1} levels.`);
let writtenTotalObjectsSinceConsumerSync = 0;
for (let j = 0; j <= levelCount; j++) {
let levelName = (j === levelCount) ? '' + mapName : reader.readString();
if (j % 500 === 0) {
reader.onProgressCallback(reader.getBufferProgress(), `reading level [${(j + 1)}/${(levelCount + 1)}] ${levelName}`);
}
await write(`${j > 0 ? ', ' : ''}{"name": "${levelName}", "objects": [`, false);
const headersBinLen = reader.readInt32();
const unk = reader.readInt32();
const posBeforeHeaders = reader.getBufferPosition();
const afterAllHeaders = posBeforeHeaders + headersBinLen;
let countObjectHeaders = reader.readInt32();
let totalReadObjectsInLevel = 0;
let writtenObjectsInLevel = 0;
let afterHeadersOfBatch = reader.getBufferPosition();
let afterObjectsOfBatch = -1;
do {
reader.skipBytes(afterHeadersOfBatch - reader.getBufferPosition());
const objectCountToRead = Math.min(countObjectHeaders - totalReadObjectsInLevel, batchingSizeOfObjects);
const objects = ReadableStreamParser.ReadNObjectHeaders(reader, objectCountToRead);
afterHeadersOfBatch = reader.getBufferPosition();
if (totalReadObjectsInLevel === 0) {
reader.skipBytes(afterAllHeaders - reader.getBufferPosition());
const objectContentsBinLen = reader.readInt32();
const unk2 = reader.readInt32();
const posBeforeContents = reader.getBufferPosition();
const countEntities = reader.readInt32();
afterObjectsOfBatch = reader.getBufferPosition();
}
else {
reader.skipBytes(afterObjectsOfBatch - reader.getBufferPosition());
}
ReadableStreamParser.ReadNObjects(reader, objectCountToRead, objects, buildVersion);
afterObjectsOfBatch = reader.getBufferPosition();
totalReadObjectsInLevel += objectCountToRead;
if (countObjectHeaders > 10000 && totalReadObjectsInLevel % 10000 === 0) {
reader.onProgressCallback(reader.getBufferProgress(), `read object count [${totalReadObjectsInLevel}/${countObjectHeaders}] in level ${levelName}`);
}
let shouldWait = false;
if (writtenTotalObjectsSinceConsumerSync >= thresholdOfWrittenObjectsUntilWaitingForConsumerAgain) {
shouldWait = true;
writtenTotalObjectsSinceConsumerSync = 0;
}
await write(`${writtenObjectsInLevel > 0 ? ', ' : ''}${objects.map(obj => JSON.stringify(obj)).join(', ')}`, shouldWait);
writtenTotalObjectsSinceConsumerSync += objectCountToRead;
writtenObjectsInLevel += objectCountToRead;
} while (totalReadObjectsInLevel < countObjectHeaders);
await write('], "collectables": [', false);
const collectables = level_class_1.Level.ReadCollectablesList(reader);
await write(`${collectables.map(obj => JSON.stringify(obj)).join(', ')}`, true);
await write(']', false);
await write('}', false);
}
}
}
_a = ReadableStreamParser;
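// Creates the web ReadableStream that carries the JSON text. onStart receives the
// stream controller, onPullRequest is invoked with the controller's desiredSize
// whenever the stream machinery asks for more data, and finish() closes the stream.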
ReadableStreamParser.CreateReadableStreamForParsingSave = (onStart, onCancel, onPullRequest, highWaterMark = DEFAULT_BYTE_HIGHWATERMARK / 4) => {
let ourController = null;
const stream = new web_1.ReadableStream({
start: (controller) => {
ourController = controller;
onStart(ourController);
},
pull: (controller) => {
onPullRequest(ourController.desiredSize ?? 1);
},
cancel: (reason) => {
console.warn('parsing stream was canceled!', reason);
if (ourController !== null) {
ourController.close();
}
onCancel(reason);
}
}, createStringLengthQueuingStrategy(highWaterMark));
const finish = () => {
if (ourController !== null) {
ourController?.close();
}
};
return { stream, controller: ourController, finish };
};
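// Ties everything together: returns a ReadableStream of JSON text plus a
// startStreaming() function that drives the parse. write() only enqueues after the
// consumer has pulled when waitForConsumerToBeReady is set.
//
// A minimal consumption sketch (illustrative only, not part of this file). It assumes
// a runtime with web streams (Node 18+ or a browser) and that `bytes` is a Uint8Array
// holding the raw .sav file; how the bytes are obtained is up to the caller:
//
//   const { stream, startStreaming } = ReadableStreamParser.CreateReadableStreamFromSaveToJson('MySave', bytes);
//   const reader = stream.getReader();
//   const chunks = [];
//   const drain = (async () => {
//       for (;;) {
//           const { done, value } = await reader.read();
//           if (done) break;
//           chunks.push(value);
//       }
//   })();
//   await startStreaming();   // produces the JSON text; waits for the consumer on large saves
//   await drain;
//   const saveJson = JSON.parse(chunks.join(''));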
ReadableStreamParser.CreateReadableStreamFromSaveToJson = (name, bytes, onDecompressedSaveBody = () => { }, onProgress = () => { }) => {
const waitForConsumerLock = new SimpleWaitForConsumerLock();
const waitForConsumer = async () => {
return new Promise((resolve, reject) => {
waitForConsumerLock.lock(resolve);
});
};
const { stream, controller, finish } = ReadableStreamParser.CreateReadableStreamForParsingSave((controller) => {
}, (reason) => { }, (desiredSize) => {
waitForConsumerLock.unlock();
});
const write = async (value, waitForConsumerToBeReady = true) => {
if (waitForConsumerToBeReady) {
await waitForConsumer();
}
controller.enqueue(value);
};
const startStreaming = async () => {
const reader = new save_reader_1.SaveReader(bytes.buffer, onProgress);
const header = reader.readHeader();
const save = new satisfactory_save_1.SatisfactorySave(name, header);
const inflateResult = reader.inflateChunks();
onDecompressedSaveBody(reader.getBuffer());
const gridHash = reader.readSaveBodyHash();
const grids = reader.readGrids();
await ReadableStreamParser.WriteHeaderAndGrids(write, reader.compressionInfo, header, grids, gridHash);
await ReadableStreamParser.WriteLevels(write, reader, save.header.mapName, save.header.buildVersion);
await write(`]}`);
finish();
};
return { stream, startStreaming };
};
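// Writes the JSON document prefix: header, compression info, grid hash and grids,
// then opens the "levels" array that WriteLevels fills.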
ReadableStreamParser.WriteHeaderAndGrids = async (write, compressionInfo, header, grids, gridHash) => {
return write(`{"header": ${JSON.stringify(header)}, "compressionInfo": ${JSON.stringify(compressionInfo)}, "gridHash": ${JSON.stringify(gridHash)}, "grids": ${JSON.stringify(grids)}, "levels": [`, false);
};
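// Reads `count` object headers. Each header starts with an int32 type id that
// selects SaveEntity or SaveComponent; any other value is rejected.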
ReadableStreamParser.ReadNObjectHeaders = (reader, count) => {
let objects = [];
let objectsRead = 0;
for (; objectsRead < count; objectsRead++) {
let obj;
let objectType = reader.readInt32();
switch (objectType) {
case SaveEntity_1.SaveEntity.TypeID:
obj = new SaveEntity_1.SaveEntity('', '', '', '');
SaveEntity_1.SaveEntity.ParseHeader(reader, obj);
break;
case SaveComponent_1.SaveComponent.TypeID:
obj = new SaveComponent_1.SaveComponent('', '', '', '');
SaveComponent_1.SaveComponent.ParseHeader(reader, obj);
break;
default:
throw new Error('Unknown object type: ' + objectType);
}
objects.push(obj);
}
return objects;
};
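// Reads the body of each previously parsed header: two int32 fields
// (saveOrBlueprintIndicator, unknownType2), the body's binary size, then the
// entity- or component-specific data. The size is read but not cross-checked here.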
ReadableStreamParser.ReadNObjects = (reader, count, objects, buildVersion) => {
for (let i = 0; i < count; i++) {
objects[i].saveOrBlueprintIndicator = reader.readInt32();
objects[i].unknownType2 = reader.readInt32();
const binarySize = reader.readInt32();
const before = reader.getBufferPosition();
if ((0, SaveEntity_1.isSaveEntity)(objects[i])) {
SaveEntity_1.SaveEntity.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
}
else if ((0, SaveComponent_1.isSaveComponent)(objects[i])) {
SaveComponent_1.SaveComponent.ParseData(objects[i], binarySize, reader, buildVersion, objects[i].typePath);
}
}
};
exports.ReadableStreamParser = ReadableStreamParser;
;