@etothepii/satisfactory-file-parser
A file parser for Satisfactory files. Supports both save files and blueprint files.
JavaScript
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SaveWriter = void 0;
const pako_1 = __importDefault(require("pako"));
const alignment_enum_1 = require("../../byte/alignment.enum");
const byte_writer_class_1 = require("../../byte/byte-writer.class");
const parser_error_1 = require("../../error/parser.error");
const file_types_1 = require("../../file.types");
const MD5Hash_1 = require("../objects/ue/MD5Hash");
const level_class_1 = require("./level.class");
const save_reader_1 = require("./save-reader");
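// SaveWriter serializes an in-memory save back into the binary .sav layout:
// the header is written first, then the save body is emitted as a sequence of
// zlib-compressed chunks (see GenerateCompressedChunksFromData below).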
class SaveWriter extends byte_writer_class_1.ByteWriter {
    constructor() {
        super(alignment_enum_1.Alignment.LITTLE_ENDIAN);
    }
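    // Writes the save header. Newer header fields are gated on saveHeaderType,
    // so older headers round-trip without gaining fields they never had.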
    static WriteHeader(writer, header) {
        writer.writeInt32(header.saveHeaderType);
        writer.writeInt32(header.saveVersion);
        writer.writeInt32(header.buildVersion);
        writer.writeString(header.mapName);
        writer.writeString(header.mapOptions);
        writer.writeString(header.sessionName);
        writer.writeInt32(header.playDurationSeconds);
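        // saveDateTime appears to be Unix-epoch milliseconds: multiplying by
        // 10000 converts to 100 ns ticks, and EPOCH_TICKS rebases onto the
        // tick epoch the save format stores.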
        writer.writeInt64(BigInt(header.saveDateTime) * 10000n + save_reader_1.SaveReader.EPOCH_TICKS);
        writer.writeByte(header.sessionVisibility);
        if (header.saveHeaderType >= 7) {
            writer.writeInt32(header.fEditorObjectVersion);
        }
        if (header.saveHeaderType >= 8) {
            if (header.modMetadata) {
                writer.writeString(JSON.stringify(header.modMetadata));
            }
            else {
                writer.writeString(header.rawModMetadataString);
            }
            writer.writeInt32(header.isModdedSave);
        }
        if (header.saveHeaderType >= 10) {
            writer.writeString(header.saveIdentifier);
        }
        if (header.saveHeaderType >= 11) {
            writer.writeInt32(header.partitionEnabledFlag ? 1 : 0);
        }
        if (header.saveHeaderType >= 12) {
            MD5Hash_1.MD5Hash.write(writer, header.consistencyHashBytes);
        }
        if (header.saveHeaderType >= 13) {
            writer.writeInt32(header.creativeModeEnabled ? 1 : 0);
        }
        if (header.saveVersion < 21) {
            throw new parser_error_1.UnsupportedVersionError("The save version is too old to be supported currently.");
        }
    }
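    // Writes all levels. The count written excludes the persistent level: its
    // name equals header.mapName and is therefore not written per level in the
    // loop below, while every sublevel is prefixed with its own name.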
    static WriteLevels(writer, save, buildVersion) {
        writer.writeInt32(save.levels.length - 1);
        for (const level of save.levels) {
            if (level.name !== save.header.mapName) {
                writer.writeString(level.name);
            }
            level_class_1.Level.WriteLevel(writer, level, buildVersion);
        }
        level_class_1.Level.SerializeCollectablesList(writer, save.trailingCollectedObjects ?? []);
    }
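    // Compresses an already-serialized save body into the chunked on-disk
    // format: a 4-byte total-size prefix, then one zlib-deflated chunk per
    // slice of at most maxUncompressedChunkContentSize bytes, each preceded
    // by a chunk header.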
    static GenerateCompressedChunksFromData(bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, alignment = alignment_enum_1.Alignment.LITTLE_ENDIAN) {
        const errors = [];
        const totalUncompressedSize = bufferArray.byteLength;
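        // Prefix the body with its total uncompressed size. The buffer is
        // allocated 8 bytes larger, but only the first 4 carry the prefix,
        // so 4 zero bytes trail the data.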
        const saveBody = new Uint8Array(bufferArray.byteLength + 8);
        saveBody.set(new Uint8Array(bufferArray), 4);
        const miniView = new DataView(saveBody.buffer);
        miniView.setInt32(0, totalUncompressedSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
        onBinaryBeforeCompressing(saveBody.buffer);
        let handledByte = 0;
        const chunkSummary = [];
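        // Slice the body into chunks of at most maxUncompressedChunkContentSize
        // bytes and deflate each slice independently with pako (zlib).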
        while (handledByte < saveBody.byteLength) {
            const uncompressedContentSize = Math.min(compressionInfo.maxUncompressedChunkContentSize, saveBody.byteLength - handledByte);
            const uncompressedChunk = saveBody.buffer.slice(handledByte, handledByte + uncompressedContentSize);
            let compressedChunk = new Uint8Array(0);
            try {
                compressedChunk = pako_1.default.deflate(uncompressedChunk);
            }
            catch (err) {
                throw new parser_error_1.CompressionLibraryError("Could not compress save data. " + err);
            }
            const chunk = new Uint8Array(compressionInfo.chunkHeaderSize + compressedChunk.byteLength);
            chunk.set(compressedChunk, compressionInfo.chunkHeaderSize);
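            // Chunk header (the offsets below span 49 bytes): package file tag,
            // the 0x22222222 archive magic, the max uncompressed chunk size, a
            // compression algorithm byte (ZLIB), then compressed/uncompressed
            // byte counts, each written twice as a 32-bit value padded with a
            // zeroed high word (i.e. as 64-bit fields).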
            const view = new DataView(chunk.buffer);
            view.setInt32(0, compressionInfo.packageFileTag, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(4, 0x22222222, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(8, compressionInfo.maxUncompressedChunkContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(12, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setUint8(16, file_types_1.CompressionAlgorithmCode.ZLIB);
            view.setInt32(17, compressedChunk.byteLength, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(21, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(25, uncompressedContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(29, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(33, compressedChunk.byteLength, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(37, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(41, uncompressedContentSize, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            view.setInt32(45, 0, alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN);
            onChunk(chunk);
            chunkSummary.push({
                uncompressedSize: uncompressedContentSize + compressionInfo.chunkHeaderSize,
                compressedSize: compressedChunk.byteLength + compressionInfo.chunkHeaderSize
            });
            handledByte += uncompressedContentSize;
        }
        return chunkSummary;
    }
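    // Splits the writer's buffer into the already-written header and the body,
    // hands the header to the caller, and compresses the remainder into chunks.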
    generateChunks(compressionInfo, posAfterHeader, onBinaryBeforeCompressing, onHeader, onChunk) {
        if (posAfterHeader <= 0) {
            throw new parser_error_1.ParserError('ParserError', 'Seems like this buffer has no header. Please write the header first before you can generate chunks.');
        }
        const header = new Uint8Array(this.bufferArray.slice(0, posAfterHeader));
        onHeader(header);
        this.bufferArray = this.bufferArray.slice(posAfterHeader);
        const chunkSummary = SaveWriter.GenerateCompressedChunksFromData(this.bufferArray, compressionInfo, onBinaryBeforeCompressing, onChunk, this.alignment);
        return chunkSummary;
    }
}
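// Writes the save body validation hash: fixed integer and 'None' string
// markers surrounding the raw hash bytes.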
SaveWriter.WriteSaveBodyHash = (writer, hash) => {
    writer.writeInt32(0);
    writer.writeInt32(6);
    writer.writeString('None');
    writer.writeInt32(0);
    writer.writeBytesArray(hash);
    writer.writeInt32(1);
    writer.writeString('None');
};
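// Writes the grid data: per grid its checksum, name, cell size, and grid
// hash, then a (hash, name) pair for each child cell. A trailing zero
// terminates the list.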
SaveWriter.WriteGrids = (writer, grids) => {
    for (const [gridName, grid] of Object.entries(grids)) {
        writer.writeInt32(grid.checksum);
        writer.writeString(gridName);
        writer.writeInt32(grid.cellSize);
        writer.writeUint32(grid.gridHash);
        for (const [cellName, cellHash] of Object.entries(grid.children)) {
            writer.writeUint32(cellHash);
            writer.writeString(cellName);
        }
    }
    writer.writeInt32(0);
};
exports.SaveWriter = SaveWriter;
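// Usage sketch (a minimal outline, not a documented public API: `save` and
// its compressionInfo field are assumed shapes, and getBufferPosition() is a
// hypothetical accessor for the writer's current offset):
//
//   const writer = new SaveWriter();
//   SaveWriter.WriteHeader(writer, save.header);
//   const posAfterHeader = writer.getBufferPosition(); // hypothetical
//   SaveWriter.WriteLevels(writer, save, save.header.buildVersion);
//   writer.generateChunks(save.compressionInfo, posAfterHeader,
//       binary => { /* inspect body before compression */ },
//       header => emit(header),
//       chunk => emit(chunk));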