@etothepii/satisfactory-file-parser

A file parser for Satisfactory files, including save files and blueprint files.
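
Below is the compiled source of the blueprint reader module shipped in this package. As a quick orientation, the following sketch shows one way the exported BlueprintReader might be driven; the require path, the file name, and the call order (read the uncompressed header, inflate the compressed chunks, then parse the objects) are assumptions inferred from the code below rather than documented API, so treat it as an illustration only.

// Sketch only: require path, file name and call order are assumptions
// inferred from the compiled source shown below.
const fs = require('fs');
const { BlueprintReader } = require('@etothepii/satisfactory-file-parser');

// Node Buffer -> standalone ArrayBuffer; the reader slices it and wraps it in DataViews.
const sbp = new Uint8Array(fs.readFileSync('MyBlueprint.sbp')).buffer;
const reader = new BlueprintReader(sbp);

const header = BlueprintReader.ReadHeader(reader);    // dimensions, item costs, recipe refs
reader.inflateChunks();                               // decompress the chunked body
const objects = BlueprintReader.ParseObjects(reader); // entities and components

console.log(header.designerDimension, objects.length);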

"use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.BlueprintConfigReader = exports.BlueprintReader = void 0; const pako_1 = __importDefault(require("pako")); const alignment_enum_1 = require("../../byte/alignment.enum"); const byte_reader_class_1 = require("../../byte/byte-reader.class"); const parser_error_1 = require("../../error/parser.error"); const SaveComponent_1 = require("../objects/SaveComponent"); const SaveEntity_1 = require("../objects/SaveEntity"); const level_class_1 = require("../save/level.class"); const save_reader_1 = require("../save/save-reader"); const util_types_1 = require("../structs/util.types"); class BlueprintReader extends byte_reader_class_1.ByteReader { constructor(bluePrintBuffer) { super(bluePrintBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN); this.compressionInfo = { packageFileTag: 0, maxUncompressedChunkContentSize: 0, chunkHeaderSize: save_reader_1.DEFAULT_SATISFACTORY_CHUNK_HEADER_SIZE }; } static ReadHeader(reader) { const blueprintTypeOrWhat = reader.readBytes(4); const versionThing = reader.readBytes(2 * 4); const dimensions = (0, util_types_1.ParseVec3Int)(reader); let itemTypeCount = reader.readInt32(); const itemCosts = new Array(itemTypeCount).fill(['', 0]); for (let i = 0; i < itemTypeCount; i++) { let indexOrWhat = reader.readInt32(); let itemPathName = reader.readString(); let itemCount = reader.readInt32(); itemCosts[i] = [itemPathName, itemCount]; } let recipeCount = reader.readInt32(); const recipeRefs = new Array(recipeCount).fill(''); for (let i = 0; i < recipeCount; i++) { let indexOrWhat = reader.readInt32(); const recipeName = reader.readString(); recipeRefs[i] = recipeName; } return { designerDimension: dimensions, recipeReferences: recipeRefs, itemCosts }; } inflateChunks() { this.fileBuffer = this.fileBuffer.slice(this.currentByte); this.handledByte = 0; this.currentByte = 0; this.maxByte = this.fileBuffer.byteLength; let currentChunks = []; let totalUncompressedBodySize = 0; while (this.handledByte < this.maxByte) { let chunkHeader = new DataView(this.fileBuffer.slice(0, this.compressionInfo.chunkHeaderSize)); this.currentByte = this.compressionInfo.chunkHeaderSize; this.handledByte += this.compressionInfo.chunkHeaderSize; if (this.compressionInfo.packageFileTag <= 0) { this.compressionInfo.packageFileTag = chunkHeader.getUint32(0, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); } if (this.compressionInfo.maxUncompressedChunkContentSize <= 0) { this.compressionInfo.maxUncompressedChunkContentSize = chunkHeader.getInt32(8, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); } const chunkCompressedLength = chunkHeader.getInt32(33, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); const chunkUncompressedLength = chunkHeader.getInt32(25, this.alignment === alignment_enum_1.Alignment.LITTLE_ENDIAN); totalUncompressedBodySize += chunkUncompressedLength; const currentChunkSize = chunkCompressedLength; let currentChunk = this.fileBuffer.slice(this.currentByte, this.currentByte + currentChunkSize); this.handledByte += currentChunkSize; this.currentByte += currentChunkSize; this.fileBuffer = this.fileBuffer.slice(this.currentByte); this.currentByte = 0; try { let currentInflatedChunk = null; currentInflatedChunk = pako_1.default.inflate(currentChunk); currentChunks.push(currentInflatedChunk); } catch (err) { throw new 
parser_error_1.ParserError('ParserError', 'An error occurred while calling pako inflate.' + err); } } let newChunkLength = currentChunks.map(cc => cc.length).reduce((prev, cur) => prev + cur); const bigWholeChunk = new Uint8Array(newChunkLength); let currentLength = 0; for (let i = 0; i < currentChunks.length; i++) { bigWholeChunk.set(currentChunks[i], currentLength); currentLength += currentChunks[i].length; } this.currentByte = 0; this.maxByte = bigWholeChunk.buffer.byteLength; this.bufferView = new DataView(bigWholeChunk.buffer); return { newChunkLength, numChunks: currentChunks.length, inflatedData: bigWholeChunk }; } static ParseObjects(reader) { const totalBodyRestSize = reader.readInt32(); const objectHeadersBinarySize = reader.readInt32(); let objects = []; level_class_1.Level.ReadObjectHeaders(reader, objects, () => { }); const someChecksumThing = reader.readInt32(); BlueprintReader.ReadBlueprintObjectContents(reader, objects, 0); const pos = reader.getBufferPosition(); return objects; } static ReadBlueprintObjectContents(reader, objectsList, buildVersion) { const countEntities = reader.readInt32(); for (let i = 0; i < countEntities; i++) { const len = reader.readInt32(); if (len === 0) { throw new parser_error_1.CorruptSaveError(`check number is a wrong value (${len}). This normally indicates a corrupt entity or blueprint.`); } const obj = objectsList[i]; if ((0, SaveEntity_1.isSaveEntity)(obj)) { SaveEntity_1.SaveEntity.ParseData(obj, len, reader, buildVersion, obj.typePath); } else if ((0, SaveComponent_1.isSaveComponent)(obj)) { SaveComponent_1.SaveComponent.ParseData(obj, len, reader, buildVersion, obj.typePath); } } } } exports.BlueprintReader = BlueprintReader; class BlueprintConfigReader extends byte_reader_class_1.ByteReader { constructor(bluePrintConfigBuffer) { super(bluePrintConfigBuffer, alignment_enum_1.Alignment.LITTLE_ENDIAN); this.bluePrintConfigBuffer = bluePrintConfigBuffer; this.parse = () => BlueprintConfigReader.ParseConfig(this); } static ParseConfig(reader) { const unk = reader.readInt32(); const description = reader.readString(); const unk3 = reader.readInt32(); const colorMaybe = (0, util_types_1.ParseCol4RGBA)(reader); return { description, color: colorMaybe, iconID: unk3 }; } } exports.BlueprintConfigReader = BlueprintConfigReader;
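
The BlueprintConfigReader defined above handles the blueprint's companion config data, which carries the description, icon id, and color. A matching sketch, with the require path and the .sbpcfg file name again assumed for illustration:

// Sketch only: require path and file name are assumptions.
const fs = require('fs');
const { BlueprintConfigReader } = require('@etothepii/satisfactory-file-parser');

const cfg = new Uint8Array(fs.readFileSync('MyBlueprint.sbpcfg')).buffer;
const configReader = new BlueprintConfigReader(cfg);

// parse() delegates to ParseConfig and returns { description, color, iconID }.
const config = configReader.parse();
console.log(config.description, config.iconID, config.color);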