// sc4 — a command line utility for automating SimCity 4 modding tasks
// and modifying savegames.
// # plugin-index.ts
import { LRUCache } from 'lru-cache';
import { FileType, DBPF, Entry, TGI, } from 'sc4/core';
import { TGIIndex, } from 'sc4/utils';
import { SmartBuffer } from 'smart-arraybuffer';
import buildFamilyIndex from './build-family-index.js';
import DirectoryScanOperation from './directory-scan-operation.js';
// # CorePluginIndex
// Contains the core functionality for a plugin index that is shared between
// Node and the browser.
// A plugin index is a data structure that scans a list of dbpf files and builds
// up an index of all the files in it by their TGI's.
export default class PluginIndex {
    // Glob pattern(s) used when scanning directories for plugin files.
    scan = [];
    // Every DBPF file that was scanned, in SimCity 4 load order.
    dbpfs = [];
    // Flat index of all entries across all dbpfs, searchable by TGI.
    entries = new TGIIndex();
    // Maps a family IID to the array of TGIs belonging to that family.
    families = new Map();
    // LRU cache bounding the memory held by decoded DBPF entries.
    cache;
    // Whether the core installation folder is indexed in addition to plugins.
    core = true;
    // ## constructor(opts)
    // - opts.scan: glob pattern (or array of patterns) of files to index.
    // - opts.core: whether to include the installation folder.
    // - opts.mem: memory budget in bytes; half of it backs the LRU cache.
    constructor(opts) {
        // By default we will look for .dat and .sc4* files. Nothing else
        // needs to be handled.
        const {
            scan = '**/*.{dat,sc4model,sc4desc,sc4lot}',
            core = true,
            mem = 4 * 1024 ** 3,
        } = opts;
        this.scan = [scan].flat();
        this.core = core;
        // Set up the cache that we'll use to free up memory of DBPF files
        // that are not read often. Entries without a loaded buffer cost
        // nothing; evicted entries release their buffer via free().
        this.cache = new LRUCache({
            maxSize: 0.5 * mem,
            sizeCalculation(entry) {
                if (!entry.buffer) return 0;
                return entry.buffer.byteLength;
            },
            dispose(entry) {
                entry.free();
            },
        });
    }
    // ## get length()
    // Total number of entries currently in the index.
    get length() {
        return this.entries?.length ?? 0;
    }
    // ## async build(opts)
    // Asynchronously builds up the file index in the same way that SimCity 4
    // does. This means that the *load order* of the files is important!
    // Returns `this` for chaining.
    async build(opts = {}) {
        const all = [];
        const ops = [];
        // If the installation folder is specified, read it in. Note that
        // `this.plugins` / `this.installation` may be provided by a subclass
        // or assigned externally before calling build().
        const {
            plugins = this.plugins,
            installation = this.installation,
        } = opts;
        if (this.core && installation) {
            const glob = this.createGlob(this.scan, installation);
            const op = new DirectoryScanOperation(this, glob);
            all.push(op.start());
            ops.push(op);
        }
        // Same for the user plugins folder.
        if (plugins) {
            const glob = this.createGlob(this.scan, plugins);
            const op = new DirectoryScanOperation(this, glob);
            all.push(op.start());
            ops.push(op);
        }
        // Start logging the progress now.
        await Promise.all(ops.map(op => op.filesPromise.promise));
        // Wait for everything to be read, and then build up the actual index.
        const results = await Promise.all(all);
        const flat = results.flat();
        const entries = this.entries = new TGIIndex(flat.length);
        for (let i = 0; i < entries.length; i++) {
            entries[i] = flat[i];
        }
        entries.build();
        return this;
    }
    // ## buildFamilies(opts)
    // Builds up the index of all building & prop families by reading in all
    // exemplars. Returns `this` for chaining, consistent with build()/load().
    async buildFamilies(opts = {}) {
        this.families = await buildFamilyIndex(this, opts);
        return this;
    }
    // ## touch(entry)
    // This method puts the given entry on top of the LRU cache, which means
    // that they will be registered as "last used" and hence are less likely to
    // get kicked out of memory (once loaded of course).
    touch(entry) {
        if (entry) {
            this.cache.set(entry.id, entry);
        }
        return entry;
    }
    // ## find(...args)
    // Looks up a single entry by TGI; delegates to the underlying TGIIndex.
    find(...args) {
        return this.entries.find(...args);
    }
    // ## findAll(...args)
    // Looks up all matching entries; delegates to the underlying TGIIndex.
    findAll(...args) {
        return this.entries.findAll(...args);
    }
    // ## getFamilyTGIs(family)
    // Returns the TGIs registered for the given family IID, or an empty
    // array when the family is unknown.
    getFamilyTGIs(family) {
        return this.families.get(family) ?? [];
    }
    // ## family(id)
    // Checks if a prop or building family exists with the given IID and
    // if so returns the array of matching index entries, otherwise null.
    family(family) {
        let arr = this.getFamilyTGIs(family).map(tgi => this.find(tgi));
        return arr.length > 0 ? arr : null;
    }
    // ## getHierarchicExemplar(exemplar)
    // Creates a small wrapper around the given exemplar that looks up values in
    // the exemplar's parent cohort if they are not present in the exemplar
    // itself.
    getHierarchicExemplar(exemplar) {
        return {
            get: (key) => {
                return this.getPropertyValue(exemplar, key);
            },
            getAsync: async (key) => {
                return await this.getPropertyValueAsync(exemplar, key);
            },
        };
    }
    // ## getProperty(exemplar, key)
    // This function accepts a parsed exemplar file and looks up the property
    // with the given key. If the property doesn't exist, then tries to look
    // it up in the parent cohort and so on all the way up.
    getProperty(exemplar, key) {
        let prop = exemplar.prop(key);
        while (!prop && exemplar.parent.type) {
            let { parent } = exemplar;
            let entry = this.find(parent);
            if (!entry) break;
            // Apparently Exemplar files can specify non-Cohort files as their
            // parent cohorts. This happens for example with the NAM. We need to
            // handle this gracefully.
            if (!(
                entry.isType(FileType.Exemplar) ||
                entry.isType(FileType.Cohort)
            )) break;
            exemplar = entry.read();
            // Guard against entries that don't decode to an exemplar-like
            // object: report the anomaly and stop walking up instead of
            // crashing on the .prop() call below.
            if (typeof exemplar.prop !== 'function') {
                console.log('Something wrong', entry.dbpf.file, entry);
                console.log('-'.repeat(100));
                break;
            }
            prop = exemplar.prop(key);
        }
        return prop;
    }
    // ## getPropertyValue(exemplar, key)
    // Directly returns the value for the given property in the exemplar. If
    // it doesn't exist, looks it up in the parent cohort.
    getPropertyValue(exemplar, key) {
        let prop = this.getProperty(exemplar, key);
        return prop ? prop.getSafeValue() : undefined;
    }
    // ## getPropertyAsync()
    // Same as .getProperty(), but in an async way, because we might need to
    // look up a parent cohort.
    async getPropertyAsync(exemplar, key) {
        let prop = exemplar.prop(key);
        while (!prop && exemplar.parent.type) {
            let { parent } = exemplar;
            let entry = this.find(parent);
            if (!entry) break;
            // Apparently Exemplar files can specify non-Cohort files as their
            // parent cohorts. This happens for example with the NAM. We need to
            // handle this gracefully.
            if (!(
                entry.isType(FileType.Exemplar) ||
                entry.isType(FileType.Cohort)
            )) break;
            exemplar = await entry.readAsync();
            // Same guard as in getProperty(): bail out gracefully when the
            // parent doesn't decode to an exemplar-like object.
            if (typeof exemplar.prop !== 'function') {
                console.log('Something wrong', entry.dbpf.file, entry);
                console.log('-'.repeat(100));
                break;
            }
            prop = exemplar.prop(key);
        }
        return prop;
    }
    // ## getPropertyValueAsync()
    // Same as getPropertyValue(), but in an async way, which is required in the
    // browser, but also speeds up indexing the building families sometimes.
    async getPropertyValueAsync(exemplar, key) {
        let prop = await this.getPropertyAsync(exemplar, key);
        return prop ? prop.getSafeValue() : undefined;
    }
    // ## toBuffer()
    // Serializes the index to a binary buffer that load() can read back.
    toBuffer() {
        // The first thing we'll do is serialize the file paths for every dbpf.
        const { dbpfs } = this;
        const dbpfToIndex = new Map();
        const ws = new SmartBuffer();
        ws.writeUInt32LE(dbpfs.length);
        for (let i = 0; i < dbpfs.length; i++) {
            const dbpf = dbpfs[i];
            dbpfToIndex.set(dbpf, i);
            ws.writeStringNT(dbpf.file);
        }
        // For the entries itself, we won't serialize the TGI, because we will
        // be able to re-use those from the binary index itself. However, we
        // will need to serialize the offset, size and pointer to the dbpf they
        // are part of.
        const { entries } = this;
        ws.writeUInt32LE(entries.length);
        for (let entry of entries) {
            const { offset, size } = entry;
            ws.writeUInt32LE(offset);
            ws.writeUInt32LE(size);
            const ptr = dbpfToIndex.get(entry.dbpf);
            ws.writeUInt32LE(ptr);
        }
        // Next we serialize the actual TGI index. This is relatively easy now,
        // because we can simply re-use the underlying Uint32Arrays. Note that
        // this means it depends on the system endianness.
        const index = entries.index.serialize();
        ws.writeUInt32LE(index.byteLength);
        ws.writeBuffer(index);
        // At last we serialize the families as well. Note: counts are written
        // *unsigned* to match the readUInt32LE calls in load().
        const { families } = this;
        ws.writeUInt32LE(families.size);
        for (let [family, tgis] of families) {
            ws.writeUInt32LE(family);
            ws.writeUInt32LE(tgis.length);
            for (let tgi of tgis) {
                ws.writeUInt32LE(tgi.type);
                ws.writeUInt32LE(tgi.group);
                ws.writeUInt32LE(tgi.instance);
            }
        }
        return ws.toUint8Array();
    }
    // ## load(buffer)
    // Instead of building up an index, we can also read in a cache index.
    // That's useful if we're often running a script on a large plugins folder
    // where we're sure the folder doesn't change. We can gain a lot of precious
    // time by reading in a cached version in this case!
    load(buffer) {
        // First we'll read in all the dbpfs. We don't parse them yet; they
        // are parsed lazily when an entry is actually read.
        const rs = SmartBuffer.fromBuffer(buffer);
        this.dbpfs = new Array(rs.readUInt32LE());
        for (let i = 0; i < this.dbpfs.length; i++) {
            const file = rs.readStringNT();
            this.dbpfs[i] = new DBPF({ file, parse: false });
        }
        // Next we'll restore all the entries. This is a bit special because we
        // first have the list of offset, size & dbpf pointer, and then comes
        // the buffer for the index, from which we read the TGIs.
        this.entries = new TGIIndex(rs.readUInt32LE());
        for (let i = 0; i < this.entries.length; i++) {
            const offset = rs.readUInt32LE();
            const size = rs.readUInt32LE();
            const ptr = rs.readUInt32LE();
            this.entries[i] = new Entry({
                offset,
                size,
                dbpf: this.dbpfs[ptr],
            });
        }
        // Reconstruct the index straight from the index buffer. The TGIs are
        // stored as flat [type, group, instance] triplets.
        const indexSize = rs.readUInt32LE();
        const indexBuffer = rs.readUint8Array(indexSize);
        this.entries.load(indexBuffer);
        const { tgis } = this.entries.index;
        for (let i = 0, iii = 0; i < this.entries.length; i++, iii += 3) {
            const type = tgis[iii];
            const group = tgis[iii + 1];
            const instance = tgis[iii + 2];
            this.entries[i].tgi = new TGI(type, group, instance);
        }
        // At last we'll restore the families as well.
        this.families = new Map();
        const size = rs.readUInt32LE();
        for (let i = 0; i < size; i++) {
            const family = rs.readUInt32LE();
            const tgis = new Array(rs.readUInt32LE());
            for (let j = 0; j < tgis.length; j++) {
                tgis[j] = new TGI(
                    rs.readUInt32LE(),
                    rs.readUInt32LE(),
                    rs.readUInt32LE(),
                );
            }
            this.families.set(family, tgis);
        }
        return this;
    }
    // ## *[Symbol.iterator]()
    // Iterating a plugin index iterates over all its entries.
    *[Symbol.iterator]() {
        yield* this.entries;
    }
}