
@tamgl/colyseus-schema


Binary state serializer with delta encoding for games

const SWITCH_TO_STRUCTURE = 255; // (decoding collides with DELETE_AND_ADD + fieldIndex = 63)
const TYPE_ID = 213;
/**
 * Encoding Schema field operations.
 */
var OPERATION;
(function (OPERATION) {
    OPERATION[OPERATION["ADD"] = 128] = "ADD";
    OPERATION[OPERATION["REPLACE"] = 0] = "REPLACE";
    OPERATION[OPERATION["DELETE"] = 64] = "DELETE";
    OPERATION[OPERATION["DELETE_AND_MOVE"] = 96] = "DELETE_AND_MOVE";
    OPERATION[OPERATION["MOVE_AND_ADD"] = 160] = "MOVE_AND_ADD";
    OPERATION[OPERATION["DELETE_AND_ADD"] = 192] = "DELETE_AND_ADD";
    /**
     * Collection operations
     */
    OPERATION[OPERATION["CLEAR"] = 10] = "CLEAR";
    /**
     * ArraySchema operations
     */
    OPERATION[OPERATION["REVERSE"] = 15] = "REVERSE";
    OPERATION[OPERATION["MOVE"] = 32] = "MOVE";
    OPERATION[OPERATION["DELETE_BY_REFID"] = 33] = "DELETE_BY_REFID";
    OPERATION[OPERATION["ADD_BY_REFID"] = 129] = "ADD_BY_REFID";
})(OPERATION || (OPERATION = {}));
Symbol.metadata ??= Symbol.for("Symbol.metadata");
const $track = Symbol("$track");
const $encoder = Symbol("$encoder");
const $decoder = Symbol("$decoder");
const $filter = Symbol("$filter");
const $getByIndex = Symbol("$getByIndex");
const $deleteByIndex = Symbol("$deleteByIndex");
/**
 * Used to hold ChangeTree instances within the structures
 */
const $changes = Symbol('$changes');
/**
 * Used to keep track of the type of the child elements of a collection
 * (MapSchema, ArraySchema, etc.)
 */
const $childType = Symbol('$childType');
/**
 * Optional "discard" method for custom types (ArraySchema)
 * (Discards changes for next serialization)
 */
const $onEncodeEnd = Symbol('$onEncodeEnd');
/**
 * When decoding, this method is called after the instance is fully decoded
 */
const $onDecodeEnd = Symbol("$onDecodeEnd");
/**
 * Metadata
 */
const $descriptors = Symbol("$descriptors");
const $numFields = "$__numFields";
const $refTypeFieldIndexes = "$__refTypeFieldIndexes";
const $viewFieldIndexes = "$__viewFieldIndexes";
const $fieldIndexesByViewTag = "$__fieldIndexesByViewTag";
/**
 * Copyright (c) 2018 Endel Dreyer
 * Copyright (c) 2014 Ion Drive Software Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE
 */
/**
 * msgpack implementation highly based on notepack.io
 * https://github.com/darrachequesne/notepack
 */
let textEncoder;
// @ts-ignore
try {
    textEncoder = new TextEncoder();
}
catch (e) { }
const _convoBuffer$1 = new ArrayBuffer(8);
const _int32$1 = new Int32Array(_convoBuffer$1);
const _float32$1 = new Float32Array(_convoBuffer$1);
const _float64$1 = new Float64Array(_convoBuffer$1);
const _int64$1 = new BigInt64Array(_convoBuffer$1);
const hasBufferByteLength = (typeof Buffer !== 'undefined' && Buffer.byteLength);
const utf8Length = (hasBufferByteLength)
    ? Buffer.byteLength // node
    : function (str, _) {
        var c = 0, length = 0;
        for (var i = 0, l = str.length; i < l; i++) {
            c = str.charCodeAt(i);
            if (c < 0x80) {
                length += 1;
            }
            else if (c < 0x800) {
                length += 2;
            }
            else if (c < 0xd800 || c >= 0xe000) {
                length += 3;
            }
            else {
                i++;
                length += 4;
            }
        }
        return length;
    };
function utf8Write(view, str, it) {
    var c = 0;
    for (var i = 0, l = str.length; i < l; i++) {
        c = str.charCodeAt(i);
        if (c < 0x80) {
            view[it.offset++] = c;
        }
        else if (c < 0x800) {
            view[it.offset] = 0xc0 | (c >> 6);
            view[it.offset + 1] = 0x80 | (c & 0x3f);
            it.offset += 2;
        }
        else if (c < 0xd800 || c >= 0xe000) {
            view[it.offset] = 0xe0 | (c >> 12);
            view[it.offset + 1] = 0x80 | (c >> 6 & 0x3f);
            view[it.offset + 2] = 0x80 | (c & 0x3f);
            it.offset += 3;
        }
        else {
            i++;
            c = 0x10000 + (((c & 0x3ff) << 10) | (str.charCodeAt(i) & 0x3ff));
            view[it.offset] = 0xf0 | (c >> 18);
            view[it.offset + 1] = 0x80 | (c >> 12 & 0x3f);
            view[it.offset + 2] = 0x80 | (c >> 6 & 0x3f);
            view[it.offset + 3] = 0x80 | (c & 0x3f);
            it.offset += 4;
        }
    }
}
function int8$1(bytes, value, it) {
    bytes[it.offset++] = value & 255;
}
function uint8$1(bytes, value, it) {
    bytes[it.offset++] = value & 255;
}
function int16$1(bytes, value, it) {
    bytes[it.offset++] = value & 255;
    bytes[it.offset++] = (value >> 8) & 255;
}
function uint16$1(bytes, value, it) {
    bytes[it.offset++] = value & 255;
    bytes[it.offset++] = (value >> 8) & 255;
}
function int32$1(bytes, value, it) {
    bytes[it.offset++] = value & 255;
    bytes[it.offset++] = (value >> 8) & 255;
    bytes[it.offset++] = (value >> 16) & 255;
    bytes[it.offset++] = (value >> 24) & 255;
}
function uint32$1(bytes, value, it) {
    const b4 = value >> 24;
    const b3 = value >> 16;
    const b2 = value >> 8;
    const b1 = value;
    bytes[it.offset++] = b1 & 255;
    bytes[it.offset++] = b2 & 255;
    bytes[it.offset++] = b3 & 255;
    bytes[it.offset++] = b4 & 255;
}
function int64$1(bytes, value, it) {
    const high = Math.floor(value / Math.pow(2, 32));
    const low = value >>> 0;
    uint32$1(bytes, low, it);
    uint32$1(bytes, high, it);
}
function uint64$1(bytes, value, it) {
    const high = (value / Math.pow(2, 32)) >> 0;
    const low = value >>> 0;
    uint32$1(bytes, low, it);
    uint32$1(bytes, high, it);
}
function bigint64$1(bytes, value, it) {
    _int64$1[0] = BigInt.asIntN(64, value);
    int32$1(bytes, _int32$1[0], it);
    int32$1(bytes, _int32$1[1], it);
}
function biguint64$1(bytes, value, it) {
    _int64$1[0] = BigInt.asIntN(64, value);
    int32$1(bytes, _int32$1[0], it);
    int32$1(bytes, _int32$1[1], it);
}
function float32$1(bytes, value, it) {
    _float32$1[0] = value;
    int32$1(bytes, _int32$1[0], it);
}
function float64$1(bytes, value, it) {
    _float64$1[0] = value;
    int32$1(bytes, _int32$1[0], it);
    int32$1(bytes, _int32$1[1], it);
}
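// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library API): the writers above share a
// mutable cursor object `{ offset }` and append little-endian bytes into a
// plain byte array. The function name, buffer size and sample values below are
// assumptions for demonstration only; the function is never called.
// ---------------------------------------------------------------------------
function __exampleFixedWidthWriters() {
    const bytes = new Uint8Array(16); // large enough for 2 + 4 + 8 bytes
    const it = { offset: 0 };
    uint16$1(bytes, 0xBEEF, it);      // writes 0xEF 0xBE  -> it.offset === 2
    float32$1(bytes, 1.5, it);        // writes 4 LE bytes -> it.offset === 6
    float64$1(bytes, Math.PI, it);    // writes 8 LE bytes -> it.offset === 14
    return bytes.subarray(0, it.offset);
}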
function boolean$1(bytes, value, it) {
    bytes[it.offset++] = value ? 1 : 0; // uint8
}
function string$1(bytes, value, it) {
    // encode `null` strings as empty.
    if (!value) {
        value = "";
    }
    let length = utf8Length(value, "utf8");
    let size = 0;
    // fixstr
    if (length < 0x20) {
        bytes[it.offset++] = length | 0xa0;
        size = 1;
    }
    // str 8
    else if (length < 0x100) {
        bytes[it.offset++] = 0xd9;
        bytes[it.offset++] = length & 0xff; // (was `length % 255`, which would encode a 255-byte string as length 0)
        size = 2;
    }
    // str 16
    else if (length < 0x10000) {
        bytes[it.offset++] = 0xda;
        uint16$1(bytes, length, it);
        size = 3;
    }
    // str 32
    else if (length < 0x100000000) {
        bytes[it.offset++] = 0xdb;
        uint32$1(bytes, length, it);
        size = 5;
    }
    else {
        throw new Error('String too long');
    }
    utf8Write(bytes, value, it);
    return size + length;
}
function number$1(bytes, value, it) {
    if (isNaN(value)) {
        return number$1(bytes, 0, it);
    }
    else if (!isFinite(value)) {
        return number$1(bytes, (value > 0) ? Number.MAX_SAFE_INTEGER : -Number.MAX_SAFE_INTEGER, it);
    }
    else if (value !== (value | 0)) {
        if (Math.abs(value) <= 3.4028235e+38) { // range check
            _float32$1[0] = value;
            if (Math.abs(Math.abs(_float32$1[0]) - Math.abs(value)) < 1e-4) {
                // precision check; adjust 1e-n (n = precision) to in-/decrease acceptable precision loss
                // now we know value is in range for f32 and has acceptable precision for f32
                bytes[it.offset++] = 0xca;
                float32$1(bytes, value, it);
                return 5;
            }
        }
        bytes[it.offset++] = 0xcb;
        float64$1(bytes, value, it);
        return 9;
    }
    if (value >= 0) {
        // positive fixnum
        if (value < 0x80) {
            bytes[it.offset++] = value & 255; // uint8
            return 1;
        }
        // uint 8
        if (value < 0x100) {
            bytes[it.offset++] = 0xcc;
            bytes[it.offset++] = value & 255; // uint8
            return 2;
        }
        // uint 16
        if (value < 0x10000) {
            bytes[it.offset++] = 0xcd;
            uint16$1(bytes, value, it);
            return 3;
        }
        // uint 32
        if (value < 0x100000000) {
            bytes[it.offset++] = 0xce;
            uint32$1(bytes, value, it);
            return 5;
        }
        // uint 64
        bytes[it.offset++] = 0xcf;
        uint64$1(bytes, value, it);
        return 9;
    }
    else {
        // negative fixnum
        if (value >= -32) {
            bytes[it.offset++] = 0xe0 | (value + 0x20);
            return 1;
        }
        // int 8
        if (value >= -128) {
            bytes[it.offset++] = 0xd0;
            int8$1(bytes, value, it);
            return 2;
        }
        // int 16
        if (value >= -32768) {
            bytes[it.offset++] = 0xd1;
            int16$1(bytes, value, it);
            return 3;
        }
        // int 32
        if (value >= -2147483648) {
            bytes[it.offset++] = 0xd2;
            int32$1(bytes, value, it);
            return 5;
        }
        // int 64
        bytes[it.offset++] = 0xd3;
        int64$1(bytes, value, it);
        return 9;
    }
}
const encode = {
    int8: int8$1,
    uint8: uint8$1,
    int16: int16$1,
    uint16: uint16$1,
    int32: int32$1,
    uint32: uint32$1,
    int64: int64$1,
    uint64: uint64$1,
    bigint64: bigint64$1,
    biguint64: biguint64$1,
    float32: float32$1,
    float64: float64$1,
    boolean: boolean$1,
    string: string$1,
    number: number$1,
    utf8Write,
    utf8Length,
};
/**
 * Copyright (c) 2018 Endel Dreyer
 * Copyright (c) 2014 Ion Drive Software Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE
 */
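// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the library API): `encode.number` picks the
// smallest msgpack representation for each value and returns the number of
// bytes written; `encode.string` writes a length-prefixed UTF-8 string. The
// function name, buffer size and sample values below are assumptions for
// demonstration only; the function is never called.
// ---------------------------------------------------------------------------
function __exampleDynamicEncoding() {
    const bytes = new Uint8Array(32);
    const it = { offset: 0 };
    encode.number(bytes, 7, it);           // 1 byte:  positive fixnum
    encode.number(bytes, 300, it);         // 3 bytes: 0xcd + uint16
    encode.number(bytes, 1.25, it);        // 5 bytes: 0xca + float32 (exact in f32)
    encode.number(bytes, 12345678.9, it);  // 9 bytes: 0xcb + float64 (fails the f32 precision check)
    encode.string(bytes, "hi", it);        // 3 bytes: fixstr header + "hi"
    return bytes.subarray(0, it.offset);   // 21 bytes total
}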
// force little endian to facilitate decoding on multiple implementations
const _convoBuffer = new ArrayBuffer(8);
const _int32 = new Int32Array(_convoBuffer);
const _float32 = new Float32Array(_convoBuffer);
const _float64 = new Float64Array(_convoBuffer);
const _uint64 = new BigUint64Array(_convoBuffer);
const _int64 = new BigInt64Array(_convoBuffer);
function utf8Read(bytes, it, length) {
    var string = '', chr = 0;
    for (var i = it.offset, end = it.offset + length; i < end; i++) {
        var byte = bytes[i];
        if ((byte & 0x80) === 0x00) {
            string += String.fromCharCode(byte);
            continue;
        }
        if ((byte & 0xe0) === 0xc0) {
            string += String.fromCharCode(((byte & 0x1f) << 6) | (bytes[++i] & 0x3f));
            continue;
        }
        if ((byte & 0xf0) === 0xe0) {
            string += String.fromCharCode(((byte & 0x0f) << 12) | ((bytes[++i] & 0x3f) << 6) | ((bytes[++i] & 0x3f) << 0));
            continue;
        }
        if ((byte & 0xf8) === 0xf0) {
            chr = ((byte & 0x07) << 18) | ((bytes[++i] & 0x3f) << 12) | ((bytes[++i] & 0x3f) << 6) | ((bytes[++i] & 0x3f) << 0);
            if (chr >= 0x010000) { // surrogate pair
                chr -= 0x010000;
                string += String.fromCharCode((chr >>> 10) + 0xD800, (chr & 0x3FF) + 0xDC00);
            }
            else {
                string += String.fromCharCode(chr);
            }
            continue;
        }
        console.error('Invalid byte ' + byte.toString(16));
        // (do not throw error to avoid server/client from crashing due to hack attemps)
        // throw new Error('Invalid byte ' + byte.toString(16));
    }
    it.offset += length;
    return string;
}
function int8(bytes, it) {
    return uint8(bytes, it) << 24 >> 24;
}
function uint8(bytes, it) {
    return bytes[it.offset++];
}
function int16(bytes, it) {
    return uint16(bytes, it) << 16 >> 16;
}
function uint16(bytes, it) {
    return bytes[it.offset++] | bytes[it.offset++] << 8;
}
function int32(bytes, it) {
    return bytes[it.offset++] | bytes[it.offset++] << 8 | bytes[it.offset++] << 16 | bytes[it.offset++] << 24;
}
function uint32(bytes, it) {
    return int32(bytes, it) >>> 0;
}
function float32(bytes, it) {
    _int32[0] = int32(bytes, it);
    return _float32[0];
}
function float64(bytes, it) {
    _int32[0] = int32(bytes, it);
    _int32[1] = int32(bytes, it);
    return _float64[0];
}
function int64(bytes, it) {
    const low = uint32(bytes, it);
    const high = int32(bytes, it) * Math.pow(2, 32);
    return high + low;
}
function uint64(bytes, it) {
    const low = uint32(bytes, it);
    const high = uint32(bytes, it) * Math.pow(2, 32);
    return high + low;
}
function bigint64(bytes, it) {
    _int32[0] = int32(bytes, it);
    _int32[1] = int32(bytes, it);
    return _int64[0];
}
function biguint64(bytes, it) {
    _int32[0] = int32(bytes, it);
    _int32[1] = int32(bytes, it);
    return _uint64[0];
}
function boolean(bytes, it) {
    return uint8(bytes, it) > 0;
}
function string(bytes, it) {
    const prefix = bytes[it.offset++];
    let length;
    if (prefix < 0xc0) {
        // fixstr
        length = prefix & 0x1f;
    }
    else if (prefix === 0xd9) {
        length = uint8(bytes, it);
    }
    else if (prefix === 0xda) {
        length = uint16(bytes, it);
    }
    else if (prefix === 0xdb) {
        length = uint32(bytes, it);
    }
    return utf8Read(bytes, it, length);
}
function number(bytes, it) {
    const prefix = bytes[it.offset++];
    if (prefix < 0x80) {
        // positive fixint
        return prefix;
    }
    else if (prefix === 0xca) {
        // float 32
        return float32(bytes, it);
    }
    else if (prefix === 0xcb) {
        // float 64
        return float64(bytes, it);
    }
    else if (prefix === 0xcc) {
        // uint 8
        return uint8(bytes, it);
    }
    else if (prefix === 0xcd) {
        // uint 16
        return uint16(bytes, it);
    }
    else if (prefix === 0xce) {
        // uint 32
        return uint32(bytes, it);
    }
    else if (prefix === 0xcf) {
        // uint 64
        return uint64(bytes, it);
    }
    else if (prefix === 0xd0) {
        // int 8
        return int8(bytes, it);
    }
    else if (prefix === 0xd1) {
        // int 16
        return int16(bytes, it);
    }
    else if (prefix === 0xd2) {
        // int 32
        return int32(bytes, it);
    }
    else if (prefix === 0xd3) {
        // int 64
        return int64(bytes, it);
    }
    else if (prefix > 0xdf) {
        // negative fixint
        return (0xff - prefix + 1) * -1;
    }
}
function stringCheck(bytes, it) {
    const prefix = bytes[it.offset];
    return (
    // fixstr
    (prefix < 0xc0 && prefix > 0xa0) ||
        // str 8
        prefix === 0xd9 ||
        // str 16
        prefix === 0xda ||
        // str 32
        prefix === 0xdb);
}
const decode = {
    utf8Read,
    int8,
    uint8,
    int16,
    uint16,
    int32,
    uint32,
    float32,
    float64,
    int64,
    uint64,
    bigint64,
    biguint64,
    boolean,
    string,
    number,
    stringCheck,
};
const registeredTypes = {};
const identifiers = new Map();
function registerType(identifier, definition) {
    if (definition.constructor) {
        identifiers.set(definition.constructor, identifier);
        registeredTypes[identifier] = definition;
    }
    if (definition.encode) {
        encode[identifier] = definition.encode;
    }
    if (definition.decode) {
        decode[identifier] = definition.decode;
    }
}
function getType(identifier) {
    return registeredTypes[identifier];
}
function defineCustomTypes(types) {
    for (const identifier in types) {
        registerType(identifier, types[identifier]);
    }
    return (t) => type(t);
}
class TypeContext {
    /**
     * For inheritance support
     * Keeps track of which classes extends which.
(parent -> children) */ static { this.inheritedTypes = new Map(); } static { this.cachedContexts = new Map(); } static register(target) { const parent = Object.getPrototypeOf(target); if (parent !== Schema) { let inherits = TypeContext.inheritedTypes.get(parent); if (!inherits) { inherits = new Set(); TypeContext.inheritedTypes.set(parent, inherits); } inherits.add(target); } } static cache(rootClass) { let context = TypeContext.cachedContexts.get(rootClass); if (!context) { context = new TypeContext(rootClass); TypeContext.cachedContexts.set(rootClass, context); } return context; } constructor(rootClass) { this.types = {}; this.schemas = new Map(); this.hasFilters = false; this.parentFiltered = {}; if (rootClass) { this.discoverTypes(rootClass); } } has(schema) { return this.schemas.has(schema); } get(typeid) { return this.types[typeid]; } add(schema, typeid = this.schemas.size) { // skip if already registered if (this.schemas.has(schema)) { return false; } this.types[typeid] = schema; // // Workaround to allow using an empty Schema (with no `@type()` fields) // if (schema[Symbol.metadata] === undefined) { Metadata.initialize(schema); } this.schemas.set(schema, typeid); return true; } getTypeId(klass) { return this.schemas.get(klass); } discoverTypes(klass, parentType, parentIndex, parentHasViewTag) { if (parentHasViewTag) { this.registerFilteredByParent(klass, parentType, parentIndex); } // skip if already registered if (!this.add(klass)) { return; } // add classes inherited from this base class TypeContext.inheritedTypes.get(klass)?.forEach((child) => { this.discoverTypes(child, parentType, parentIndex, parentHasViewTag); }); // add parent classes let parent = klass; while ((parent = Object.getPrototypeOf(parent)) && parent !== Schema && // stop at root (Schema) parent !== Function.prototype // stop at root (non-Schema) ) { this.discoverTypes(parent); } const metadata = (klass[Symbol.metadata] ??= {}); // if any schema/field has filters, mark "context" as having filters. if (metadata[$viewFieldIndexes]) { this.hasFilters = true; } for (const fieldIndex in metadata) { const index = fieldIndex; const fieldType = metadata[index].type; const fieldHasViewTag = (metadata[index].tag !== undefined); if (typeof (fieldType) === "string") { continue; } if (Array.isArray(fieldType)) { const type = fieldType[0]; // skip primitive types if (type === "string") { continue; } this.discoverTypes(type, klass, index, parentHasViewTag || fieldHasViewTag); } else if (typeof (fieldType) === "function") { this.discoverTypes(fieldType, klass, index, parentHasViewTag || fieldHasViewTag); } else { const type = Object.values(fieldType)[0]; // skip primitive types if (typeof (type) === "string") { continue; } this.discoverTypes(type, klass, index, parentHasViewTag || fieldHasViewTag); } } } /** * Keep track of which classes have filters applied. * Format: `${typeid}-${parentTypeid}-${parentIndex}` */ registerFilteredByParent(schema, parentType, parentIndex) { const typeid = this.schemas.get(schema) ?? 
this.schemas.size; let key = `${typeid}`; if (parentType) { key += `-${this.schemas.get(parentType)}`; } key += `-${parentIndex}`; this.parentFiltered[key] = true; } debug() { let parentFiltered = ""; for (const key in this.parentFiltered) { const keys = key.split("-").map(Number); const fieldIndex = keys.pop(); parentFiltered += `\n\t\t`; parentFiltered += `${key}: ${keys.reverse().map((id, i) => { const klass = this.types[id]; const metadata = klass[Symbol.metadata]; let txt = klass.name; if (i === 0) { txt += `[${metadata[fieldIndex].name}]`; } return `${txt}`; }).join(" -> ")}`; } return `TypeContext ->\n` + `\tSchema types: ${this.schemas.size}\n` + `\thasFilters: ${this.hasFilters}\n` + `\tparentFiltered:${parentFiltered}`; } } function getNormalizedType(type) { return (Array.isArray(type)) ? { array: type[0] } : (typeof (type['type']) !== "undefined") ? type['type'] : type; } const Metadata = { addField(metadata, index, name, type, descriptor) { if (index > 64) { throw new Error(`Can't define field '${name}'.\nSchema instances may only have up to 64 fields.`); } metadata[index] = Object.assign(metadata[index] || {}, // avoid overwriting previous field metadata (@owned / @deprecated) { type: getNormalizedType(type), index, name, }); // create "descriptors" map Object.defineProperty(metadata, $descriptors, { value: metadata[$descriptors] || {}, enumerable: false, configurable: true, }); if (descriptor) { // for encoder metadata[$descriptors][name] = descriptor; metadata[$descriptors][`_${name}`] = { value: undefined, writable: true, enumerable: false, configurable: true, }; } else { // for decoder metadata[$descriptors][name] = { value: undefined, writable: true, enumerable: true, configurable: true, }; } // map -1 as last field index Object.defineProperty(metadata, $numFields, { value: index, enumerable: false, configurable: true }); // map field name => index (non enumerable) Object.defineProperty(metadata, name, { value: index, enumerable: false, configurable: true, }); // if child Ref/complex type, add to -4 if (typeof (metadata[index].type) !== "string") { if (metadata[$refTypeFieldIndexes] === undefined) { Object.defineProperty(metadata, $refTypeFieldIndexes, { value: [], enumerable: false, configurable: true, }); } metadata[$refTypeFieldIndexes].push(index); } }, setTag(metadata, fieldName, tag) { const index = metadata[fieldName]; const field = metadata[index]; // add 'tag' to the field field.tag = tag; if (!metadata[$viewFieldIndexes]) { // -2: all field indexes with "view" tag Object.defineProperty(metadata, $viewFieldIndexes, { value: [], enumerable: false, configurable: true }); // -3: field indexes by "view" tag Object.defineProperty(metadata, $fieldIndexesByViewTag, { value: {}, enumerable: false, configurable: true }); } metadata[$viewFieldIndexes].push(index); if (!metadata[$fieldIndexesByViewTag][tag]) { metadata[$fieldIndexesByViewTag][tag] = []; } metadata[$fieldIndexesByViewTag][tag].push(index); }, setFields(target, fields) { // for inheritance support const constructor = target.prototype.constructor; TypeContext.register(constructor); const parentClass = Object.getPrototypeOf(constructor); const parentMetadata = parentClass && parentClass[Symbol.metadata]; const metadata = Metadata.initialize(constructor); // Use Schema's methods if not defined in the class if (!constructor[$track]) { constructor[$track] = Schema[$track]; } if (!constructor[$encoder]) { constructor[$encoder] = Schema[$encoder]; } if (!constructor[$decoder]) { constructor[$decoder] = 
Schema[$decoder]; } if (!constructor.prototype.toJSON) { constructor.prototype.toJSON = Schema.prototype.toJSON; } // // detect index for this field, considering inheritance // let fieldIndex = metadata[$numFields] // current structure already has fields defined ?? (parentMetadata && parentMetadata[$numFields]) // parent structure has fields defined ?? -1; // no fields defined fieldIndex++; for (const field in fields) { const type = fields[field]; // FIXME: this code is duplicated from @type() annotation const complexTypeKlass = (Array.isArray(type)) ? getType("array") : (typeof (Object.keys(type)[0]) === "string") && getType(Object.keys(type)[0]); const childType = (complexTypeKlass) ? Object.values(type)[0] : getNormalizedType(type); Metadata.addField(metadata, fieldIndex, field, type, getPropertyDescriptor(`_${field}`, fieldIndex, childType, complexTypeKlass)); fieldIndex++; } return target; }, isDeprecated(metadata, field) { return metadata[field].deprecated === true; }, init(klass) { // // Used only to initialize an empty Schema (Encoder#constructor) // TODO: remove/refactor this... // const metadata = {}; klass[Symbol.metadata] = metadata; Object.defineProperty(metadata, $numFields, { value: 0, enumerable: false, configurable: true, }); }, initialize(constructor) { const parentClass = Object.getPrototypeOf(constructor); const parentMetadata = parentClass[Symbol.metadata]; let metadata = constructor[Symbol.metadata] ?? Object.create(null); // make sure inherited classes have their own metadata object. if (parentClass !== Schema && metadata === parentMetadata) { metadata = Object.create(null); if (parentMetadata) { // // assign parent metadata to current // Object.setPrototypeOf(metadata, parentMetadata); // $numFields Object.defineProperty(metadata, $numFields, { value: parentMetadata[$numFields], enumerable: false, configurable: true, writable: true, }); // $viewFieldIndexes / $fieldIndexesByViewTag if (parentMetadata[$viewFieldIndexes] !== undefined) { Object.defineProperty(metadata, $viewFieldIndexes, { value: [...parentMetadata[$viewFieldIndexes]], enumerable: false, configurable: true, writable: true, }); Object.defineProperty(metadata, $fieldIndexesByViewTag, { value: { ...parentMetadata[$fieldIndexesByViewTag] }, enumerable: false, configurable: true, writable: true, }); } // $refTypeFieldIndexes if (parentMetadata[$refTypeFieldIndexes] !== undefined) { Object.defineProperty(metadata, $refTypeFieldIndexes, { value: [...parentMetadata[$refTypeFieldIndexes]], enumerable: false, configurable: true, writable: true, }); } // $descriptors Object.defineProperty(metadata, $descriptors, { value: { ...parentMetadata[$descriptors] }, enumerable: false, configurable: true, writable: true, }); } } constructor[Symbol.metadata] = metadata; return metadata; }, isValidInstance(klass) { return (klass.constructor[Symbol.metadata] && Object.prototype.hasOwnProperty.call(klass.constructor[Symbol.metadata], $numFields)); }, getFields(klass) { const metadata = klass[Symbol.metadata]; const fields = {}; for (let i = 0; i <= metadata[$numFields]; i++) { fields[metadata[i].name] = metadata[i].type; } return fields; }, hasViewTagAtIndex(metadata, index) { return metadata?.[$viewFieldIndexes]?.includes(index); } }; function createChangeSet() { return { indexes: {}, operations: [] }; } function setOperationAtIndex(changeSet, index) { const operationsIndex = changeSet.indexes[index]; if (operationsIndex === undefined) { changeSet.indexes[index] = changeSet.operations.push(index) - 1; } else { 
changeSet.operations[operationsIndex] = index; } } function deleteOperationAtIndex(changeSet, index) { let operationsIndex = changeSet.indexes[index]; if (operationsIndex === undefined) { // // if index is not found, we need to find the last operation // FIXME: this is not very efficient // // > See "should allow consecutive splices (same place)" tests // operationsIndex = Object.values(changeSet.indexes).at(-1); index = Object.entries(changeSet.indexes).find(([_, value]) => value === operationsIndex)?.[0]; } changeSet.operations[operationsIndex] = undefined; delete changeSet.indexes[index]; } function enqueueChangeTree(root, changeTree, changeSet, queueRootIndex = changeTree[changeSet].queueRootIndex) { if (!root) { // skip return; } else if (root[changeSet][queueRootIndex] !== changeTree) { changeTree[changeSet].queueRootIndex = root[changeSet].push(changeTree) - 1; } } class ChangeTree { constructor(ref) { /** * Whether this structure is parent of a filtered structure. */ this.isFiltered = false; this.indexedOperations = {}; // // TODO: // try storing the index + operation per item. // example: 1024 & 1025 => ADD, 1026 => DELETE // // => https://chatgpt.com/share/67107d0c-bc20-8004-8583-83b17dd7c196 // this.changes = { indexes: {}, operations: [] }; this.allChanges = { indexes: {}, operations: [] }; /** * Is this a new instance? Used on ArraySchema to determine OPERATION.MOVE_AND_ADD operation. */ this.isNew = true; this.ref = ref; // // Does this structure have "filters" declared? // const metadata = ref.constructor[Symbol.metadata]; if (metadata?.[$viewFieldIndexes]) { this.allFilteredChanges = { indexes: {}, operations: [] }; this.filteredChanges = { indexes: {}, operations: [] }; } } setRoot(root) { this.root = root; this.checkIsFiltered(this.parent, this.parentIndex); // // TODO: refactor and possibly unify .setRoot() and .setParent() // // Recursively set root on child structures const metadata = this.ref.constructor[Symbol.metadata]; if (metadata) { metadata[$refTypeFieldIndexes]?.forEach((index) => { const field = metadata[index]; const changeTree = this.ref[field.name]?.[$changes]; if (changeTree) { if (changeTree.root !== root) { changeTree.setRoot(root); } else { root.add(changeTree); // increment refCount } } }); } else if (this.ref[$childType] && typeof (this.ref[$childType]) !== "string") { // MapSchema / ArraySchema, etc. this.ref.forEach((value, key) => { const changeTree = value[$changes]; if (changeTree.root !== root) { changeTree.setRoot(root); } else { root.add(changeTree); // increment refCount } }); } } setParent(parent, root, parentIndex) { this.parent = parent; this.parentIndex = parentIndex; // avoid setting parents with empty `root` if (!root) { return; } // skip if parent is already set if (root !== this.root) { this.root = root; this.checkIsFiltered(parent, parentIndex); } else { root.add(this); } // assign same parent on child structures const metadata = this.ref.constructor[Symbol.metadata]; if (metadata) { metadata[$refTypeFieldIndexes]?.forEach((index) => { const field = metadata[index]; const changeTree = this.ref[field.name]?.[$changes]; if (changeTree && changeTree.root !== root) { changeTree.setParent(this.ref, root, index); } }); } else if (this.ref[$childType] && typeof (this.ref[$childType]) !== "string") { // MapSchema / ArraySchema, etc. this.ref.forEach((value, key) => { const changeTree = value[$changes]; if (changeTree.root !== root) { changeTree.setParent(this.ref, root, this.indexes[key] ?? 
key); } }); } } forEachChild(callback) { // // assign same parent on child structures // const metadata = this.ref.constructor[Symbol.metadata]; if (metadata) { metadata[$refTypeFieldIndexes]?.forEach((index) => { const field = metadata[index]; const value = this.ref[field.name]; if (value) { callback(value[$changes], index); } }); } else if (this.ref[$childType] && typeof (this.ref[$childType]) !== "string") { // MapSchema / ArraySchema, etc. this.ref.forEach((value, key) => { callback(value[$changes], this.indexes[key] ?? key); }); } } operation(op) { // operations without index use negative values to represent them // this is checked during .encode() time. if (this.filteredChanges !== undefined) { this.filteredChanges.operations.push(-op); enqueueChangeTree(this.root, this, 'filteredChanges'); } else { this.changes.operations.push(-op); enqueueChangeTree(this.root, this, 'changes'); } } change(index, operation = OPERATION.ADD) { const metadata = this.ref.constructor[Symbol.metadata]; const isFiltered = this.isFiltered || (metadata?.[index]?.tag !== undefined); const changeSet = (isFiltered) ? this.filteredChanges : this.changes; const previousOperation = this.indexedOperations[index]; if (!previousOperation || previousOperation === OPERATION.DELETE) { const op = (!previousOperation) ? operation : (previousOperation === OPERATION.DELETE) ? OPERATION.DELETE_AND_ADD : operation; // // TODO: are DELETE operations being encoded as ADD here ?? // this.indexedOperations[index] = op; } setOperationAtIndex(changeSet, index); if (isFiltered) { setOperationAtIndex(this.allFilteredChanges, index); if (this.root) { enqueueChangeTree(this.root, this, 'filteredChanges'); enqueueChangeTree(this.root, this, 'allFilteredChanges'); } } else { setOperationAtIndex(this.allChanges, index); enqueueChangeTree(this.root, this, 'changes'); } } shiftChangeIndexes(shiftIndex) { // // Used only during: // // - ArraySchema#unshift() // const changeSet = (this.isFiltered) ? 
this.filteredChanges : this.changes; const newIndexedOperations = {}; const newIndexes = {}; for (const index in this.indexedOperations) { newIndexedOperations[Number(index) + shiftIndex] = this.indexedOperations[index]; newIndexes[Number(index) + shiftIndex] = changeSet.indexes[index]; } this.indexedOperations = newIndexedOperations; changeSet.indexes = newIndexes; changeSet.operations = changeSet.operations.map((index) => index + shiftIndex); } shiftAllChangeIndexes(shiftIndex, startIndex = 0) { // // Used only during: // // - ArraySchema#splice() // if (this.filteredChanges !== undefined) { this._shiftAllChangeIndexes(shiftIndex, startIndex, this.allFilteredChanges); this._shiftAllChangeIndexes(shiftIndex, startIndex, this.allChanges); } else { this._shiftAllChangeIndexes(shiftIndex, startIndex, this.allChanges); } } _shiftAllChangeIndexes(shiftIndex, startIndex = 0, changeSet) { const newIndexes = {}; let newKey = 0; for (const key in changeSet.indexes) { newIndexes[newKey++] = changeSet.indexes[key]; } changeSet.indexes = newIndexes; for (let i = 0; i < changeSet.operations.length; i++) { const index = changeSet.operations[i]; if (index > startIndex) { changeSet.operations[i] = index + shiftIndex; } } } indexedOperation(index, operation, allChangesIndex = index) { this.indexedOperations[index] = operation; if (this.filteredChanges !== undefined) { setOperationAtIndex(this.allFilteredChanges, allChangesIndex); setOperationAtIndex(this.filteredChanges, index); enqueueChangeTree(this.root, this, 'filteredChanges'); } else { setOperationAtIndex(this.allChanges, allChangesIndex); setOperationAtIndex(this.changes, index); enqueueChangeTree(this.root, this, 'changes'); } } getType(index) { if (Metadata.isValidInstance(this.ref)) { const metadata = this.ref.constructor[Symbol.metadata]; return metadata[index].type; } else { // // Get the child type from parent structure. // - ["string"] => "string" // - { map: "string" } => "string" // - { set: "string" } => "string" // return this.ref[$childType]; } } getChange(index) { return this.indexedOperations[index]; } // // used during `.encode()` // getValue(index, isEncodeAll = false) { // // `isEncodeAll` param is only used by ArraySchema // return this.ref[$getByIndex](index, isEncodeAll); } delete(index, operation, allChangesIndex = index) { if (index === undefined) { try { throw new Error(`@colyseus/schema ${this.ref.constructor.name}: trying to delete non-existing index '${index}'`); } catch (e) { console.warn(e); } return; } const changeSet = (this.filteredChanges !== undefined) ? this.filteredChanges : this.changes; this.indexedOperations[index] = operation ?? OPERATION.DELETE; setOperationAtIndex(changeSet, index); deleteOperationAtIndex(this.allChanges, allChangesIndex); const previousValue = this.getValue(index); // remove `root` reference if (previousValue && previousValue[$changes]) { // // FIXME: this.root is "undefined" // // This method is being called at decoding time when a DELETE operation is found. // // - This is due to using the concrete Schema class at decoding time. // - "Reflected" structures do not have this problem. // // (The property descriptors should NOT be used at decoding time. only at encoding time.) 
// this.root?.remove(previousValue[$changes]); } // // FIXME: this is looking a ugly and repeated // if (this.filteredChanges !== undefined) { deleteOperationAtIndex(this.allFilteredChanges, allChangesIndex); enqueueChangeTree(this.root, this, 'filteredChanges'); } else { enqueueChangeTree(this.root, this, 'changes'); } return previousValue; } endEncode(changeSetName) { this.indexedOperations = {}; // clear changeset this[changeSetName].indexes = {}; this[changeSetName].operations.length = 0; this[changeSetName].queueRootIndex = undefined; // ArraySchema and MapSchema have a custom "encode end" method this.ref[$onEncodeEnd]?.(); // Not a new instance anymore this.isNew = false; } discard(discardAll = false) { // // > MapSchema: // Remove cached key to ensure ADD operations is unsed instead of // REPLACE in case same key is used on next patches. // this.ref[$onEncodeEnd]?.(); this.indexedOperations = {}; this.changes.indexes = {}; this.changes.operations.length = 0; this.changes.queueRootIndex = undefined; if (this.filteredChanges !== undefined) { this.filteredChanges.indexes = {}; this.filteredChanges.operations.length = 0; this.filteredChanges.queueRootIndex = undefined; } if (discardAll) { this.allChanges.indexes = {}; this.allChanges.operations.length = 0; if (this.allFilteredChanges !== undefined) { this.allFilteredChanges.indexes = {}; this.allFilteredChanges.operations.length = 0; } } } /** * Recursively discard all changes from this, and child structures. * (Used in tests only) */ discardAll() { const keys = Object.keys(this.indexedOperations); for (let i = 0, len = keys.length; i < len; i++) { const value = this.getValue(Number(keys[i])); if (value && value[$changes]) { value[$changes].discardAll(); } } this.discard(); } ensureRefId() { // skip if refId is already set. if (this.refId !== undefined) { return; } this.refId = this.root.getNextUniqueId(); } get changed() { return (Object.entries(this.indexedOperations).length > 0); } checkIsFiltered(parent, parentIndex) { const isNewChangeTree = this.root.add(this); if (this.root.types.hasFilters) { // // At Schema initialization, the "root" structure might not be available // yet, as it only does once the "Encoder" has been set up. // // So the "parent" may be already set without a "root". // this._checkFilteredByParent(parent, parentIndex); if (this.filteredChanges !== undefined) { enqueueChangeTree(this.root, this, 'filteredChanges'); if (isNewChangeTree) { this.root.allFilteredChanges.push(this); } } } if (!this.isFiltered) { enqueueChangeTree(this.root, this, 'changes'); if (isNewChangeTree) { this.root.allChanges.push(this); } } } _checkFilteredByParent(parent, parentIndex) { // skip if parent is not set if (!parent) { return; } // // ArraySchema | MapSchema - get the child type // (if refType is typeof string, the parentFiltered[key] below will always be invalid) // const refType = Metadata.isValidInstance(this.ref) ? 
this.ref.constructor : this.ref[$childType]; let parentChangeTree; let parentIsCollection = !Metadata.isValidInstance(parent); if (parentIsCollection) { parentChangeTree = parent[$changes]; parent = parentChangeTree.parent; parentIndex = parentChangeTree.parentIndex; } else { parentChangeTree = parent[$changes]; } const parentConstructor = parent.constructor; let key = `${this.root.types.getTypeId(refType)}`; if (parentConstructor) { key += `-${this.root.types.schemas.get(parentConstructor)}`; } key += `-${parentIndex}`; const fieldHasViewTag = Metadata.hasViewTagAtIndex(parentConstructor?.[Symbol.metadata], parentIndex); this.isFiltered = parent[$changes].isFiltered // in case parent is already filtered || this.root.types.parentFiltered[key] || fieldHasViewTag; // // "isFiltered" may not be imedialely available during `change()` due to the instance not being attached to the root yet. // when it's available, we need to enqueue the "changes" changeset into the "filteredChanges" changeset. // if (this.isFiltered) { this.isVisibilitySharedWithParent = (parentChangeTree.isFiltered && typeof (refType) !== "string" && !fieldHasViewTag && parentIsCollection); if (!this.filteredChanges) { this.filteredChanges = createChangeSet(); this.allFilteredChanges = createChangeSet(); } if (this.changes.operations.length > 0) { this.changes.operations.forEach((index) => setOperationAtIndex(this.filteredChanges, index)); this.allChanges.operations.forEach((index) => setOperationAtIndex(this.allFilteredChanges, index)); this.changes = createChangeSet(); this.allChanges = createChangeSet(); } } } } function encodeValue(encoder, bytes, type, value, operation, it) { if (typeof (type) === "string") { encode[type]?.(bytes, value, it); } else if (type[Symbol.metadata] !== undefined) {