@teraflop/api

Teraflop game engine WebAssembly scripting API

1 line, 117 kB
{"version":3,"sources":["~lib/rt/common.ts","~lib/rt/tlsf.ts","~lib/shared/typeinfo.ts","~lib/rt/pure.ts","~lib/memory.ts","~lib/util/memory.ts","assembly/lib/plugin.ts","~lib/builtins.ts","assembly/index.ts"],"names":[],"mappings":"khBC+Qa,AAAO,AADF,OACc,oBAEnB,AAAO,AAAyB,AADhC,EAAY,KAC2B,SAAhC,EAAQ,qBAI1B,AAAI,EAAO,MAET,AAAK,AAAM,EAAQ,MAInB,AAAK,AAAO,EAAS,AADhB,EAAM,AAAW,MACI,IAAa,KACvC,EAAM,MAEG,AAAO,AAAgB,EAAK,KAArB,EAAK,qBAIvB,AAAW,OACX,AAAI,AAFO,SAED,EAAY,MACtB,AAAI,IAAM,EAAY,MAGtB,AAAI,EArIJ,AACE,EAA2B,AAAC,AAAkB,EAAjB,EAAM,IAAyB,UAQ9D,AACE,EAA2B,AAAC,AAAkB,EAAjB,EAAM,IAAyB,IAC5D,KA8HA,AAAI,AAAC,KAEa,AA7JlB,EAA2B,EAAM,SA6JN,EAAO,MAAhC,AApJF,EACA,KAsJE,AAAI,AAAC,KAAO,OAAc,EAAO,gBAzG1B,AAAO,mBAEP,AAAO,AADF,OACc,oBAM9B,AAAI,AAHY,qBAGA,KAEd,AAAI,AADU,AAAC,EAAY,GAAc,GAAkB,EAAY,MACzD,SAIZ,EAHA,AAAY,EAAM,IAClB,EAAe,AAAY,AAA0B,EAAzB,EAAY,SAE5B,AAtHhB,AAAyB,EAA2B,GAAkB,KAAe,cA4HrF,AAAI,EAAY,KAQZ,EALS,AAAO,AADH,AApIjB,AAAmB,EAA2B,aAqIf,oBAE7B,AAAI,AADU,AAAC,EAAW,GAAc,GAAkB,EAAY,MACxD,SACZ,AAAY,EAAM,IAClB,EAAc,AAAY,AAAyB,EAAxB,EAAW,SAC9B,UAKZ,EAAe,EAAY,MAKhB,AAAO,AAAyB,AADhC,EAAY,KAC2B,SAAhC,EAAQ,qBACf,AAAO,AAA4C,EAA5C,EAA2B,IAAyB,mBAGtE,AAAa,EAA2B,GAAiB,KAIzD,AAAI,EAAO,MAET,AAAK,AAAM,EAAQ,MAInB,AAAK,AAAO,EAAS,AADhB,EAAM,AAAW,MACI,IAAa,KACvC,EAAM,MAEG,AAAO,AAAgB,EAAK,KAArB,EAAK,qBA5FvB,AACE,EAA2B,AAAC,AAAkB,EAAjB,EAAM,IAAyB,SA+F9D,EAAa,KACb,EAAa,KACb,AAAI,IAAM,EAAY,MAzFtB,AACE,EAA2B,AAAC,AAAkB,EAAjB,EAAM,IAAyB,IAC5D,KA2FF,OAAe,EAAK,OAhHpB,AACE,EAA2B,EAAM,MACjC,AAXF,AACE,KAyHiC,EAAK,aAkH7B,AAAO,EAAS,mBAC3B,AAAQ,AAAC,AAAC,KAAoC,GAAY,KAC1D,EAAO,KAIP,AAAI,AAFO,AAzMT,UA4MW,AAAO,EAAS,EAA0B,oBAIrD,AAAI,AAAwB,EAAxB,EAAQ,MACV,EACA,AAAW,OADX,EAAS,QAON,AACE,EAAS,EAA0B,sBAK5C,AAAI,AADO,EAAM,KACN,OAOX,EAAc,AAAmB,EAAW,GAA9B,AAFC,EAAO,KAEG,OACzB,EAAY,KACZ,EAAY,KAIZ,AADO,AAA2C,EAAzB,EAAQ,MACnB,KACd,AAnOE,EACA,MAoOF,AAAY,EAAM,UAiDlB,AAAI,AAAc,AAFA,IAEd,KAA6B,AAAY,EAAc,KAAe,WAE1E,GAAa,KACb,UACwB,EAAK,KAC3B,cACK,AAAc,MAAG,EAAK,KACzB,AAAkB,EAAJ,SAAN,SAD0B,AAAE,WAFF,AAAE,WAY5B,GAAM,GAAU,EAAiB,KAE7C,AAAO,QA3BP,AAAI,EAAQ,SAA+B,eAC3C,AAPI,EACA,AAAC,AAAC,KAAmC,GAAY,GAF9C,EAAQ,UAvIf,AAAI,EAAO,MAET,AAAK,AAAM,EAAQ,MAQnB,AAAK,EAAM,AAAW,AAHJ,AACd,EAAQ,EAAM,EAAW,AAAW,MAAU,GAC9C,EAFc,EAAO,cAIzB,AAAK,AAAO,EAAgB,EAAK,IAAa,KAC9C,EAAM,MAEG,AAAO,AAAgB,EAAK,KAArB,EAAK,qBAKvB,AAAK,WAF0B,EAAM,QA7KrC,AACE,EAA2B,AAAC,AAAkB,AA0LZ,GA1LL,EAAM,IAAyB,QAiL5D,AAAK,AADO,KAAc,EAAO,EAAK,SAMzB,AAAO,AA1MtB,AACE,EAA2B,AAuMpB,AAAW,KAvMe,4BA0MN,AAAS,2BAYtC,AAAgB,OACL,AAAS,AAAC,EAAO,GAAkB,mBAI9C,AAAI,AADY,AAAC,EAAY,GAAc,KAC1B,KACf,EAAe,EAAQ,EAAY,OAGnC,AADY,AAA8D,EAA5C,EAA2B,MAC1C,AAAC,EAAY,GAAkB,MAC9C,AAAY,EAAM,KAIlB,EAAe,EAAY,MArR7B,AAAyB,EAA2B,KAAkB,KAAe,IAsRnF,cAA0B,aAyH5B,AAAI,AAAC,AADO,AAAY,EAAM,AADZ,AAAY,aAnD9B,AAAkB,AAAO,AAAC,AADlB,EAAkB,AAAM,AAAC,AADf,IACqC,GAAM,GAAkB,AAlQ7E,QA2PF,AAEE,EAAQ,AAAC,EAAM,EAAW,AAAW,KAAU,MAF7C,EAAO,SAQsB,KAAU,KAAa,KAEpD,AAAY,AADM,EAAa,UACJ,KAC7B,AAAI,AAAY,IAAe,QAGvB,EAAM,EAAsB,GAAI,EAAqB,KAkDlD,AAAO,AADV,AAA8B,EAAM,wBAGnC,AAAO,AAA+B,EAA9B,KAAe,oBAClC,AAAY,EAAM,IAClB,AAAa,EAAM,EAAc,WEnOjC,AAAI,EAAO,SAAgC,eACzB,AAAkB,EAAlB,KFySlB,AAAI,AAAC,KAAM,GACX,AAAO,AAAgC,EAAM,IAAS,KEzShB,KACtB,KAChB,EAAiB,KACjB,EAAc,KACd,EAAgB,KAChB,AAAO,EAAM,QFwRb,AAAY,AAAkB,EAAM,KACpC,AACE,AAAY,AAAE,EAAM,MAApB,KACA,AAAE,KAAe,yBAEnB,QG3hBE,EAAkB,ICuHpB,AAAI,ADvHM,EAAK,KC8Hf,AAAI,EAAO,KACiB,AACpB,AAAC,EAAM,GAAO,EAAO,MACvB,EAAO,EAAO,KACZ,AAAI,AAAC,KACL,AAAE,OACQ,SAAiB,uBAE7B,EAAO,EAAK,KACV,AAAW,EAAM,AAAU,QAC3B,EAAQ,KACR,EAAQ,KACR,EAAQ,UAId,MACY,SAAiB,mBAC3B,AAAE,YAGsB,AACpB,AAAC,EAAM,GAAO,EAAO,MACvB,EAAO,AAAC,EAAO,GAAK,KAClB,AAAI,AAAC,KACL,AAAU,A
AAO,AAAE,OAAT,GAAY,AAAS,EAAM,aAEvC,EAAO,EAAK,KAEV,AAAW,AAAO,AADlB,EAAK,KACM,GAAU,AAAU,EAAM,cAI3C,MACE,AAAU,AAAO,AAAE,OAAT,GAAY,AAAS,EAAM,kBJ+WzC,EAAe,KAAe,MAE9B,AAAY,EAAM,OAZlB,AAAY,AADG,AAAc,EAAM,MACO,GAAgB,EAA2B,GAAgB,KAAe,KACpH,AAAI,EAA4B,MAE9B,AAAU,EAAM,KAElB,QEtQA,AAAI,EAAO,SAAgC,eACpB,EAAS,KFmShC,AAAI,AAAC,KAAM,GEnSsC,AAAkB,EAAlB,KFoSzC,EAAM,MACV,AAA4B,EAAM,AAAe,IAAM,SACrB,IAAM,AAAe,QArE3D,AAAI,AALc,AAAY,MAKX,AAHH,AADA,OACY,QAc5B,AAAI,AADY,AA5ahB,AAAyB,EAA2B,GAAkB,KAAe,WA6arE,KAEd,AAAI,AAAa,EAAb,AADY,EAAY,GAAkB,EAAY,SAExD,AAAY,EAAM,IAGlB,EAAe,AAA0B,EAAzB,EAAY,WAQhC,AAAiB,EAAM,EAAO,SAxB5B,AAAa,EAAM,EAAO,MAkE5B,EAGI,KEtSwB,GAAyB,KAC9C,EAAS,QAMhB,AAAI,EAAM,MA5LV,AAAO,AAAC,AADG,AA6LsB,AAAmB,EAAM,UA5L3C,OAAoB,AAAC,EAAO,GAAK,wBAChD,EAAW,EAAO,MAEP,AAAS,KAAW,oBA0L/B,KAMA,AAAI,EAAM,MAAa,AAAU,AAAmB,EAAM,SK3S1D,AAAO,EAAI,SLmHX,AAAS,AADE,OACK,SAEL,AAAS,KAAW,mBAC/B,AAAI,EAAM,KACR,8BAzDE,EAAM,MAGR,AAAU,AAAmB,EAAM,YAwDtB,AAAS,EAAO,uBAC3B,AA8BM,EAAM,KArBH,AAAO,mBACe,AAC/B,EAAW,AAA2B,EAAK,GAA/B,EAAO,mCG1GgD,KACvE,AAAO,AAAe,MAAgB,MAAc,cACpD,AAAO,AAAe,MAAgB,MAAc,cACpD,AAAO,AAAe,MAAgB,MAAc,cACpD,AAAO,AAAa,AAAC,MAAe,MAA7B,EAAQ,OAAmC,cAEgC,EAA3E,AAAC,AAAC,MAAiB,GAAO,AAAC,MAAiB,IAAO,AAAC,MAAiB","sourceRoot":"./optimized.wasm","sourcesContent":["// Alignment guarantees\n\n// @ts-ignore: decorator\n@inline export const AL_BITS: u32 = 4; // 16 bytes to fit up to v128\n// @ts-ignore: decorator\n@inline export const AL_SIZE: usize = 1 << <usize>AL_BITS;\n// @ts-ignore: decorator\n@inline export const AL_MASK: usize = AL_SIZE - 1;\n\n// Extra debugging\n\n// @ts-ignore: decorator\n@inline export const DEBUG = true;\n// @ts-ignore: decorator\n@inline export const TRACE = false;\n\n// Memory manager\n\n// ╒════════════ Memory manager block layout (32-bit) ═════════════╕\n// 3 2 1\n// 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 bits\n// ├─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┤\n// │ MM info │ -4\n// ╞>ptr═══════════════════════════════════════════════════════════╡\n// │ ... │\n@unmanaged export class BLOCK {\n /** Memory manager info. */\n mmInfo: usize;\n}\n\n/** Overhead of a memory manager block. */\n// @ts-ignore: decorator\n@inline export const BLOCK_OVERHEAD: usize = offsetof<BLOCK>();\n\n/** Maximum size of a memory manager block's payload. */\n// @ts-ignore: decorator\n@inline export const BLOCK_MAXSIZE: usize = (1 << 30) - BLOCK_OVERHEAD;\n\n// Garbage collector\n\n// ╒══════════ Garbage collector object layout (32-bit) ═══════════╕\n// 3 2 1\n// 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 bits\n// ├─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┤\n// │ Memory manager block │ -20\n// ╞═══════════════════════════════════════════════════════════════╡\n// │ GC info │ -16\n// ├───────────────────────────────────────────────────────────────┤\n// │ GC info │ -12\n// ├───────────────────────────────────────────────────────────────┤\n// │ RT id │ -8\n// ├───────────────────────────────────────────────────────────────┤\n// │ RT size │ -4\n// ╞>ptr═══════════════════════════════════════════════════════════╡\n// │ ... │\n@unmanaged export class OBJECT extends BLOCK {\n /** Garbage collector info. */\n gcInfo: u32;\n /** Garbage collector info. */\n gcInfo2: u32;\n /** Runtime class id. */\n rtId: u32;\n /** Runtime object size. */\n rtSize: u32;\n}\n\n/** Overhead of a garbage collector object. Excludes memory manager block overhead. 
*/\n// @ts-ignore: decorator\n@inline export const OBJECT_OVERHEAD: usize = (offsetof<OBJECT>() - BLOCK_OVERHEAD + AL_MASK) & ~AL_MASK;\n\n/** Maximum size of a garbage collector object's payload. */\n// @ts-ignore: decorator\n@inline export const OBJECT_MAXSIZE: usize = BLOCK_MAXSIZE - OBJECT_OVERHEAD;\n\n/** Total of memory manager and garbage collector overhead. */\n// @ts-ignore: decorator\n@inline export const TOTAL_OVERHEAD: usize = BLOCK_OVERHEAD + OBJECT_OVERHEAD;\n","import { AL_BITS, AL_SIZE, AL_MASK, DEBUG, BLOCK, BLOCK_OVERHEAD, BLOCK_MAXSIZE } from \"rt/common\";\nimport { onalloc, onresize, onmove, onfree } from \"./rtrace\";\n\n// === The TLSF (Two-Level Segregate Fit) memory allocator ===\n// see: http://www.gii.upv.es/tlsf/\n\n// - `ffs(x)` is equivalent to `ctz(x)` with x != 0\n// - `fls(x)` is equivalent to `sizeof(x) * 8 - clz(x) - 1`\n\n// ╒══════════════ Block size interpretation (32-bit) ═════════════╕\n// 3 2 1\n// 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 bits\n// ├─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┼─┴─┴─┴─╫─┴─┴─┴─┤\n// │ | FL │ SB = SL + AL │ ◄─ usize\n// └───────────────────────────────────────────────┴───────╨───────┘\n// FL: first level, SL: second level, AL: alignment, SB: small block\n\n// @ts-ignore: decorator\n@inline const SL_BITS: u32 = 4;\n// @ts-ignore: decorator\n@inline const SL_SIZE: u32 = 1 << SL_BITS;\n\n// @ts-ignore: decorator\n@inline const SB_BITS: u32 = SL_BITS + AL_BITS;\n// @ts-ignore: decorator\n@inline const SB_SIZE: u32 = 1 << SB_BITS;\n\n// @ts-ignore: decorator\n@inline const FL_BITS: u32 = 31 - SB_BITS;\n\n// [00]: < 256B (SB) [12]: < 1M\n// [01]: < 512B [13]: < 2M\n// [02]: < 1K [14]: < 4M\n// [03]: < 2K [15]: < 8M\n// [04]: < 4K [16]: < 16M\n// [05]: < 8K [17]: < 32M\n// [06]: < 16K [18]: < 64M\n// [07]: < 32K [19]: < 128M\n// [08]: < 64K [20]: < 256M\n// [09]: < 128K [21]: < 512M\n// [10]: < 256K [22]: <= 1G - OVERHEAD\n// [11]: < 512K\n// VMs limit to 2GB total (currently), making one 1G block max (or three 512M etc.) due to block overhead\n\n// Tags stored in otherwise unused alignment bits\n\n// @ts-ignore: decorator\n@inline const FREE: usize = 1 << 0;\n// @ts-ignore: decorator\n@inline const LEFTFREE: usize = 1 << 1;\n// @ts-ignore: decorator\n@inline const TAGS_MASK: usize = FREE | LEFTFREE; // <= AL_MASK\n\n// ╒════════════════════ Block layout (32-bit) ════════════════════╕\n// 3 2 1\n// 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 bits\n// ├─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┼─┼─┤ ┐\n// │ size │L│F│ ◄─┐ info overhead\n// ╞>ptr═══════════════════════════════════════════════════════╧═╧═╡ │ ┘\n// │ if free: ◄ prev │ ◄─┤ usize\n// ├───────────────────────────────────────────────────────────────┤ │\n// │ if free: next ► │ ◄─┤\n// ├───────────────────────────────────────────────────────────────┤ │\n// │ ... │ │ >= 0\n// ├───────────────────────────────────────────────────────────────┤ │\n// │ if free: back ▲ │ ◄─┘\n// └───────────────────────────────────────────────────────────────┘ >= MIN SIZE\n// F: FREE, L: LEFTFREE\n@unmanaged export class Block extends BLOCK {\n\n /** Previous free block, if any. Only valid if free, otherwise part of payload. */\n prev: Block | null;\n /** Next free block, if any. Only valid if free, otherwise part of payload. */\n next: Block | null;\n\n // If the block is free, there is a 'back'reference at its end pointing at its start.\n}\n\n// Block constants. 
A block must have a minimum size of three pointers so it can hold `prev`,\n// `next` and `back` if free.\n\n// @ts-ignore: decorator\n@inline const BLOCK_MINSIZE: usize = ((3 * sizeof<usize>() + BLOCK_OVERHEAD + AL_MASK) & ~AL_MASK) - BLOCK_OVERHEAD; // prev + next + back\n// @ts-ignore: decorator\n// @inline const BLOCK_MAXSIZE: usize = 1 << (FL_BITS + SB_BITS - 1); // exclusive, lives in common.ts\n\n/** Gets the left block of a block. Only valid if the left block is free. */\n// @ts-ignore: decorator\n@inline function GETFREELEFT(block: Block): Block {\n return load<Block>(changetype<usize>(block) - sizeof<usize>());\n}\n\n/** Gets the right block of a block by advancing to the right by its size. */\n// @ts-ignore: decorator\n@inline function GETRIGHT(block: Block): Block {\n return changetype<Block>(changetype<usize>(block) + BLOCK_OVERHEAD + (block.mmInfo & ~TAGS_MASK));\n}\n\n// ╒═════════════════════ Root layout (32-bit) ════════════════════╕\n// 3 2 1\n// 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 bits\n// ├─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┤ ┐\n// │ 0 | flMap S│ ◄────┐\n// ╞═══════════════════════════════════════════════════════════════╡ │\n// │ slMap[0] S │ ◄─┐ │\n// ├───────────────────────────────────────────────────────────────┤ │ │\n// │ slMap[1] │ ◄─┤ │\n// ├───────────────────────────────────────────────────────────────┤ u32 │\n// │ slMap[22] │ ◄─┘ │\n// ╞═══════════════════════════════════════════════════════════════╡ usize\n// │ head[0] │ ◄────┤\n// ├───────────────────────────────────────────────────────────────┤ │\n// │ ... │ ◄────┤\n// ├───────────────────────────────────────────────────────────────┤ │\n// │ head[367] │ ◄────┤\n// ╞═══════════════════════════════════════════════════════════════╡ │\n// │ tail │ ◄────┘\n// └───────────────────────────────────────────────────────────────┘ SIZE ┘\n// S: Small blocks map\n@unmanaged class Root {\n /** First level bitmap. */\n flMap: usize;\n}\n\n// Root constants. Where stuff is stored inside of the root structure.\n\n// @ts-ignore: decorator\n@inline const SL_START: usize = sizeof<usize>();\n// @ts-ignore: decorator\n@inline const SL_END: usize = SL_START + (FL_BITS << alignof<u32>());\n// @ts-ignore: decorator\n@inline const HL_START: usize = (SL_END + AL_MASK) & ~AL_MASK;\n// @ts-ignore: decorator\n@inline const HL_END: usize = HL_START + FL_BITS * SL_SIZE * sizeof<usize>();\n// @ts-ignore: decorator\n@inline const ROOT_SIZE: usize = HL_END + sizeof<usize>();\n\n// @ts-ignore: decorator\n@lazy export var ROOT: Root;\n\n/** Gets the second level map of the specified first level. */\n// @ts-ignore: decorator\n@inline function GETSL(root: Root, fl: usize): u32 {\n return load<u32>(\n changetype<usize>(root) + (fl << alignof<u32>()),\n SL_START\n );\n}\n\n/** Sets the second level map of the specified first level. */\n// @ts-ignore: decorator\n@inline function SETSL(root: Root, fl: usize, slMap: u32): void {\n store<u32>(\n changetype<usize>(root) + (fl << alignof<u32>()),\n slMap,\n SL_START\n );\n}\n\n/** Gets the head of the free list for the specified combination of first and second level. */\n// @ts-ignore: decorator\n@inline function GETHEAD(root: Root, fl: usize, sl: u32): Block | null {\n return load<Block>(\n changetype<usize>(root) + (((fl << SL_BITS) + <usize>sl) << alignof<usize>()),\n HL_START\n );\n}\n\n/** Sets the head of the free list for the specified combination of first and second level. 
*/\n// @ts-ignore: decorator\n@inline function SETHEAD(root: Root, fl: usize, sl: u32, head: Block | null): void {\n store<Block | null>(\n changetype<usize>(root) + (((fl << SL_BITS) + <usize>sl) << alignof<usize>()),\n head,\n HL_START\n );\n}\n\n/** Gets the tail block.. */\n// @ts-ignore: decorator\n@inline function GETTAIL(root: Root): Block {\n return load<Block>(\n changetype<usize>(root),\n HL_END\n );\n}\n\n/** Sets the tail block. */\n// @ts-ignore: decorator\n@inline function SETTAIL(root: Root, tail: Block): void {\n store<Block>(\n changetype<usize>(root),\n tail,\n HL_END\n );\n}\n\n/** Inserts a previously used block back into the free list. */\nfunction insertBlock(root: Root, block: Block): void {\n if (DEBUG) assert(block); // cannot be null\n var blockInfo = block.mmInfo;\n if (DEBUG) assert(blockInfo & FREE); // must be free\n\n var right = GETRIGHT(block);\n var rightInfo = right.mmInfo;\n\n // merge with right block if also free\n if (rightInfo & FREE) {\n let newSize = (blockInfo & ~TAGS_MASK) + BLOCK_OVERHEAD + (rightInfo & ~TAGS_MASK);\n if (newSize < BLOCK_MAXSIZE) {\n removeBlock(root, right);\n block.mmInfo = blockInfo = (blockInfo & TAGS_MASK) | newSize;\n right = GETRIGHT(block);\n rightInfo = right.mmInfo;\n // 'back' is set below\n }\n }\n\n // merge with left block if also free\n if (blockInfo & LEFTFREE) {\n let left = GETFREELEFT(block);\n let leftInfo = left.mmInfo;\n if (DEBUG) assert(leftInfo & FREE); // must be free according to right tags\n let newSize = (leftInfo & ~TAGS_MASK) + BLOCK_OVERHEAD + (blockInfo & ~TAGS_MASK);\n if (newSize < BLOCK_MAXSIZE) {\n removeBlock(root, left);\n left.mmInfo = blockInfo = (leftInfo & TAGS_MASK) | newSize;\n block = left;\n // 'back' is set below\n }\n }\n\n right.mmInfo = rightInfo | LEFTFREE;\n // reference to right is no longer used now, hence rightInfo is not synced\n\n // we now know the size of the block\n var size = blockInfo & ~TAGS_MASK;\n if (DEBUG) assert(size >= BLOCK_MINSIZE && size < BLOCK_MAXSIZE); // must be a valid size\n if (DEBUG) assert(changetype<usize>(block) + BLOCK_OVERHEAD + size == changetype<usize>(right)); // must match\n\n // set 'back' to itself at the end of block\n store<Block>(changetype<usize>(right) - sizeof<usize>(), block);\n\n // mapping_insert\n var fl: usize, sl: u32;\n if (size < SB_SIZE) {\n fl = 0;\n sl = <u32>(size >> AL_BITS);\n } else {\n const inv: usize = sizeof<usize>() * 8 - 1;\n fl = inv - clz<usize>(size);\n sl = <u32>((size >> (fl - SL_BITS)) ^ (1 << SL_BITS));\n fl -= SB_BITS - 1;\n }\n if (DEBUG) assert(fl < FL_BITS && sl < SL_SIZE); // fl/sl out of range\n\n // perform insertion\n var head = GETHEAD(root, fl, sl);\n block.prev = null;\n block.next = head;\n if (head) head.prev = block;\n SETHEAD(root, fl, sl, block);\n\n // update first and second level maps\n root.flMap |= (1 << fl);\n SETSL(root, fl, GETSL(root, fl) | (1 << sl));\n}\n\n/** Removes a free block from internal lists. 
*/\nfunction removeBlock(root: Root, block: Block): void {\n var blockInfo = block.mmInfo;\n if (DEBUG) assert(blockInfo & FREE); // must be free\n var size = blockInfo & ~TAGS_MASK;\n if (DEBUG) assert(size >= BLOCK_MINSIZE && size < BLOCK_MAXSIZE); // must be valid\n\n // mapping_insert\n var fl: usize, sl: u32;\n if (size < SB_SIZE) {\n fl = 0;\n sl = <u32>(size >> AL_BITS);\n } else {\n const inv: usize = sizeof<usize>() * 8 - 1;\n fl = inv - clz<usize>(size);\n sl = <u32>((size >> (fl - SL_BITS)) ^ (1 << SL_BITS));\n fl -= SB_BITS - 1;\n }\n if (DEBUG) assert(fl < FL_BITS && sl < SL_SIZE); // fl/sl out of range\n\n // link previous and next free block\n var prev = block.prev;\n var next = block.next;\n if (prev) prev.next = next;\n if (next) next.prev = prev;\n\n // update head if we are removing it\n if (block == GETHEAD(root, fl, sl)) {\n SETHEAD(root, fl, sl, next);\n\n // clear second level map if head is empty now\n if (!next) {\n let slMap = GETSL(root, fl);\n SETSL(root, fl, slMap &= ~(1 << sl));\n\n // clear first level map if second level is empty now\n if (!slMap) root.flMap &= ~(1 << fl);\n }\n }\n // note: does not alter left/back because it is likely that splitting\n // is performed afterwards, invalidating those changes. so, the caller\n // must perform those updates.\n}\n\n/** Searches for a free block of at least the specified size. */\nfunction searchBlock(root: Root, size: usize): Block | null {\n // size was already asserted by caller\n\n // mapping_search\n var fl: usize, sl: u32;\n if (size < SB_SIZE) {\n fl = 0;\n sl = <u32>(size >> AL_BITS);\n } else {\n const halfMaxSize = BLOCK_MAXSIZE >> 1; // don't round last fl\n const inv: usize = sizeof<usize>() * 8 - 1;\n const invRound = inv - SL_BITS;\n let requestSize = size < halfMaxSize\n ? size + (1 << (invRound - clz<usize>(size))) - 1\n : size;\n fl = inv - clz<usize>(requestSize);\n sl = <u32>((requestSize >> (fl - SL_BITS)) ^ (1 << SL_BITS));\n fl -= SB_BITS - 1;\n }\n if (DEBUG) assert(fl < FL_BITS && sl < SL_SIZE); // fl/sl out of range\n\n // search second level\n var slMap = GETSL(root, fl) & (~0 << sl);\n var head: Block | null = null;\n if (!slMap) {\n // search next larger first level\n let flMap = root.flMap & (~0 << (fl + 1));\n if (!flMap) {\n head = null;\n } else {\n fl = ctz<usize>(flMap);\n slMap = GETSL(root, fl);\n if (DEBUG) assert(slMap); // can't be zero if fl points here\n head = GETHEAD(root, fl, ctz<u32>(slMap));\n }\n } else {\n head = GETHEAD(root, fl, ctz<u32>(slMap));\n }\n return head;\n}\n\n/** Prepares the specified block before (re-)use, possibly splitting it. */\nfunction prepareBlock(root: Root, block: Block, size: usize): void {\n // size was already asserted by caller\n\n var blockInfo = block.mmInfo;\n if (DEBUG) assert(!((size + BLOCK_OVERHEAD) & AL_MASK)); // size must be aligned so the new block is\n\n // split if the block can hold another MINSIZE block incl. overhead\n var remaining = (blockInfo & ~TAGS_MASK) - size;\n if (remaining >= BLOCK_OVERHEAD + BLOCK_MINSIZE) {\n block.mmInfo = size | (blockInfo & LEFTFREE); // also discards FREE\n\n let spare = changetype<Block>(changetype<usize>(block) + BLOCK_OVERHEAD + size);\n spare.mmInfo = (remaining - BLOCK_OVERHEAD) | FREE; // not LEFTFREE\n insertBlock(root, spare); // also sets 'back'\n\n // otherwise tag block as no longer FREE and right as no longer LEFTFREE\n } else {\n block.mmInfo = blockInfo & ~FREE;\n GETRIGHT(block).mmInfo &= ~LEFTFREE;\n }\n}\n\n/** Adds more memory to the pool. 
*/\nfunction addMemory(root: Root, start: usize, end: usize): bool {\n if (DEBUG) assert(start <= end); // must be valid\n start = ((start + BLOCK_OVERHEAD + AL_MASK) & ~AL_MASK) - BLOCK_OVERHEAD;\n end &= ~AL_MASK;\n\n var tail = GETTAIL(root);\n var tailInfo: usize = 0;\n if (tail) { // more memory\n if (DEBUG) assert(start >= changetype<usize>(tail) + BLOCK_OVERHEAD);\n\n // merge with current tail if adjacent\n const offsetToTail = AL_SIZE;\n if (start - offsetToTail == changetype<usize>(tail)) {\n start -= offsetToTail;\n tailInfo = tail.mmInfo;\n } else {\n // We don't do this, but a user might `memory.grow` manually\n // leading to non-adjacent pages managed by TLSF.\n }\n\n } else if (DEBUG) { // first memory\n assert(start >= changetype<usize>(root) + ROOT_SIZE); // starts after root\n }\n\n // check if size is large enough for a free block and the tail block\n var size = end - start;\n if (size < BLOCK_OVERHEAD + BLOCK_MINSIZE + BLOCK_OVERHEAD) {\n return false;\n }\n\n // left size is total minus its own and the zero-length tail's header\n var leftSize = size - 2 * BLOCK_OVERHEAD;\n var left = changetype<Block>(start);\n left.mmInfo = leftSize | FREE | (tailInfo & LEFTFREE);\n left.prev = null;\n left.next = null;\n\n // tail is a zero-length used block\n tail = changetype<Block>(start + BLOCK_OVERHEAD + leftSize);\n tail.mmInfo = 0 | LEFTFREE;\n SETTAIL(root, tail);\n\n insertBlock(root, left); // also merges with free left before tail / sets 'back'\n\n return true;\n}\n\n/** Grows memory to fit at least another block of the specified size. */\nfunction growMemory(root: Root, size: usize): void {\n if (ASC_LOW_MEMORY_LIMIT) {\n unreachable();\n return;\n }\n // Here, both rounding performed in searchBlock ...\n const halfMaxSize = BLOCK_MAXSIZE >> 1;\n if (size < halfMaxSize) { // don't round last fl\n const invRound = (sizeof<usize>() * 8 - 1) - SL_BITS;\n size += (1 << (invRound - clz<usize>(size))) - 1;\n }\n // and additional BLOCK_OVERHEAD must be taken into account. If we are going\n // to merge with the tail block, that's one time, otherwise it's two times.\n var pagesBefore = memory.size();\n size += BLOCK_OVERHEAD << usize((<usize>pagesBefore << 16) - BLOCK_OVERHEAD != changetype<usize>(GETTAIL(root)));\n var pagesNeeded = <i32>(((size + 0xffff) & ~0xffff) >>> 16);\n var pagesWanted = max(pagesBefore, pagesNeeded); // double memory\n if (memory.grow(pagesWanted) < 0) {\n if (memory.grow(pagesNeeded) < 0) unreachable();\n }\n var pagesAfter = memory.size();\n addMemory(root, <usize>pagesBefore << 16, <usize>pagesAfter << 16);\n}\n\n/** Computes the size (excl. header) of a block. */\nfunction computeSize(size: usize): usize {\n // Size must be large enough and aligned minus preceeding overhead\n return size <= BLOCK_MINSIZE\n ? BLOCK_MINSIZE\n : ((size + BLOCK_OVERHEAD + AL_MASK) & ~AL_MASK) - BLOCK_OVERHEAD;\n}\n\n/** Prepares and checks an allocation size. */\nfunction prepareSize(size: usize): usize {\n if (size >= BLOCK_MAXSIZE) throw new Error(\"allocation too large\");\n return computeSize(size);\n}\n\n/** Initializes the root structure. 
*/\nfunction initialize(): void {\n var rootOffset = (__heap_base + AL_MASK) & ~AL_MASK;\n var pagesBefore = memory.size();\n var pagesNeeded = <i32>((((rootOffset + ROOT_SIZE) + 0xffff) & ~0xffff) >>> 16);\n if (pagesNeeded > pagesBefore && memory.grow(pagesNeeded - pagesBefore) < 0) unreachable();\n var root = changetype<Root>(rootOffset);\n root.flMap = 0;\n SETTAIL(root, changetype<Block>(0));\n for (let fl: usize = 0; fl < FL_BITS; ++fl) {\n SETSL(root, fl, 0);\n for (let sl: u32 = 0; sl < SL_SIZE; ++sl) {\n SETHEAD(root, fl, sl, null);\n }\n }\n var memStart = rootOffset + ROOT_SIZE;\n if (ASC_LOW_MEMORY_LIMIT) {\n const memEnd = <usize>ASC_LOW_MEMORY_LIMIT & ~AL_MASK;\n if (memStart <= memEnd) addMemory(root, memStart, memEnd);\n else unreachable(); // low memory limit already exceeded\n } else {\n addMemory(root, memStart, memory.size() << 16);\n }\n ROOT = root;\n}\n\n/** Allocates a block of the specified size. */\nexport function allocateBlock(root: Root, size: usize): Block {\n var payloadSize = prepareSize(size);\n var block = searchBlock(root, payloadSize);\n if (!block) {\n growMemory(root, payloadSize);\n block = changetype<Block>(searchBlock(root, payloadSize));\n if (DEBUG) assert(block); // must be found now\n }\n if (DEBUG) assert((block.mmInfo & ~TAGS_MASK) >= payloadSize); // must fit\n removeBlock(root, <Block>block);\n prepareBlock(root, <Block>block, payloadSize);\n if (isDefined(ASC_RTRACE)) onalloc(block);\n return <Block>block;\n}\n\n/** Reallocates a block to the specified size. */\nexport function reallocateBlock(root: Root, block: Block, size: usize): Block {\n var payloadSize = prepareSize(size);\n var blockInfo = block.mmInfo;\n var blockSize = blockInfo & ~TAGS_MASK;\n\n // possibly split and update runtime size if it still fits\n if (payloadSize <= blockSize) {\n prepareBlock(root, block, payloadSize);\n if (isDefined(ASC_RTRACE)) {\n if (payloadSize != blockSize) onresize(block, blockSize);\n }\n return block;\n }\n\n // merge with right free block if merger is large enough\n var right = GETRIGHT(block);\n var rightInfo = right.mmInfo;\n if (rightInfo & FREE) {\n let mergeSize = blockSize + BLOCK_OVERHEAD + (rightInfo & ~TAGS_MASK);\n if (mergeSize >= payloadSize) {\n removeBlock(root, right);\n // TODO: this can yield an intermediate block larger than BLOCK_MAXSIZE, which\n // is immediately split though. does this trigger any assertions / issues?\n block.mmInfo = (blockInfo & TAGS_MASK) | mergeSize;\n prepareBlock(root, block, payloadSize);\n if (isDefined(ASC_RTRACE)) onresize(block, blockSize);\n return block;\n }\n }\n\n // otherwise move the block\n return moveBlock(root, block, size);\n}\n\n/** Moves a block to a new one of the specified size. */\nfunction moveBlock(root: Root, block: Block, newSize: usize): Block {\n var newBlock = allocateBlock(root, newSize);\n memory.copy(changetype<usize>(newBlock) + BLOCK_OVERHEAD, changetype<usize>(block) + BLOCK_OVERHEAD, block.mmInfo & ~TAGS_MASK);\n if (changetype<usize>(block) >= __heap_base) {\n if (isDefined(ASC_RTRACE)) onmove(block, newBlock);\n freeBlock(root, block);\n }\n return newBlock;\n}\n\n/** Frees a block. */\nexport function freeBlock(root: Root, block: Block): void {\n block.mmInfo = block.mmInfo | FREE;\n if (isDefined(ASC_RTRACE)) onfree(block);\n insertBlock(root, block);\n}\n\n/** Checks that a used block is valid to be freed or reallocated. 
*/\nfunction checkUsedBlock(ptr: usize): Block {\n var block = changetype<Block>(ptr - BLOCK_OVERHEAD);\n assert(\n ptr != 0 && !(ptr & AL_MASK) && // must exist and be aligned\n !(block.mmInfo & FREE) // must be used\n );\n return block;\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __alloc(size: usize): usize {\n if (!ROOT) initialize();\n return changetype<usize>(allocateBlock(ROOT, size)) + BLOCK_OVERHEAD;\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __realloc(ptr: usize, size: usize): usize {\n if (!ROOT) initialize();\n return (ptr < __heap_base\n ? changetype<usize>(moveBlock(ROOT, checkUsedBlock(ptr), size))\n : changetype<usize>(reallocateBlock(ROOT, checkUsedBlock(ptr), size))\n ) + BLOCK_OVERHEAD;\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __free(ptr: usize): void {\n if (ptr < __heap_base) return;\n if (!ROOT) initialize();\n freeBlock(ROOT, checkUsedBlock(ptr));\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __reset(): void {\n throw new Error(\"not implemented\");\n}\n","// This file is shared with the compiler and must remain portable\n\n// ╒═══════════════════ Typeinfo interpretation ═══════════════════╕\n// 3 2 1\n// 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 bits\n// ├─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┤ ◄─ __rtti_base\n// │ count │\n// ╞═══════════════════════════════════════════════════════════════╡ ┐\n// │ Typeinfo#flags [id=0] │ id < count\n// ├ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┤\n// │ Typeinfo#base [id=0] │\n// ├───────────────────────────────────────────────────────────────┤\n// │ ... │\n\n/** Runtime type information data structure. */\n@unmanaged\nexport class Typeinfo {\n /** Flags describing the shape of this class type. */\n flags: TypeinfoFlags = TypeinfoFlags.NONE;\n /** Base class id or `0` if none. */\n base: u32 = 0;\n}\n\n/** Runtime type information flags. */\nexport const enum TypeinfoFlags {\n /** No specific flags. */\n NONE = 0,\n /** Type is an `ArrayBufferView`. */\n ARRAYBUFFERVIEW = 1 << 0,\n /** Type is an `Array`. */\n ARRAY = 1 << 1,\n /** Type is a `StaticArray`. */\n STATICARRAY = 1 << 2,\n /** Type is a `Set`. */\n SET = 1 << 3,\n /** Type is a `Map`. */\n MAP = 1 << 4,\n /** Type is inherently acyclic. */\n ACYCLIC = 1 << 5,\n /** Value alignment of 1 byte. */\n VALUE_ALIGN_0 = 1 << 6,\n /** Value alignment of 2 bytes. */\n VALUE_ALIGN_1 = 1 << 7,\n /** Value alignment of 4 bytes. */\n VALUE_ALIGN_2 = 1 << 8,\n /** Value alignment of 8 bytes. */\n VALUE_ALIGN_3 = 1 << 9,\n /** Value alignment of 16 bytes. */\n VALUE_ALIGN_4 = 1 << 10,\n /** Value is a signed type. */\n VALUE_SIGNED = 1 << 11,\n /** Value is a float type. */\n VALUE_FLOAT = 1 << 12,\n /** Value type is nullable. */\n VALUE_NULLABLE = 1 << 13,\n /** Value type is managed. */\n VALUE_MANAGED = 1 << 14,\n /** Key alignment of 1 byte. */\n KEY_ALIGN_0 = 1 << 15,\n /** Key alignment of 2 bytes. */\n KEY_ALIGN_1 = 1 << 16,\n /** Key alignment of 4 bytes. */\n KEY_ALIGN_2 = 1 << 17,\n /** Key alignment of 8 bytes. */\n KEY_ALIGN_3 = 1 << 18,\n /** Key alignment of 16 bytes. */\n KEY_ALIGN_4 = 1 << 19,\n /** Key is a signed type. */\n KEY_SIGNED = 1 << 20,\n /** Key is a float type. */\n KEY_FLOAT = 1 << 21,\n /** Key type is nullable. */\n KEY_NULLABLE = 1 << 22,\n /** Key type is managed. 
*/\n KEY_MANAGED = 1 << 23\n}\n","import { BLOCK_OVERHEAD, DEBUG, OBJECT, OBJECT_MAXSIZE, OBJECT_OVERHEAD, TOTAL_OVERHEAD } from \"rt/common\";\nimport { Block, freeBlock, ROOT } from \"rt/tlsf\";\nimport { TypeinfoFlags } from \"shared/typeinfo\";\nimport { onincrement, ondecrement } from \"./rtrace\";\n\n// === A Pure Reference Counting Garbage Collector ===\n// see: https://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon03Pure.pdf\n\n// ╒══════════════════════ GC Info structure ══════════════════════╕\n// │ 3 2 1 │\n// │1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0│\n// ├─┼─┴─┴─┼─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┴─┤\n// │B│color│ refCount │\n// └─┴─────┴───────────────────────────────────────────────────────┘\n// B: buffered\n\n// @ts-ignore: decorator\n@inline const BUFFERED_MASK: u32 = 1 << ((sizeof<u32>() * 8) - 1);\n// @ts-ignore: decorator\n@inline const COLOR_BITS = 3;\n// @ts-ignore: decorator\n@inline const COLOR_SHIFT: u32 = ctz(BUFFERED_MASK) - COLOR_BITS;\n// @ts-ignore: decorator\n@inline const COLOR_MASK: u32 = ((1 << COLOR_BITS) - 1) << COLOR_SHIFT;\n// @ts-ignore: decorator\n@inline export const REFCOUNT_MASK: u32 = (1 << COLOR_SHIFT) - 1;\n\n// ╒════════╤═══════════════════ Colors ═══════════════════════════╕\n// │ Color │ Meaning │\n// ├────────┼──────────────────────────────────────────────────────┤\n// │ BLACK │ In use or free │\n// │ GRAY │ Possible member of cycle │\n// │ WHITE │ Member of garbage cycle │\n// │ PURPLE │ Possible root of cycle │\n// │ RED │ Candidate cycle undergoing Σ-computation *concurrent │\n// │ ORANGE │ Candidate cycle awaiting epoch boundary *concurrent │\n// └────────┴──────────────────────────────────────────────────────┘\n// Acyclic detection has been decoupled, hence no GREEN.\n\n// @ts-ignore: decorator\n@inline const COLOR_BLACK: u32 = 0 << COLOR_SHIFT;\n// @ts-ignore: decorator\n@inline const COLOR_GRAY: u32 = 1 << COLOR_SHIFT;\n// @ts-ignore: decorator\n@inline const COLOR_WHITE: u32 = 2 << COLOR_SHIFT;\n// @ts-ignore: decorator\n@inline const COLOR_PURPLE: u32 = 3 << COLOR_SHIFT;\n// @ts-ignore: decorator\n// @inline const COLOR_RED: u32 = 4 << COLOR_SHIFT;\n// @ts-ignore: decorator\n// @inline const COLOR_ORANGE: u32 = 5 << COLOR_SHIFT;\n\n// @ts-ignore: decorator\n@inline const VISIT_DECREMENT = 1; // guard 0\n// @ts-ignore: decorator\n@inline const VISIT_MARKGRAY = 2;\n// @ts-ignore: decorator\n@inline const VISIT_SCAN = 3;\n// @ts-ignore: decorator\n@inline const VISIT_SCANBLACK = 4;\n// @ts-ignore: decorator\n@inline const VISIT_COLLECTWHITE = 5;\n\n// @ts-ignore: decorator\n@global @unsafe @lazy\nfunction __visit(ref: usize, cookie: i32): void { // eslint-disable-line @typescript-eslint/no-unused-vars\n if (ref < __heap_base) return;\n if (isDefined(__GC_ALL_ACYCLIC)) {\n if (DEBUG) assert(cookie == VISIT_DECREMENT);\n decrement(changetype<OBJECT>(ref - TOTAL_OVERHEAD));\n } else {\n let s = changetype<OBJECT>(ref - TOTAL_OVERHEAD);\n switch (cookie) {\n case VISIT_DECREMENT: {\n decrement(s);\n break;\n }\n case VISIT_MARKGRAY: {\n if (DEBUG) assert((s.gcInfo & REFCOUNT_MASK) > 0);\n s.gcInfo = s.gcInfo - 1;\n markGray(s);\n break;\n }\n case VISIT_SCAN: {\n scan(s);\n break;\n }\n case VISIT_SCANBLACK: {\n let info = s.gcInfo;\n assert((info & ~REFCOUNT_MASK) == ((info + 1) & ~REFCOUNT_MASK)); // overflow\n s.gcInfo = info + 1;\n if ((info & COLOR_MASK) != COLOR_BLACK) {\n scanBlack(s);\n }\n break;\n }\n case VISIT_COLLECTWHITE: {\n collectWhite(s);\n break;\n }\n default: if 
(DEBUG) assert(false);\n }\n }\n}\n\n/** Increments the reference count of the specified block by one.*/\nfunction increment(s: OBJECT): void {\n var info = s.gcInfo;\n assert((info & ~REFCOUNT_MASK) == ((info + 1) & ~REFCOUNT_MASK)); // overflow\n s.gcInfo = info + 1;\n if (isDefined(ASC_RTRACE)) onincrement(s);\n if (DEBUG) assert(!(s.mmInfo & 1)); // used\n}\n\n/** Decrements the reference count of the specified block by one, possibly freeing it. */\n// @ts-ignore: decorator\n@lazy\nfunction decrement(s: OBJECT): void {\n var info = s.gcInfo;\n var rc = info & REFCOUNT_MASK;\n if (isDefined(ASC_RTRACE)) ondecrement(s);\n if (DEBUG) assert(!(s.mmInfo & 1)); // used\n if (rc == 1) {\n __visit_members(changetype<usize>(s) + TOTAL_OVERHEAD, VISIT_DECREMENT);\n if (isDefined(__GC_ALL_ACYCLIC)) {\n if (DEBUG) assert(!(info & BUFFERED_MASK));\n finalize(s);\n } else {\n if (!(info & BUFFERED_MASK)) {\n finalize(s);\n } else {\n s.gcInfo = BUFFERED_MASK | COLOR_BLACK | 0;\n }\n }\n } else {\n if (DEBUG) assert(rc > 0);\n if (isDefined(__GC_ALL_ACYCLIC)) {\n s.gcInfo = (info & ~REFCOUNT_MASK) | (rc - 1);\n } else {\n if (!(__typeinfo(s.rtId) & TypeinfoFlags.ACYCLIC)) {\n s.gcInfo = BUFFERED_MASK | COLOR_PURPLE | (rc - 1);\n if (!(info & BUFFERED_MASK)) {\n appendRoot(s);\n }\n } else {\n s.gcInfo = (info & ~REFCOUNT_MASK) | (rc - 1);\n }\n }\n }\n}\n\n/** Finalizes the specified block, giving it back to the memory manager. */\nfunction finalize(s: OBJECT): void {\n if (isDefined(__finalize)) {\n __finalize(changetype<usize>(s) + TOTAL_OVERHEAD);\n }\n freeBlock(ROOT, changetype<Block>(s));\n}\n\n/** Buffer of possible roots. */\n// @ts-ignore: decorator\n@lazy var ROOTS: usize;\n/** Current absolute offset into the `ROOTS` buffer. */\n// @ts-ignore: decorator\n@lazy var CUR: usize = 0;\n/** Current absolute end offset into the `ROOTS` buffer. */\n// @ts-ignore: decorator\n@lazy var END: usize = 0;\n\n/** Appends a block to possible roots. */\nfunction appendRoot(s: OBJECT): void {\n var cur = CUR;\n if (cur >= END) {\n growRoots(); // TBD: either that or pick a default and force collection on overflow\n cur = CUR;\n }\n store<OBJECT>(cur, s);\n CUR = cur + sizeof<usize>();\n}\n\n/** Grows the roots buffer if it ran full. */\nfunction growRoots(): void {\n var oldRoots = ROOTS;\n var oldSize = CUR - oldRoots;\n var newSize = max(oldSize * 2, 64 << alignof<usize>());\n var newRoots = __alloc(newSize);\n memory.copy(newRoots, oldRoots, oldSize);\n if (oldRoots) __free(oldRoots);\n ROOTS = newRoots;\n CUR = newRoots + oldSize;\n END = newRoots + newSize;\n}\n\n/** Collects cyclic garbage. 
*/\n// @ts-ignore: decorator\n@global @unsafe @lazy\nexport function __collect(): void {\n if (isDefined(__GC_ALL_ACYCLIC)) return;\n\n // markRoots\n var roots = ROOTS;\n var cur = roots;\n for (let pos = cur, end = CUR; pos < end; pos += sizeof<usize>()) {\n let s = load<OBJECT>(pos);\n let info = s.gcInfo;\n if ((info & COLOR_MASK) == COLOR_PURPLE && (info & REFCOUNT_MASK) > 0) {\n markGray(s);\n store<OBJECT>(cur, s);\n cur += sizeof<usize>();\n } else {\n if ((info & COLOR_MASK) == COLOR_BLACK && !(info & REFCOUNT_MASK)) {\n finalize(s);\n } else {\n s.gcInfo = info & ~BUFFERED_MASK;\n }\n }\n }\n CUR = cur;\n\n // scanRoots\n for (let pos = roots; pos < cur; pos += sizeof<usize>()) {\n scan(load<OBJECT>(pos));\n }\n\n // collectRoots\n for (let pos = roots; pos < cur; pos += sizeof<usize>()) {\n let s = load<OBJECT>(pos);\n s.gcInfo = s.gcInfo & ~BUFFERED_MASK;\n collectWhite(s);\n }\n CUR = roots;\n}\n\n/** Marks a block as gray (possible member of cycle) during the collection phase. */\nfunction markGray(s: OBJECT): void {\n var info = s.gcInfo;\n if ((info & COLOR_MASK) != COLOR_GRAY) {\n s.gcInfo = (info & ~COLOR_MASK) | COLOR_GRAY;\n __visit_members(changetype<usize>(s) + TOTAL_OVERHEAD, VISIT_MARKGRAY);\n }\n}\n\n/** Scans a block during the collection phase, determining whether it is garbage or not. */\nfunction scan(s: OBJECT): void {\n var info = s.gcInfo;\n if ((info & COLOR_MASK) == COLOR_GRAY) {\n if ((info & REFCOUNT_MASK) > 0) {\n scanBlack(s);\n } else {\n s.gcInfo = (info & ~COLOR_MASK) | COLOR_WHITE;\n __visit_members(changetype<usize>(s) + TOTAL_OVERHEAD, VISIT_SCAN);\n }\n }\n}\n\n/** Marks a block as black (in use) if it was found to be reachable during the collection phase. */\nfunction scanBlack(s: OBJECT): void {\n s.gcInfo = (s.gcInfo & ~COLOR_MASK) | COLOR_BLACK;\n __visit_members(changetype<usize>(s) + TOTAL_OVERHEAD, VISIT_SCANBLACK);\n}\n\n/** Collects all white (member of a garbage cycle) nodes when completing the collection phase. 
*/\nfunction collectWhite(s: OBJECT): void {\n var info = s.gcInfo;\n if ((info & COLOR_MASK) == COLOR_WHITE && !(info & BUFFERED_MASK)) {\n s.gcInfo = (info & ~COLOR_MASK) | COLOR_BLACK;\n __visit_members(changetype<usize>(s) + TOTAL_OVERHEAD, VISIT_COLLECTWHITE);\n finalize(s);\n }\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __new(size: usize, id: u32): usize {\n if (size > OBJECT_MAXSIZE) throw new Error(\"allocation too large\");\n var ptr = __alloc(OBJECT_OVERHEAD + size);\n var object = changetype<OBJECT>(ptr - BLOCK_OVERHEAD);\n object.gcInfo = 0; // RC=0\n object.gcInfo2 = 0;\n object.rtId = id;\n object.rtSize = <u32>size;\n return ptr + OBJECT_OVERHEAD;\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __renew(oldPtr: usize, size: usize): usize {\n if (size > OBJECT_MAXSIZE) throw new Error(\"allocation too large\");\n var newPtr = __realloc(oldPtr - OBJECT_OVERHEAD, OBJECT_OVERHEAD + size);\n changetype<OBJECT>(newPtr - BLOCK_OVERHEAD).rtSize = <u32>size;\n return newPtr + OBJECT_OVERHEAD;\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __retain(ptr: usize): usize {\n if (ptr > __heap_base) increment(changetype<OBJECT>(ptr - TOTAL_OVERHEAD));\n return ptr;\n}\n\n// @ts-ignore: decorator\n@global @unsafe\nexport function __release(ptr: usize): void {\n if (ptr > __heap_base) decrement(changetype<OBJECT>(ptr - TOTAL_OVERHEAD));\n}\n","import { memcmp, memmove, memset } from \"./util/memory\";\nimport { E_NOTIMPLEMENTED } from \"./util/error\";\n\n/** Memory manager interface. */\nexport namespace memory {\n\n /** Gets the size of the memory in pages. */\n // @ts-ignore: decorator\n @builtin\n export declare function size(): i32;\n\n /** Grows the memory by the given size in pages and returns the previous size in pages. */\n // @ts-ignore: decorator\n @unsafe @builtin\n export declare function grow(pages: i32): i32;\n\n /** Fills a section in memory with the specified byte value. */\n // @ts-ignore: decorator\n @unsafe @builtin\n export function fill(dst: usize, c: u8, n: usize): void {\n memset(dst, c, n); // fallback if \"bulk-memory\" isn't enabled\n }\n\n /** Copies a section of memory to another. Has move semantics. */\n // @ts-ignore: decorator\n @unsafe @builtin\n export function copy(dst: usize, src: usize, n: usize): void {\n memmove(dst, src, n); // fallback if \"bulk-memory\" isn't enabled\n }\n\n /** Initializes a memory segment. */\n // @ts-ignore: decorator\n @unsafe\n export function init(segmentIndex: u32, srcOffset: usize, dstOffset: usize, n: usize): void {\n throw new Error(E_NOTIMPLEMENTED);\n }\n\n /** Drops a memory segment. */\n // @ts-ignore: decorator\n @unsafe\n export function drop(segmentIndex: u32): void {\n throw new Error(E_NOTIMPLEMENTED);\n }\n\n /** Repeats a section of memory at a specific address. */\n // @ts-ignore: decorator\n @unsafe\n export function repeat(dst: usize, src: usize, srcLength: usize, count: usize): void {\n var index: usize = 0;\n var total = srcLength * count;\n while (index < total) {\n memory.copy(dst + index, src, srcLength);\n index += srcLength;\n }\n }\n\n /** Compares a section of memory to another. */\n // @ts-ignore: decorator\n @inline\n export function compare(vl: usize, vr: usize, n: usize): i32 {\n return memcmp(vl, vr, n);\n }\n\n /** Gets a pointer to a static chunk of memory of the given size. 
*/\n // @ts-ignore: decorator\n @builtin\n export declare function data<T>(size: T, align?: i32): usize;\n}\n\n// @ts-ignore: decorator\n@builtin\nexport declare const __heap_base: usize;\n\n/** Heap memory interface. */\nexport namespace heap {\n\n /** Allocates a chunk of memory of at least the specified size. */\n // @ts-ignore: decorator\n @unsafe export function alloc(size: usize): usize {\n return __alloc(size);\n }\n\n /** Reallocates a chunk of memory to have at least the specified size. */\n // @ts-ignore: decorator\n @unsafe export function realloc(ptr: usize, size: usize): usize {\n return __realloc(ptr, size);\n }\n\n /** Frees a chunk of memory. Does hardly anything (most recent block only) with the stub/none runtime. */\n // @ts-ignore: decorator\n @unsafe export function free(ptr: usize): void {\n __free(ptr);\n }\n\n /** Dangerously resets the entire heap. Specific to the stub/none runtime. */\n // @ts-ignore: decorator\n @unsafe export function reset(): void {\n __reset();\n }\n}\n","export function memcpy(dest: usize, src: usize, n: usize): void { // see: musl/src/string/memcpy.c\n var w: u32, x: u32;\n\n // copy 1 byte each until src is aligned to 4 bytes\n while (n && (src & 3)) {\n store<u8>(dest++, load<u8>(src++));\n n--;\n }\n\n // if dst is aligned to 4 bytes as well, copy 4 bytes each\n if ((dest & 3) == 0) {\n while (n >= 16) {\n store<u32>(dest , load<u32>(src ));\n store<u32>(dest + 4, load<u32>(src + 4));\n store<u32>(dest + 8, load<u32>(src + 8));\n store<u32>(dest + 12, load<u32>(src + 12));\n src += 16; dest += 16; n -= 16;\n }\n if (n & 8) {\n store<u32>(dest , load<u32>(src ));\n store<u32>(dest + 4, load<u32>(src + 4));\n dest += 8; src += 8;\n }\n if (n & 4) {\n store<u32>(dest, load<u32>(src));\n dest += 4; src += 4;\n }\n if (n & 2) { // drop to 2 bytes each\n store<u16>(dest, load<u16>(src));\n dest += 2; src += 2;\n }\n if (n & 1) { // drop to 1 byte\n store<u8>(dest++, load<u8>(src++));\n }\n return;\n }\n\n // if dst is not aligned to 4 bytes, use alternating shifts to copy 4 bytes each\n // doing shifts if faster when copying enough bytes (here: 32 or more)\n if (n >= 32) {\n switch (<u32>dest & 3) {\n // known to be != 0\n case 1: {\n w = load<u32>(src);\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n n -= 3;\n while (n >= 17) {\n x = load<u32>(src + 1);\n store<u32>(dest, w >> 24 | x << 8);\n w = load<u32>(src + 5);\n store<u32>(dest + 4, x >> 24 | w << 8);\n x = load<u32>(src + 9);\n store<u32>(dest + 8, w >> 24 | x << 8);\n w = load<u32>(src + 13);\n store<u32>(dest + 12, x >> 24 | w << 8);\n src += 16; dest += 16; n -= 16;\n }\n break;\n }\n case 2: {\n w = load<u32>(src);\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n n -= 2;\n while (n >= 18) {\n x = load<u32>(src + 2);\n store<u32>(dest, w >> 16 | x << 16);\n w = load<u32>(src + 6);\n store<u32>(dest + 4, x >> 16 | w << 16);\n x = load<u32>(src + 10);\n store<u32>(dest + 8, w >> 16 | x << 16);\n w = load<u32>(src + 14);\n store<u32>(dest + 12, x >> 16 | w << 16);\n src += 16; dest += 16; n -= 16;\n }\n break;\n }\n case 3: {\n w = load<u32>(src);\n store<u8>(dest++, load<u8>(src++));\n n -= 1;\n while (n >= 19) {\n x = load<u32>(src + 3);\n store<u32>(dest, w >> 8 | x << 24);\n w = load<u32>(src + 7);\n store<u32>(dest + 4, x >> 8 | w << 24);\n x = load<u32>(src + 11);\n store<u32>(dest + 8, w >> 8 | x << 24);\n w = load<u32>(src + 15);\n store<u32>(dest + 12, x >> 8 | w << 24);\n src += 16; 
dest += 16; n -= 16;\n }\n break;\n }\n }\n }\n\n // copy remaining bytes one by one\n if (n & 16) {\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(src++));\n store<u8>(dest++, load<u8>(